Commit 6a9193c7 by Suma Shivaprasad

Merged with master

parents b5bf5cd9 e4385af6
@@ -35,19 +35,22 @@
<properties>
<hive.version>1.1.0</hive.version>
<calcite.version>0.9.2-incubating</calcite.version>
-<hadoop.version>2.5.0</hadoop.version>
+<hadoop.version>2.6.0</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
+<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
+<scope>runtime</scope>
+<type>test-jar</type>
</dependency>
<dependency>
@@ -55,6 +58,13 @@
<artifactId>metadata-typesystem</artifactId>
</dependency>
+<dependency>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-minikdc</artifactId>
+<version>${hadoop.version}</version>
+<scope>test</scope>
+</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
@@ -92,7 +102,12 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
-<scope>provided</scope>
+</dependency>
+<dependency>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-annotations</artifactId>
+<version>${hadoop.version}</version>
</dependency>
<dependency>
@@ -104,9 +119,15 @@
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-webapp</artifactId>
<version>${project.version}</version>
-<type>war</type>
+<classifier>classes</classifier>
+</dependency>
+<dependency>
+<groupId>org.mortbay.jetty</groupId>
+<artifactId>jetty</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -254,6 +275,12 @@
<skip>false</skip>
</configuration>
</plugin>
+<plugin>
+<groupId>org.apache.felix</groupId>
+<artifactId>maven-bundle-plugin</artifactId>
+<inherited>true</inherited>
+<extensions>true</extensions>
+</plugin>
</plugins>
</build>
</project>
@@ -59,9 +59,11 @@ for i in "${BASEDIR}/bridge/hive/"*.jar; do
METADATACPPATH="${METADATACPPATH}:$i"
done
-echo $METADATACPPATH
+# log dir for applications
+METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
+export METADATA_LOG_DIR
-JAVA_PROPERTIES="$METADATA_OPTS"
+JAVA_PROPERTIES="$METADATA_OPTS -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.log.file=import-hive.log"
shift
while [[ ${1} =~ ^\-D ]]; do
@@ -70,6 +72,7 @@ while [[ ${1} =~ ^\-D ]]; do
done
TIME=`date +%Y%m%d%H%M%s`
+#Add hive conf in classpath
if [ ! -z "$HIVE_CONF_DIR" ]; then
HIVE_CP=$HIVE_CONF_DIR
elif [ ! -z "$HIVE_HOME" ]; then
@@ -86,5 +89,5 @@ echo Using Hive configuration directory [$HIVE_CP]
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
RETVAL=$?
-[ $RETVAL -eq 0 ] && echo Hive Data Model Imported!!!
+[ $RETVAL -eq 0 ] && echo Hive Data Model imported successfully!!!
-[ $RETVAL -ne 0 ] && echo Failure in Hive Data Model import!!!
+[ $RETVAL -ne 0 ] && echo Failed to import Hive Data Model!!!
@@ -207,7 +207,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
Table table = entity.getTable();
//TODO table.getDbName().toLowerCase() is required as hive stores in lowercase,
// but table.getDbName() is not lowercase
-Referenceable dbReferenceable = getDatabaseReference(dgiBridge, table.getDbName().toLowerCase());
+Referenceable dbReferenceable = dgiBridge.registerDatabase(table.getDbName().toLowerCase());
dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
}
}
@@ -230,7 +230,8 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
LOG.info("Explain statement. Skipping...");
}
-String user = hookContext.getUserName();
+//todo hookContext.getUserName() is null in hdp sandbox 2.2.4
+String user = hookContext.getUserName() == null ? System.getProperty("user.name") : hookContext.getUserName();
HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
String queryId = null;
String queryStr = null;
@@ -245,7 +246,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
LOG.debug("Registering CTAS query: {}", queryStr);
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
-processReferenceable.set("processName", operation.getOperationName());
+processReferenceable.set("name", operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", user);
List<Referenceable> source = new ArrayList<>();
@@ -253,19 +254,19 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
if (readEntity.getTyp() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName().toLowerCase();
-source.add(getTableReference(dgiBridge, dbName, table.getTableName()));
+source.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
}
-processReferenceable.set("sourceTableNames", source);
+processReferenceable.set("inputTables", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getTyp() == Entity.Type.TABLE) {
Table table = writeEntity.getTable();
String dbName = table.getDbName().toLowerCase();
-target.add(getTableReference(dgiBridge, dbName, table.getTableName()));
+target.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
}
-processReferenceable.set("targetTableNames", target);
+processReferenceable.set("outputTables", target);
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", queryId);
processReferenceable.set("queryPlan", getQueryPlan(hookContext, conf));
@@ -276,58 +277,6 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
dgiBridge.createInstance(processReferenceable);
}
/**
* Gets reference for the database. Creates new instance if it doesn't exist
*
* @param dgiBridge
* @param dbName database name
* @return Reference for database
* @throws Exception
*/
private Referenceable getDatabaseReference(HiveMetaStoreBridge dgiBridge, String dbName) throws Exception {
String typeName = HiveDataTypes.HIVE_DB.getName();
MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
JSONObject result = dgiClient.rawSearch(typeName, "name", dbName);
JSONArray results = (JSONArray) result.get("results");
if (results.length() == 0) {
//Create new instance
return dgiBridge.registerDatabase(dbName);
} else {
String guid = (String) ((JSONObject) results.get(0)).get("guid");
return new Referenceable(guid, typeName, null);
}
}
/**
* Gets reference for the table. Creates new instance if it doesn't exist
*
* @param dgiBridge
* @param dbName
* @param tableName table name
* @return table reference
* @throws Exception
*/
private Referenceable getTableReference(HiveMetaStoreBridge dgiBridge, String dbName, String tableName) throws Exception {
String typeName = HiveDataTypes.HIVE_TABLE.getName();
MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
JSONObject result = dgiClient.rawSearch(typeName, "tableName", tableName);
JSONArray results = (JSONArray) result.get("results");
if (results.length() == 0) {
Referenceable dbRererence = getDatabaseReference(dgiBridge, dbName);
return dgiBridge.registerTable(dbRererence, dbName, tableName).first;
} else {
//There should be just one instance with the given name
String guid = (String) ((JSONObject) results.get(0)).get("guid");
return new Referenceable(guid, typeName, null);
}
}
private String getQueryPlan(HookContext hookContext, HiveConf conf) throws Exception {
//We need to somehow get the sem associated with the plan and use it here.
...
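Note: the getDatabaseReference()/getTableReference() helpers shown above are the lines this commit removes; the hook now calls dgiBridge.registerDatabase()/registerTable() directly, so the lookup-or-create behaviour is expected to live in HiveMetaStoreBridge. A minimal sketch of that pattern under stated assumptions (findOrCreateDatabase is an illustrative name, not part of the actual bridge API; import paths and the guid column are taken from packages and fields used elsewhere in this commit and may differ):

import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public class RegisterDatabaseSketch {
    // Illustrative lookup-or-create, in the style of the removed getDatabaseReference().
    static Referenceable findOrCreateDatabase(HiveMetaStoreBridge dgiBridge, String dbName) throws Exception {
        String typeName = HiveDataTypes.HIVE_DB.getName();
        MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
        // rawSearch now returns the matching rows directly (see the MetadataServiceClient change below)
        JSONArray results = dgiClient.rawSearch(typeName, "name", dbName);
        if (results.length() == 0) {
            return dgiBridge.registerDatabase(dbName);  // not found -> create and register
        }
        // assumption: each result row still carries a guid column, as the removed gremlin-based code did
        String guid = ((JSONObject) results.get(0)).getString("guid");
        return new Referenceable(guid, typeName, null); // found -> reference the existing instance
    }
}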
@@ -340,8 +340,8 @@ public class HiveDataModelGenerator {
private void createPartitionClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(),
-Multiplicity.COLLECTION, false, null),
+new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
+Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.getName(),
@@ -354,10 +354,9 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
-Multiplicity.COLLECTION, true, null),
+Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class,
@@ -368,7 +367,7 @@ public class HiveDataModelGenerator {
private void createTableClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
+new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
@@ -385,9 +384,9 @@ public class HiveDataModelGenerator {
new AttributeDefinition("partitionKeys",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
-// new AttributeDefinition("columns",
-// DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
-// Multiplicity.COLLECTION, true, null),
+new AttributeDefinition("columns",
+DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
@@ -481,7 +480,7 @@ public class HiveDataModelGenerator {
private void createProcessClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-new AttributeDefinition("processName", DataTypes.STRING_TYPE.getName(),
+new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("startTime", DataTypes.INT_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
...
@@ -7,7 +7,7 @@ Hive metadata can be modelled in DGI using its Type System. The default modelling
* hive_order(StructType) - [col, order]
* hive_resourceuri(StructType) - [resourceType, uri]
* hive_serde(StructType) - [name, serializationLib, parameters]
-* hive_process(ClassType) - [processName, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
+* hive_process(ClassType) - [name, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
* hive_function(ClassType) - [functionName, dbName, className, ownerName, ownerType, createTime, functionType, resourceUris]
* hive_type(ClassType) - [name, type1, type2, fields]
* hive_partition(ClassType) - [values, dbName, tableName, createTime, lastAccessTime, sd, parameters]
@@ -16,13 +16,13 @@ Hive metadata can be modelled in DGI using its Type System. The default modelling
* hive_role(ClassType) - [roleName, createTime, ownerName]
* hive_column(ClassType) - [name, type, comment]
* hive_db(ClassType) - [name, description, locationUri, parameters, ownerName, ownerType]
-* hive_table(ClassType) - [tableName, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
+* hive_table(ClassType) - [name, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
---++ Importing Hive Metadata
org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge imports the hive metadata into DGI using the typesystem defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. import-hive.sh command can be used to facilitate this.
-Set-up the following configs in <dgi package>/conf/hive-site.xml:
-* Hive metastore configuration - Refer [[https://cwiki.apache.org/confluence/display/Hive/AdminManual+MetastoreAdmin][Hive Metastore Configuration documentation]]
+Set-up the following configs in hive-site.xml of your hive set-up and set environment variable HIVE_CONFIG to the
+hive conf directory:
* DGI endpoint - Add the following property with the DGI endpoint for your set-up
<verbatim>
<property>
@@ -31,7 +31,7 @@ Set-up the following configs in <dgi package>/conf/hive-site.xml:
</property>
</verbatim>
-Usage: <dgi package>/bin/import-hive.sh
+Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log
---++ Hive Hook
@@ -57,4 +57,5 @@ The following properties in hive-site.xml control the thread pool details:
* hive.hook.dgi.minThreads - core number of threads. default 5
* hive.hook.dgi.maxThreads - maximum number of threads. default 5
* hive.hook.dgi.keepAliveTime - keep alive time in msecs. default 10
+* hive.hook.dgi.synchronous - boolean, true to run the hook synchronously. default false
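For reference, the hook can also be wired programmatically through HiveConf, which is what the tests added in this commit do in getHiveConf(); a minimal sketch, where the DGI endpoint and property values are examples only:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.hook.HiveHook;

public class HiveHookConfSketch {
    // Mirrors getHiveConf() in the tests added by this commit; values are examples.
    public static HiveConf dgiEnabledConf() {
        HiveConf hiveConf = new HiveConf();
        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName()); // register the hook
        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, "http://localhost:21000/"); // example DGI endpoint
        hiveConf.set("hive.hook.dgi.synchronous", "true"); // run the hook inline; default is false
        hiveConf.set("hive.hook.dgi.minThreads", "5");     // core thread pool size
        hiveConf.set("hive.hook.dgi.maxThreads", "5");     // maximum thread pool size
        hiveConf.set("hive.hook.dgi.keepAliveTime", "10"); // keep alive time in msecs
        return hiveConf;
    }
}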
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.mortbay.jetty.Server;
import java.io.File;
import java.io.IOException;
import static org.apache.hadoop.metadata.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
import static org.apache.hadoop.metadata.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
import static org.apache.hadoop.metadata.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
/**
* Base setup for SSL/Kerberos-secured hive hook integration tests: starts a KDC, creates keytabs and a
* credential provider, and runs the webapp in an embedded secure server.
*/
public class BaseSSLAndKerberosTest extends BaseSecurityTest {
public static final String TESTUSER = "testuser";
public static final String TESTPASS = "testpass";
protected static final String DGI_URL = "https://localhost:21443/";
protected Path jksPath;
protected String providerUrl;
protected File httpKeytabFile;
private File userKeytabFile;
class TestSecureEmbeddedServer extends SecureEmbeddedServer {
public TestSecureEmbeddedServer(int port, String path) throws IOException {
super(port, path);
}
public Server getServer() {
return server;
}
@Override
public PropertiesConfiguration getConfiguration() {
return super.getConfiguration();
}
}
protected void setupCredentials() throws Exception {
Configuration conf = new Configuration(false);
File file = new File(jksPath.toUri().getPath());
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
public void setupKDCAndPrincipals() throws Exception {
// set up the KDC
File kdcWorkDir = startKDC();
userKeytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
httpKeytabFile = createKeytab(kdc, kdcWorkDir, "HTTP", "spnego.service.keytab");
// create a test user principal
kdc.createPrincipal(TESTUSER, TESTPASS);
StringBuilder jaas = new StringBuilder(1024);
jaas.append("TestUser {\n" +
" com.sun.security.auth.module.Krb5LoginModule required\nuseTicketCache=true;\n" +
"};\n");
jaas.append(createJAASEntry("Client", "dgi", userKeytabFile));
jaas.append(createJAASEntry("Server", "HTTP", httpKeytabFile));
File jaasFile = new File(kdcWorkDir, "jaas.txt");
FileUtils.write(jaasFile, jaas.toString());
bindJVMtoJAASFile(jaasFile);
}
protected String getWarPath() {
return String.format("/../../webapp/target/metadata-webapp-%s",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
protected HiveConf getHiveConf() {
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
return hiveConf;
}
}
@@ -106,7 +106,7 @@ public class HiveHookIT {
}
private void assertTableIsRegistered(String tableName) throws Exception {
-assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "tableName", tableName);
+assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
@@ -114,10 +114,7 @@ public class HiveHookIT {
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
-JSONObject result = dgiCLient.rawSearch(typeName, colName, colValue);
-JSONArray results = (JSONArray) result.get("results");
+JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
-JSONObject resultRow = (JSONObject) results.get(0);
-Assert.assertEquals(resultRow.get(typeName + "." + colName), colValue);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileWriter;
import java.net.URL;
import java.nio.file.Files;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
* Perform all the necessary setup steps for client and server communication over SSL/Kerberos, but then don't establish a
* kerberos user for the invocation. Need a separate use case since the Jersey layer caches the URL connection handler,
* which indirectly caches the kerberos delegation token.
*/
public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
private Driver driver;
private SessionState ss;
private TestSecureEmbeddedServer secureEmbeddedServer;
private String originalConf;
@BeforeClass
public void setUp() throws Exception {
//Set-up hive session
HiveConf conf = getHiveConf();
driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name"));
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String persistDir = null;
URL resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
// delete prior ssl-client.xml file
resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
if (resource != null) {
File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
if (sslClientFile != null && sslClientFile.exists()) {
sslClientFile.delete();
}
}
setupKDCAndPrincipals();
setupCredentials();
// client will actually only leverage subset of these properties
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("metadata.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
String confLocation = System.getProperty("metadata.conf");
URL url;
if (confLocation == null) {
url = PropertiesUtil.class.getResource("/application.properties");
} else {
url = new File(confLocation, "application.properties").toURI().toURL();
}
configuration.load(url);
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty("metadata.http.authentication.enabled", "true");
configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
public PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.getServer().setHandler(webapp);
// save original setting
originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", persistDir);
secureEmbeddedServer.getServer().start();
}
@AfterClass
public void tearDown() throws Exception {
if (secureEmbeddedServer != null) {
secureEmbeddedServer.getServer().stop();
}
if (kdc != null) {
kdc.stop();
}
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
}
}
private void runCommand(final String cmd) throws Exception {
ss.setCommandType(null);
driver.run(cmd);
Assert.assertNotNull(driver.getErrorMsg());
Assert.assertTrue(driver.getErrorMsg().contains("Mechanism level: Failed to find any Kerberos tgt"));
}
@Test
public void testUnsecuredCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
}
}
@@ -38,13 +38,71 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<inherited>true</inherited>
<extensions>true</extensions>
</plugin>
</plugins>
</build>
</project>
@@ -22,9 +22,18 @@ import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.hadoop.metadata.security.SecureClientUtils;
+import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.typesystem.Referenceable;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
+import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
@@ -33,30 +42,48 @@ import javax.ws.rs.core.UriBuilder;
import java.util.ArrayList;
import java.util.List;
+import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED;
/**
* Client for metadata.
*/
public class MetadataServiceClient {
+private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClient.class);
public static final String REQUEST_ID = "requestId";
public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize";
+private static final String BASE_URI = "api/metadata/";
+private static final String URI_TYPES = "types";
+private static final String URI_ENTITIES = "entities";
+private static final String URI_TRAITS = "traits";
+private static final String URI_SEARCH = "discovery/search";
-private WebResource service;
+private final WebResource service;
public MetadataServiceClient(String baseUrl) {
DefaultClientConfig config = new DefaultClientConfig();
-Client client = Client.create(config);
+PropertiesConfiguration clientConfig = null;
+try {
+clientConfig = getClientProperties();
+if (clientConfig.getBoolean(TLS_ENABLED) || clientConfig.getString("metadata.http.authentication.type") != null) {
+// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced to create a
+// configuration object, persist it, then subsequently pass in an empty configuration to SSLFactory
+SecureClientUtils.persistSSLClientConfiguration(clientConfig);
+}
+} catch (Exception e) {
+LOG.info("Error processing client configuration.", e);
+}
+URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig);
+Client client = new Client(handler, config);
client.resource(UriBuilder.fromUri(baseUrl).build());
service = client.resource(UriBuilder.fromUri(baseUrl).build());
}
-private static final String BASE_URI = "api/metadata/";
-private static final String URI_TYPES = "types";
-private static final String URI_ENTITIES = "entities";
-private static final String URI_TRAITS = "traits";
-private static final String URI_SEARCH = "discovery/search";
+protected PropertiesConfiguration getClientProperties() throws MetadataException {
+return PropertiesUtil.getClientProperties();
+}
static enum API {
//Type operations
@@ -155,8 +182,14 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
-public JSONObject getEntity(String guid) throws MetadataServiceException {
-return callAPI(API.GET_ENTITY, null, guid);
+public Referenceable getEntity(String guid) throws MetadataServiceException {
+JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
+try {
+String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS);
+return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
+} catch (JSONException e) {
+throw new MetadataServiceException(e);
+}
}
public JSONObject searchEntity(String searchQuery) throws MetadataServiceException {
@@ -173,14 +206,14 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
-public JSONObject rawSearch(String typeName, String attributeName,
-Object attributeValue) throws MetadataServiceException {
-String gremlinQuery = String.format(
-"g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
-typeName, typeName, attributeName, attributeValue);
-return searchByGremlin(gremlinQuery);
-// String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
-// return searchByDSL(dslQuery);
+public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue) throws
+MetadataServiceException {
+// String gremlinQuery = String.format(
+// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
+// typeName, typeName, attributeName, attributeValue);
+// return searchByGremlin(gremlinQuery);
+String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
+return searchByDSL(dslQuery);
}
/**
@@ -189,10 +222,15 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
-public JSONObject searchByDSL(String query) throws MetadataServiceException {
+public JSONArray searchByDSL(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_DSL);
resource = resource.queryParam("query", query);
-return callAPIWithResource(API.SEARCH_DSL, resource);
+JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
+try {
+return result.getJSONObject("results").getJSONArray("rows");
+} catch (JSONException e) {
+throw new MetadataServiceException(e);
+}
}
/**
...
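A minimal sketch of how a caller consumes the changed client API (rawSearch and searchByDSL now return a JSONArray of rows, getEntity returns a deserialized Referenceable). The endpoint, type name, attribute and guid below are placeholders, and package names follow the ones used elsewhere in this commit:

import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.codehaus.jettison.json.JSONArray;

public class MetadataClientUsageSketch {
    public static void main(String[] args) throws Exception {
        MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/"); // example endpoint

        // rawSearch builds a DSL query such as: hive_table where name = "sales_fact"
        JSONArray rows = client.rawSearch("hive_table", "name", "sales_fact");
        System.out.println("matches: " + rows.length());

        // getEntity now deserializes the "results" payload into a Referenceable
        Referenceable entity = client.getEntity("00000000-0000-0000-0000-000000000000"); // placeholder guid
        System.out.println(entity);
    }
}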
@@ -31,20 +31,30 @@ public class PropertiesUtil {
private static final Logger LOG = LoggerFactory.getLogger(PropertiesUtil.class);
private static final String APPLICATION_PROPERTIES = "application.properties";
+public static final String CLIENT_PROPERTIES = "client.properties";
public static final PropertiesConfiguration getApplicationProperties() throws MetadataException {
+return getPropertiesConfiguration(APPLICATION_PROPERTIES);
+}
+public static final PropertiesConfiguration getClientProperties() throws MetadataException {
+return getPropertiesConfiguration(CLIENT_PROPERTIES);
+}
+private static PropertiesConfiguration getPropertiesConfiguration(String name) throws MetadataException {
String confLocation = System.getProperty("metadata.conf");
URL url;
try {
if (confLocation == null) {
-url = PropertiesUtil.class.getResource("/" + APPLICATION_PROPERTIES);
+url = PropertiesUtil.class.getResource("/" + name);
} else {
-url = new File(confLocation, APPLICATION_PROPERTIES).toURI().toURL();
+url = new File(confLocation, name).toURI().toURL();
}
-LOG.info("Loading {} from {}", APPLICATION_PROPERTIES, url);
+LOG.info("Loading {} from {}", name, url);
return new PropertiesConfiguration(url);
} catch (Exception e) {
throw new MetadataException("Failed to load application properties", e);
}
}
}
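A small sketch of the new client-side lookup, as MetadataServiceClient.getClientProperties() uses it. The property key comes from SecurityProperties further down; the default value passed to getBoolean is an assumption for illustration:

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;

public class ClientPropertiesSketch {
    public static void main(String[] args) throws Exception {
        // Resolves client.properties from -Dmetadata.conf=<dir>, falling back to the classpath,
        // through the same path application.properties has always used.
        PropertiesConfiguration clientConfig = PropertiesUtil.getClientProperties();
        boolean tlsEnabled = clientConfig.getBoolean("metadata.enableTLS", false); // key = SecurityProperties.TLS_ENABLED
        System.out.println("TLS enabled: " + tlsEnabled);
    }
}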
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.security;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.Authenticator;
import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.security.GeneralSecurityException;
import java.security.PrivilegedExceptionAction;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
* Utilities for building Jersey client connection handlers that support SSL and
* Hadoop delegation-token (Kerberos) authentication.
*/
public class SecureClientUtils {
public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute
private static final Logger LOG = LoggerFactory.getLogger(SecureClientUtils.class);
public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
PropertiesConfiguration clientConfig) {
config.getProperties().put(
URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
true);
Configuration conf = new Configuration(false);
conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml"));
String authType = "simple";
if (clientConfig != null) {
authType = clientConfig.getString("metadata.http.authentication.type", "simple");
}
UserGroupInformation.setConfiguration(conf);
final ConnectionConfigurator connConfigurator = newConnConfigurator(conf);
Authenticator authenticator = new PseudoDelegationTokenAuthenticator();
if (!authType.equals("simple")) {
authenticator = new KerberosDelegationTokenAuthenticator();
}
authenticator.setConnectionConfigurator(connConfigurator);
final DelegationTokenAuthenticator finalAuthenticator = (DelegationTokenAuthenticator) authenticator;
final DelegationTokenAuthenticatedURL.Token token = new DelegationTokenAuthenticatedURL.Token();
HttpURLConnectionFactory httpURLConnectionFactory = new HttpURLConnectionFactory() {
@Override
public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
try {
return new DelegationTokenAuthenticatedURL(finalAuthenticator, connConfigurator)
.openConnection(url, token, null);
} catch (Exception e) {
throw new IOException(e);
}
}
};
return new URLConnectionClientHandler(httpURLConnectionFactory);
}
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR =
new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
return conn;
}
};
private static ConnectionConfigurator newConnConfigurator(Configuration conf) {
try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
} catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " +
"Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
}
}
private static ConnectionConfigurator newSslConnConfigurator(final int timeout,
Configuration conf) throws IOException, GeneralSecurityException {
final SSLFactory factory;
final SSLSocketFactory sf;
final HostnameVerifier hv;
factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
factory.init();
sf = factory.createSSLSocketFactory();
hv = factory.getHostnameVerifier();
return new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
if (conn instanceof HttpsURLConnection) {
HttpsURLConnection c = (HttpsURLConnection) conn;
c.setSSLSocketFactory(sf);
c.setHostnameVerifier(hv);
}
setTimeouts(conn, timeout);
return conn;
}
};
}
private static void setTimeouts(URLConnection connection, int socketTimeout) {
connection.setConnectTimeout(socketTimeout);
connection.setReadTimeout(socketTimeout);
}
private static File getSSLClientFile() throws MetadataException {
String confLocation = System.getProperty("metadata.conf");
File sslDir;
try {
if (confLocation == null) {
String persistDir = null;
URL resource = PropertiesUtil.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
assert persistDir != null;
sslDir = new File(persistDir);
} else {
sslDir = new File(confLocation);
}
LOG.info("ssl-client.xml will be created in {}", sslDir);
} catch (Exception e) {
throw new MetadataException("Failed to find client configuration directory", e);
}
return new File(sslDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
}
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws MetadataException, IOException {
//trust settings
Configuration configuration = new Configuration(false);
File sslClientFile = getSSLClientFile();
if (!sslClientFile.exists()) {
configuration.set("ssl.client.truststore.type", "jks");
configuration.set("ssl.client.truststore.location", clientConfig.getString(TRUSTSTORE_FILE_KEY));
if (clientConfig.getBoolean(CLIENT_AUTH_KEY, false)) {
// need to get client key properties
configuration.set("ssl.client.keystore.location", clientConfig.getString(KEYSTORE_FILE_KEY));
configuration.set("ssl.client.keystore.type", "jks");
}
// add the configured credential provider
configuration.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
clientConfig.getString(CERT_STORES_CREDENTIAL_PROVIDER_PATH));
String hostnameVerifier = clientConfig.getString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY);
if (hostnameVerifier != null) {
configuration.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, hostnameVerifier);
}
configuration.writeXml(new FileWriter(sslClientFile));
}
}
}
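A sketch of what a caller is expected to provide before these utilities run, using the keys defined in SecurityProperties below; the file paths, credential-provider URL and endpoint are examples only, not values from this commit:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecureClientUtils;
import org.apache.hadoop.metadata.security.SecurityProperties;

public class SecureClientSetupSketch {
    public static void main(String[] args) throws Exception {
        // Example values; in this commit they come from client.properties.
        PropertiesConfiguration clientConfig = new PropertiesConfiguration();
        clientConfig.setProperty(SecurityProperties.TLS_ENABLED, true);
        clientConfig.setProperty(SecurityProperties.TRUSTSTORE_FILE_KEY, "/etc/metadata/metadata.keystore");
        clientConfig.setProperty(SecurityProperties.KEYSTORE_FILE_KEY, "/etc/metadata/metadata.keystore");
        clientConfig.setProperty(SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH,
                "jceks://file//etc/metadata/credentials.jceks");

        // Writes ssl-client.xml into the metadata.conf directory (or classpath root) if it is missing.
        SecureClientUtils.persistSSLClientConfiguration(clientConfig);

        // Builds a Jersey handler that goes through the Hadoop delegation-token authenticator
        // and the SSL socket factory derived from the configuration above.
        DefaultClientConfig config = new DefaultClientConfig();
        URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig);
        Client client = new Client(handler, config);
        client.resource("https://localhost:21443/"); // example secure endpoint
    }
}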
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.security;
/**
* Property keys shared by the secure client and server configuration.
*/
public interface SecurityProperties {
public static final String TLS_ENABLED = "metadata.enableTLS";
public static final String KEYSTORE_FILE_KEY = "keystore.file";
public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String KEYSTORE_PASSWORD_KEY = "keystore.password";
public static final String TRUSTSTORE_FILE_KEY = "truststore.file";
public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
}
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.metadata.web;
+package org.apache.hadoop.metadata.security;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
...
@@ -823,7 +823,9 @@
<!--<skipTests>true</skipTests>-->
<forkMode>always</forkMode>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
-<argLine>-Djava.awt.headless=true</argLine>
+<argLine>-Djava.awt.headless=true -Dproject.version=${project.version}
+-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
+-Xmx1024m -XX:MaxPermSize=512m</argLine>
</configuration>
<dependencies>
<dependency>
@@ -943,6 +945,7 @@
<exclude>**/maven-eclipse.xml</exclude>
<exclude>**/.externalToolBuilders/**</exclude>
<exclude>dashboard/**</exclude>
+<exclude>**/build.log</exclude>
</excludes>
</configuration>
<executions>
...
@@ -40,6 +40,11 @@
</dependency>
<dependency>
+<groupId>org.apache.hadoop.metadata</groupId>
+<artifactId>metadata-client</artifactId>
+</dependency>
+<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
...
...@@ -100,7 +100,6 @@ public class HiveLineageService implements LineageService { ...@@ -100,7 +100,6 @@ public class HiveLineageService implements LineageService {
public String getOutputs(String tableName) throws DiscoveryException { public String getOutputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage outputs for tableName={}", tableName); LOG.info("Fetching lineage outputs for tableName={}", tableName);
try {
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery( HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME, HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
...@@ -108,9 +107,11 @@ public class HiveLineageService implements LineageService { ...@@ -108,9 +107,11 @@ public class HiveLineageService implements LineageService {
graphPersistenceStrategy, titanGraph); graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr(); Expressions.Expression expression = outputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
return discoveryService.evaluate(expression).toJson(); return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException } catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e); throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
} }
} }
...@@ -124,7 +125,6 @@ public class HiveLineageService implements LineageService { ...@@ -124,7 +125,6 @@ public class HiveLineageService implements LineageService {
public String getInputs(String tableName) throws DiscoveryException { public String getInputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage inputs for tableName={}", tableName); LOG.info("Fetching lineage inputs for tableName={}", tableName);
try {
HiveLineageQuery inputsQuery = new HiveLineageQuery( HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME, HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
...@@ -132,9 +132,11 @@ public class HiveLineageService implements LineageService { ...@@ -132,9 +132,11 @@ public class HiveLineageService implements LineageService {
graphPersistenceStrategy, titanGraph); graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = inputsQuery.expr(); Expressions.Expression expression = inputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
return discoveryService.evaluate(expression).toJson(); return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException } catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e); throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
} }
} }
...@@ -148,9 +150,10 @@ public class HiveLineageService implements LineageService { ...@@ -148,9 +150,10 @@ public class HiveLineageService implements LineageService {
public String getSchema(String tableName) throws DiscoveryException { public String getSchema(String tableName) throws DiscoveryException {
// todo - validate if indeed this is a table type and exists // todo - validate if indeed this is a table type and exists
String schemaQuery = HIVE_TABLE_TYPE_NAME String schemaQuery = HIVE_TABLE_TYPE_NAME
+ " where name=\"" + tableName + "\", " + " where name=\"" + tableName + "\""
+ HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME; + ", " + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME
// + " as column select column.name, column.dataType, column.comment"; // + " as column select column.name, column.dataType, column.comment"
;
return discoveryService.searchByDSL(schemaQuery); return discoveryService.searchByDSL(schemaQuery);
} }
} }
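Editor's note on the HiveLineageService hunks above: both getOutputs() and getInputs() now build the query expression first, log it, and wrap only the evaluation call, so a failure reports the offending expression instead of a generic message. A minimal sketch of that shared pattern follows; the helper itself is illustrative (the commit keeps this logic inlined in each method), while the calls it makes are the ones visible in the hunks.

    // Illustrative helper only; discoveryService, LOG and DiscoveryException are the ones used above.
    private String evaluateOrFail(Expressions.Expression expression) throws DiscoveryException {
        LOG.debug("Expression is [" + expression.toString() + "]");
        try {
            return discoveryService.evaluate(expression).toJson();
        } catch (Exception e) { // ExpressionException is not visible from here, hence the broad catch
            throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
        }
    }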
...@@ -73,7 +73,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService { ...@@ -73,7 +73,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository); this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
} }
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.0/index-backends.html for indexed query //Refer http://s3.thinkaurelius.com/docs/titan/0.5.4/index-backends.html for indexed query
//http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query //http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
// .html#query-string-syntax for query syntax // .html#query-string-syntax for query syntax
@Override @Override
......
...@@ -128,7 +128,16 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -128,7 +128,16 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@Override @Override
public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) { public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
return EDGE_LABEL_PREFIX + dataType.getName() + "." + aInfo.name; return getEdgeLabel(dataType.getName(), aInfo.name);
}
public String getEdgeLabel(String typeName, String attrName) {
return EDGE_LABEL_PREFIX + typeName + "." + attrName;
}
public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws MetadataException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getEdgeLabel(dataType, aInfo);
} }
@Override @Override
...@@ -275,7 +284,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -275,7 +284,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
} }
final String entityTypeName = getTypeName(instanceVertex); final String entityTypeName = getTypeName(instanceVertex);
String relationshipLabel = entityTypeName + "." + traitNameToBeDeleted; String relationshipLabel = getEdgeLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges( Iterator<Edge> results = instanceVertex.getEdges(
Direction.OUT, relationshipLabel).iterator(); Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label if (results.hasNext()) { // there should only be one edge for this label
...@@ -673,6 +682,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -673,6 +682,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
Object attrValue = typedInstance.get(attributeInfo.name); Object attrValue = typedInstance.get(attributeInfo.name);
LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue); LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue);
final String propertyName = getQualifiedName(typedInstance, attributeInfo); final String propertyName = getQualifiedName(typedInstance, attributeInfo);
String edgeLabel = getEdgeLabel(typedInstance, attributeInfo);
if (attrValue == null) { if (attrValue == null) {
return; return;
} }
...@@ -698,11 +708,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -698,11 +708,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case STRUCT: case STRUCT:
Vertex structInstanceVertex = mapStructInstanceToVertex(id, Vertex structInstanceVertex = mapStructInstanceToVertex(id,
(ITypedStruct) typedInstance.get(attributeInfo.name), (ITypedStruct) typedInstance.get(attributeInfo.name), attributeInfo, idToVertexMap);
attributeInfo, idToVertexMap);
// add an edge to the newly created vertex from the parent // add an edge to the newly created vertex from the parent
GraphHelper.addEdge( GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, propertyName); titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
break; break;
case TRAIT: case TRAIT:
...@@ -712,7 +721,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -712,7 +721,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case CLASS: case CLASS:
Id referenceId = (Id) typedInstance.get(attributeInfo.name); Id referenceId = (Id) typedInstance.get(attributeInfo.name);
mapClassReferenceAsEdge( mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, propertyName, referenceId); instanceVertex, idToVertexMap, edgeLabel, referenceId);
break; break;
default: default:
...@@ -886,7 +895,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -886,7 +895,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
traitInstance.fieldMapping().fields, idToVertexMap); traitInstance.fieldMapping().fields, idToVertexMap);
// add an edge to the newly created vertex from the parent // add an edge to the newly created vertex from the parent
String relationshipLabel = typedInstanceTypeName + "." + traitName; String relationshipLabel = getEdgeLabel(typedInstanceTypeName, traitName);
GraphHelper.addEdge( GraphHelper.addEdge(
titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel); titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel);
} }
...@@ -1017,7 +1026,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1017,7 +1026,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break; break;
case CLASS: case CLASS:
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo); String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
Object idOrInstance = mapClassReferenceToVertex(instanceVertex, Object idOrInstance = mapClassReferenceToVertex(instanceVertex,
attributeInfo, relationshipLabel, attributeInfo.dataType()); attributeInfo, relationshipLabel, attributeInfo.dataType());
typedInstance.set(attributeInfo.name, idOrInstance); typedInstance.set(attributeInfo.name, idOrInstance);
...@@ -1221,7 +1230,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository { ...@@ -1221,7 +1230,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
ITypedStruct structInstance = structType.createInstance(); ITypedStruct structInstance = structType.createInstance();
typedInstance.set(attributeInfo.name, structInstance); typedInstance.set(attributeInfo.name, structInstance);
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo); String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel); LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) { for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
final Vertex structInstanceVertex = edge.getVertex(Direction.IN); final Vertex structInstanceVertex = edge.getVertex(Direction.IN);
......
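Editor's note on the GraphBackedMetadataRepository hunks above: relationship labels for struct, class-reference and trait edges now all go through one composition, getEdgeLabel(typeName, attrName), rather than ad-hoc string concatenation at each call site. A sketch of that composition, with edgeLabelPrefix standing in for the repository's existing EDGE_LABEL_PREFIX constant (its value is not shown in this diff):

    // Sketch only: the unified label scheme now used when writing and reading edges.
    static String edgeLabel(String edgeLabelPrefix, String typeName, String attrName) {
        return edgeLabelPrefix + typeName + "." + attrName;   // e.g. <prefix>hive_table.columns
    }

Because both the write path (mapping attributes to edges) and the read path (trait deletion, class/struct fetch) use the same helper, lookups now match the labels that were actually persisted.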
...@@ -219,7 +219,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -219,7 +219,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
case CLASS: case CLASS:
// this is only A reference, index the attribute for edge // this is only A reference, index the attribute for edge
createEdgeMixedIndex(propertyName); // Commenting this out since we do not need an index for edge here
//createEdgeMixedIndex(propertyName);
break; break;
default: default:
...@@ -314,15 +315,23 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -314,15 +315,23 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
.dataType(propertyClass) .dataType(propertyClass)
.make(); .make();
if (propertyClass == Boolean.class) {
//Use standard index as backing index only supports string, int and geo types
management.buildIndex(propertyName, Vertex.class).addKey(propertyKey).buildCompositeIndex();
management.commit();
} else {
//Use backing index
TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX); TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
management.addIndexKey(vertexIndex, propertyKey); management.addIndexKey(vertexIndex, propertyKey);
management.commit(); management.commit();
}
LOG.info("Created mixed vertex index for property {}", propertyName); LOG.info("Created mixed vertex index for property {}", propertyName);
} }
return propertyKey; return propertyKey;
} }
/* Commenting this out since we do not need an index for edge label here
private void createEdgeMixedIndex(String propertyName) { private void createEdgeMixedIndex(String propertyName) {
TitanManagement management = titanGraph.getManagementSystem(); TitanManagement management = titanGraph.getManagementSystem();
EdgeLabel edgeLabel = management.getEdgeLabel(propertyName); EdgeLabel edgeLabel = management.getEdgeLabel(propertyName);
...@@ -332,5 +341,5 @@ public class GraphBackedSearchIndexer implements SearchIndexer { ...@@ -332,5 +341,5 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
management.commit(); management.commit();
LOG.info("Created index for edge label {}", propertyName); LOG.info("Created index for edge label {}", propertyName);
} }
} } */
} }
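Editor's note on the GraphBackedSearchIndexer hunk above: Boolean properties are special-cased because, per the in-code comment, the backing (Elasticsearch) index only supports string, int and geo types, so booleans get a Titan composite index instead of a key on the shared mixed index. A condensed sketch of the resulting branch, following the Titan 0.5 management calls visible in the hunk (propertyName, propertyClass and management come from the surrounding method):

    // Condensed from the hunk above; commit() is hoisted out of the branches for brevity.
    PropertyKey propertyKey = management.makePropertyKey(propertyName).dataType(propertyClass).make();
    if (propertyClass == Boolean.class) {
        // composite index: exact-match lookups served by Titan itself, no external backend needed
        management.buildIndex(propertyName, Vertex.class).addKey(propertyKey).buildCompositeIndex();
    } else {
        // mixed index: key is added to the shared Elasticsearch-backed vertex index
        TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
        management.addIndexKey(vertexIndex, propertyKey);
    }
    management.commit();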
...@@ -191,7 +191,7 @@ public class DefaultMetadataService implements MetadataService { ...@@ -191,7 +191,7 @@ public class DefaultMetadataService implements MetadataService {
Preconditions.checkNotNull(guid, "guid cannot be null"); Preconditions.checkNotNull(guid, "guid cannot be null");
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid); final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
return Serialization$.MODULE$.toJson(instance); return InstanceSerialization.toJson(instance, true);
} }
/** /**
......
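Editor's note on the DefaultMetadataService hunk above: entity definitions are now serialized with InstanceSerialization rather than the Scala Serialization object, which keeps the wire format consistent with what the web tests later parse back. A small round-trip sketch, assuming only the signatures visible elsewhere in this commit (per the Scala definition shown further down, the boolean is the withBigDecimals flag; the read side mirrors the call added in EntityJerseyResourceIT):

    // Round-trip sketch; 'instance' is the ITypedReferenceableInstance fetched from the repository.
    String json = InstanceSerialization.toJson(instance, true);        // withBigDecimals = true
    InstanceSerialization.fromJsonReferenceable(json, true);           // parse back; return value unused, as in the IT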
...@@ -104,7 +104,6 @@ class GremlinEvaluator(qry: GremlinQuery, persistenceStrategy: GraphPersistenceS ...@@ -104,7 +104,6 @@ class GremlinEvaluator(qry: GremlinQuery, persistenceStrategy: GraphPersistenceS
val v = rV.getColumn(src).get(idx) val v = rV.getColumn(src).get(idx)
sInstance.set(cName, persistenceStrategy.constructInstance(aE.dataType, v)) sInstance.set(cName, persistenceStrategy.constructInstance(aE.dataType, v))
} }
sInstance
addPathStruct(r, sInstance) addPathStruct(r, sInstance)
} }
GremlinQueryResult(qry.expr.toString, rType, rows.toList) GremlinQueryResult(qry.expr.toString, rType, rows.toList)
......
...@@ -29,6 +29,7 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService; ...@@ -29,6 +29,7 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample; import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils; import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository; import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphHelper; import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider; import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
...@@ -224,6 +225,7 @@ public class GraphBackedDiscoveryServiceTest { ...@@ -224,6 +225,7 @@ public class GraphBackedDiscoveryServiceTest {
{"Table as _loop0 loop (LoadProcess outputTable) withPath"}, {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"}, {"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"}, {"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
{"Table where name='sales_fact', db where name='Reporting'"}
}; };
} }
...@@ -268,39 +270,6 @@ public class GraphBackedDiscoveryServiceTest { ...@@ -268,39 +270,6 @@ public class GraphBackedDiscoveryServiceTest {
} }
@Test @Test
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "Column as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "Column");
String name = row.getString("name");
Assert.assertNotEquals(name, "null");
}
}
@Test
public void testSearchForTypeInheritance() throws Exception { public void testSearchForTypeInheritance() throws Exception {
createTypesWithMultiLevelInheritance(); createTypesWithMultiLevelInheritance();
createInstances(); createInstances();
......
...@@ -145,8 +145,7 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -145,8 +145,7 @@ public class GraphBackedMetadataRepositoryTest {
@Test (dependsOnMethods = "testSubmitEntity") @Test (dependsOnMethods = "testSubmitEntity")
public void testGetTraitLabel() throws Exception { public void testGetTraitLabel() throws Exception {
Assert.assertEquals(repositoryService.getTraitLabel( Assert.assertEquals(repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TABLE_TYPE),
typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION); CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
} }
...@@ -317,6 +316,39 @@ public class GraphBackedMetadataRepositoryTest { ...@@ -317,6 +316,39 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE); Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
} }
@Test(dependsOnMethods = "testCreateEntity")
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "hive_database as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "hive_database");
String name = row.getString("name");
Assert.assertEquals(name, DATABASE_NAME);
}
}
/** /**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in * Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
 * GraphBackedDiscoveryServiceTest because of its test data. So, the test for full text search is in      * GraphBackedDiscoveryServiceTest because of its test data. So, the test for full text search is in
......
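Editor's note on the relocated testSearchByDSLQuery above: it also documents the shape of the searchByDSL response, a three-field envelope carrying the original query, the result dataType (including typeName), and a rows array whose entries have a $typeName$ discriminator plus the selected attributes. A compact parsing sketch based only on the assertions in that test (jsonResults is the string returned by discoveryService.searchByDSL):

    // Fields as asserted in the test above, using the same jettison JSON classes.
    JSONObject results = new JSONObject(jsonResults);
    Object query    = results.get("query");
    String typeName = results.getJSONObject("dataType").getString("typeName");
    JSONArray rows  = results.getJSONArray("rows");
    for (int i = 0; i < rows.length(); i++) {
        JSONObject row = rows.getJSONObject(i);
        String rowType = row.getString("$typeName$");   // e.g. "hive_database"
        String name    = row.getString("name");
    }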
...@@ -99,7 +99,7 @@ mkdir -p $METADATA_LOG_DIR ...@@ -99,7 +99,7 @@ mkdir -p $METADATA_LOG_DIR
pushd ${BASEDIR} > /dev/null pushd ${BASEDIR} > /dev/null
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF}" JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF} -Dmetadata.log.file=application.log"
shift shift
while [[ ${1} =~ ^\-D ]]; do while [[ ${1} =~ ^\-D ]]; do
......
...@@ -20,8 +20,9 @@ import sys ...@@ -20,8 +20,9 @@ import sys
import metadata_config as mc import metadata_config as mc
METADATA_LOG_OPTS="-Dmetadata.log.dir=%s" METADATA_LOG_OPTS="-Dmetadata.log.dir=%s -Dmetadata.log.file=application.log"
METADATA_COMMAND_OPTS="-Dmetadata.home=%s" METADATA_COMMAND_OPTS="-Dmetadata.home=%s"
METADATA_CONFIG_OPTS="-Dmetadata.conf=%s"
DEFAULT_JVM_OPTS="-Xmx1024m" DEFAULT_JVM_OPTS="-Xmx1024m"
def main(): def main():
...@@ -37,6 +38,9 @@ def main(): ...@@ -37,6 +38,9 @@ def main():
cmd_opts = (METADATA_COMMAND_OPTS % metadata_home) cmd_opts = (METADATA_COMMAND_OPTS % metadata_home)
jvm_opts_list.extend(cmd_opts.split()) jvm_opts_list.extend(cmd_opts.split())
config_opts = (METADATA_CONFIG_OPTS % confdir)
jvm_opts_list.extend(config_opts.split())
default_jvm_opts = DEFAULT_JVM_OPTS default_jvm_opts = DEFAULT_JVM_OPTS
metadata_jvm_opts = os.environ.get(mc.METADATA_OPTS, default_jvm_opts) metadata_jvm_opts = os.environ.get(mc.METADATA_OPTS, default_jvm_opts)
jvm_opts_list.extend(metadata_jvm_opts.split()) jvm_opts_list.extend(metadata_jvm_opts.split())
......
...@@ -30,12 +30,13 @@ metadata.graph.index.search.elasticsearch.create.sleep=2000 ...@@ -30,12 +30,13 @@ metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs ######### ######### Hive Lineage Configs #########
# This model follows the quick-start guide # This model follows the quick-start guide
metadata.lineage.hive.table.type.name=Table metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.column.type.name=Column
metadata.lineage.hive.table.column.name=columns metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=LoadProcess metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.inputs.name=inputTables metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables metadata.lineage.hive.process.outputs.name=outputTables
#Currently unused
#metadata.lineage.hive.column.type.name=Column
######### Security Properties ######### ######### Security Properties #########
......
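Editor's note on the application.properties hunk above: the lineage keys now point at the hive_* types produced by the Hive bridge, presumably feeding the HIVE_*_NAME constants used in the HiveLineageService hunks earlier. As a rough illustration (not the service's actual wiring) of reading those keys with commons-configuration, with defaults mirroring the values set above:

    // Illustration only; PropertiesConfiguration is the commons-configuration class already used in this codebase.
    static void readLineageConfig() throws ConfigurationException {
        PropertiesConfiguration conf = new PropertiesConfiguration("application.properties");
        String tableType   = conf.getString("metadata.lineage.hive.table.type.name", "hive_table");
        String columnsAttr = conf.getString("metadata.lineage.hive.table.column.name", "columns");
        String processType = conf.getString("metadata.lineage.hive.process.type.name", "hive_process");
        String inputsAttr  = conf.getString("metadata.lineage.hive.process.inputs.name", "inputTables");
        String outputsAttr = conf.getString("metadata.lineage.hive.process.outputs.name", "outputTables");
    }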
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
</appender> </appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/application.log"/> <param name="File" value="${metadata.log.dir}/${metadata.log.file}"/>
<param name="Append" value="true"/> <param name="Append" value="true"/>
<param name="Threshold" value="debug"/> <param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout"> <layout class="org.apache.log4j.PatternLayout">
......
...@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase): ...@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase):
'org.apache.hadoop.metadata.Main', 'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.home=metadata_home', '-Xmx1024m']) ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
else: else:
java_mock.assert_called_with( java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main', 'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'], ['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*', 'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.home=metadata_home', '-Xmx1024m']) ['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
pass pass
......
...@@ -20,6 +20,8 @@ package org.apache.hadoop.metadata.typesystem; ...@@ -20,6 +20,8 @@ package org.apache.hadoop.metadata.typesystem;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import java.util.Map;
/** /**
* Represents a Struct or Trait or Object. * Represents a Struct or Trait or Object.
*/ */
...@@ -31,4 +33,6 @@ public interface IInstance { ...@@ -31,4 +33,6 @@ public interface IInstance {
void set(String attrName, Object val) throws MetadataException; void set(String attrName, Object val) throws MetadataException;
Map<String, Object> getValuesMap() throws MetadataException;
} }
...@@ -56,7 +56,7 @@ public class Struct implements IStruct { ...@@ -56,7 +56,7 @@ public class Struct implements IStruct {
values.put(attrName, value); values.put(attrName, value);
} }
@InterfaceAudience.Private @Override
public Map<String, Object> getValuesMap() { public Map<String, Object> getValuesMap() {
return values; return values;
} }
......
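Editor's note on the IInstance and Struct hunks above: getValuesMap() is now part of the instance contract (Struct's existing accessor simply becomes the override), so any struct, trait or referenceable can be walked generically during JSON serialization. A minimal consumer-side sketch, assuming only the interface shown here:

    // Sketch: generic consumer of the new contract. Copies the map because implementations
    // such as Struct return their internal values map directly.
    static Map<String, Object> snapshotValues(IInstance instance) throws MetadataException {
        return new HashMap<>(instance.getValuesMap());
    }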
...@@ -22,6 +22,9 @@ import org.apache.hadoop.metadata.MetadataException; ...@@ -22,6 +22,9 @@ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IStruct; import org.apache.hadoop.metadata.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.types.DownCastFieldMapping; import org.apache.hadoop.metadata.typesystem.types.DownCastFieldMapping;
import java.util.HashMap;
import java.util.Map;
public class DownCastStructInstance implements IStruct { public class DownCastStructInstance implements IStruct {
public final String typeName; public final String typeName;
...@@ -49,6 +52,20 @@ public class DownCastStructInstance implements IStruct { ...@@ -49,6 +52,20 @@ public class DownCastStructInstance implements IStruct {
public void set(String attrName, Object val) throws MetadataException { public void set(String attrName, Object val) throws MetadataException {
fieldMapping.set(this, attrName, val); fieldMapping.set(this, attrName, val);
} }
/*
* Use only for json serialization
* @nonpublic
*/
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
Map<String,Object> m = new HashMap<>();
for (String attr : fieldMapping.fieldNameMap.keySet()) {
m.put(attr, get(attr));
}
return m;
}
} }
...@@ -27,6 +27,7 @@ import org.apache.hadoop.metadata.typesystem.types.FieldMapping; ...@@ -27,6 +27,7 @@ import org.apache.hadoop.metadata.typesystem.types.FieldMapping;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.Date; import java.util.Date;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
public class Id implements ITypedReferenceableInstance { public class Id implements ITypedReferenceableInstance {
...@@ -142,6 +143,11 @@ public class Id implements ITypedReferenceableInstance { ...@@ -142,6 +143,11 @@ public class Id implements ITypedReferenceableInstance {
return null; return null;
} }
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
}
public void setNull(String attrName) throws MetadataException { public void setNull(String attrName) throws MetadataException {
set(attrName, null); set(attrName, null);
} }
......
...@@ -36,6 +36,7 @@ import org.apache.hadoop.metadata.typesystem.types.ValueConversionException; ...@@ -36,6 +36,7 @@ import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.Date; import java.util.Date;
import java.util.HashMap;
import java.util.Map; import java.util.Map;
public class StructInstance implements ITypedStruct { public class StructInstance implements ITypedStruct {
...@@ -233,6 +234,20 @@ public class StructInstance implements ITypedStruct { ...@@ -233,6 +234,20 @@ public class StructInstance implements ITypedStruct {
nullFlags[nullPos] = true; nullFlags[nullPos] = true;
} }
/*
* Use only for json serialization
* @nonpublic
*/
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
Map<String,Object> m = new HashMap<>();
for (String attr : fieldMapping.fields.keySet()) {
m.put(attr, get(attr));
}
return m;
}
public boolean getBoolean(String attrName) throws MetadataException { public boolean getBoolean(String attrName) throws MetadataException {
AttributeInfo i = fieldMapping.fields.get(attrName); AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) { if (i == null) {
......
...@@ -266,17 +266,17 @@ object InstanceSerialization { ...@@ -266,17 +266,17 @@ object InstanceSerialization {
def asScala(v : Any) : Any = v match { def asScala(v : Any) : Any = v match {
case i : Id => _Id(i._getId(), i.getVersion, i.getClassName) case i : Id => _Id(i._getId(), i.getVersion, i.getClassName)
case r : Referenceable => { case r : IReferenceableInstance => {
val traits = r.getTraits.map { tName => val traits = r.getTraits.map { tName =>
val t = r.getTrait(tName).asInstanceOf[Struct] val t = r.getTrait(tName).asInstanceOf[IStruct]
(tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]])) (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
}.toMap }.toMap
_Reference(asScala(r.getId).asInstanceOf[_Id], _Reference(asScala(r.getId).asInstanceOf[_Id],
r.typeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]], r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
asScala(r.getTraits).asInstanceOf[List[String]], asScala(r.getTraits).asInstanceOf[List[String]],
traits.asInstanceOf[Map[String, _Struct]]) traits.asInstanceOf[Map[String, _Struct]])
} }
case s : Struct => _Struct(s.typeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]]) case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
case m : java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap case m : java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap
case _ => v case _ => v
...@@ -301,7 +301,7 @@ object InstanceSerialization { ...@@ -301,7 +301,7 @@ object InstanceSerialization {
writePretty(_s) writePretty(_s)
} }
def toJson(value: Struct, withBigDecimals : Boolean = false): String = { def toJson(value: IStruct, withBigDecimals : Boolean = false): String = {
_toJson(value, withBigDecimals) _toJson(value, withBigDecimals)
} }
......
...@@ -19,8 +19,7 @@ ...@@ -19,8 +19,7 @@
package org.apache.hadoop.metadata.typesystem.json package org.apache.hadoop.metadata.typesystem.json
import com.google.common.collect.ImmutableList import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata._ import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance, Id}
import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance}
import org.apache.hadoop.metadata.typesystem.types._ import org.apache.hadoop.metadata.typesystem.types._
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil
import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct} import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct}
...@@ -127,10 +126,7 @@ class SerializationTest extends BaseTest { ...@@ -127,10 +126,7 @@ class SerializationTest extends BaseTest {
Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}") Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
} }
@Test def testClass { def defineHRTypes(ts: TypeSystem) : Unit = {
val ts: TypeSystem = getTypeSystem
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef( val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Department", "Department",
ImmutableList.of[String], ImmutableList.of[String],
...@@ -155,18 +151,29 @@ class SerializationTest extends BaseTest { ...@@ -155,18 +151,29 @@ class SerializationTest extends BaseTest {
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef) ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
) )
}
def defineHRDept() : Referenceable = {
val hrDept: Referenceable = new Referenceable("Department") val hrDept: Referenceable = new Referenceable("Department")
val john: Referenceable = new Referenceable("Person") val john: Referenceable = new Referenceable("Person")
val jane: Referenceable = new Referenceable("Manager", "SecurityClearance") val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
hrDept.set("name", "hr") hrDept.set("name", "hr")
john.set("name", "John") john.set("name", "John")
john.set("department", hrDept) john.set("department", hrDept.getId)
jane.set("name", "Jane") jane.set("name", "Jane")
jane.set("department", hrDept) jane.set("department", hrDept.getId)
john.set("manager", jane) john.set("manager", jane.getId)
hrDept.set("employees", ImmutableList.of[Referenceable](john, jane)) hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
jane.set("subordinates", ImmutableList.of[Referenceable](john)) jane.set("subordinates", ImmutableList.of[Id](john.getId))
jane.getTrait("SecurityClearance").set("level", 1) jane.getTrait("SecurityClearance").set("level", 1)
hrDept
}
@Test def testClass {
val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val hrDept: Referenceable = defineHRDept()
val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department") val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED) val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
...@@ -185,44 +192,8 @@ class SerializationTest extends BaseTest { ...@@ -185,44 +192,8 @@ class SerializationTest extends BaseTest {
@Test def testReference { @Test def testReference {
val ts: TypeSystem = getTypeSystem val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef( val hrDept: Referenceable = defineHRDept()
"Department",
ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department"))
val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Person", ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
)
val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Manager", ImmutableList.of[String]("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"))
val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] =
TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE))
ts.defineTypes(ImmutableList.of[StructTypeDefinition],
ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
)
val hrDept: Referenceable = new Referenceable("Department")
val john: Referenceable = new Referenceable("Person")
val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
hrDept.set("name", "hr")
john.set("name", "John")
john.set("department", hrDept.getId)
jane.set("name", "Jane")
jane.set("department", hrDept.getId)
john.set("manager", jane.getId)
hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
jane.set("subordinates", ImmutableList.of[Referenceable](john))
jane.getTrait("SecurityClearance").set("level", 1)
val jsonStr = InstanceSerialization.toJson(hrDept) val jsonStr = InstanceSerialization.toJson(hrDept)
...@@ -242,4 +213,30 @@ class SerializationTest extends BaseTest { ...@@ -242,4 +213,30 @@ class SerializationTest extends BaseTest {
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n") println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
} }
@Test def testReference2 {
val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val hrDept: Referenceable = defineHRDept()
val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
val jsonStr = InstanceSerialization.toJson(hrDept2)
val hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr)
val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)
println(s"HR Dept Object Graph:\n${hrDept4}\n")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
val ser = swrite(hrDept4)
println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
}
} }
\ No newline at end of file
...@@ -48,9 +48,17 @@ ...@@ -48,9 +48,17 @@
<artifactId>metadata-repository</artifactId> <artifactId>metadata-repository</artifactId>
</dependency> </dependency>
<!--<dependency>-->
<!--<groupId>org.apache.hadoop.metadata</groupId>-->
<!--<artifactId>metadata-client</artifactId>-->
<!--</dependency>-->
<dependency> <dependency>
<groupId>org.apache.hadoop.metadata</groupId> <groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId> <artifactId>metadata-client</artifactId>
<version>${version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
...@@ -214,6 +222,7 @@ ...@@ -214,6 +222,7 @@
<artifactId>maven-war-plugin</artifactId> <artifactId>maven-war-plugin</artifactId>
<version>2.4</version> <version>2.4</version>
<configuration> <configuration>
<attachClasses>true</attachClasses>
<webResources> <webResources>
<resource> <resource>
<directory>../dashboard/v3</directory> <directory>../dashboard/v3</directory>
......
...@@ -26,13 +26,15 @@ import org.apache.hadoop.security.alias.JavaKeyStoreProvider; ...@@ -26,13 +26,15 @@ import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import java.io.*; import java.io.*;
import java.util.Arrays; import java.util.Arrays;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/** /**
* A utility class for generating a credential provider containing the entries required for supporting the SSL implementation * A utility class for generating a credential provider containing the entries required for supporting the SSL implementation
* of the DGC server. * of the DGC server.
*/ */
public class CredentialProviderUtility { public class CredentialProviderUtility {
private static final String[] KEYS = new String[] {SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY, private static final String[] KEYS = new String[] {KEYSTORE_PASSWORD_KEY,
SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY}; TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY};
public static abstract class TextDevice { public static abstract class TextDevice {
public abstract void printf(String fmt, Object... params); public abstract void printf(String fmt, Object... params);
......
...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters; ...@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton; import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
...@@ -49,8 +50,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter { ...@@ -49,8 +50,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException { protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
PropertiesConfiguration configuration; PropertiesConfiguration configuration;
try { try {
configuration = new PropertiesConfiguration("application.properties"); configuration = PropertiesUtil.getApplicationProperties();
} catch (ConfigurationException e) { } catch (Exception e) {
throw new ServletException(e); throw new ServletException(e);
} }
...@@ -95,6 +96,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter { ...@@ -95,6 +96,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
config.put(KerberosAuthenticationHandler.PRINCIPAL, principal); config.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
} }
LOG.info("AuthenticationFilterConfig: {}", config);
return config; return config;
} }
......
...@@ -27,6 +27,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; ...@@ -27,6 +27,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException; import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.RepositoryMetadataModule; import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore; import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.TypesDef; import org.apache.hadoop.metadata.typesystem.TypesDef;
...@@ -80,11 +81,14 @@ public class GuiceServletConfig extends GuiceServletContextListener { ...@@ -80,11 +81,14 @@ public class GuiceServletConfig extends GuiceServletContextListener {
} }
private void configureAuthenticationFilter() throws ConfigurationException { private void configureAuthenticationFilter() throws ConfigurationException {
PropertiesConfiguration configuration = try {
new PropertiesConfiguration("application.properties"); PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties();
if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) { if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
filter("/*").through(MetadataAuthenticationFilter.class); filter("/*").through(MetadataAuthenticationFilter.class);
} }
} catch (MetadataException e) {
LOG.warn("Error loading configuration and initializing authentication filter", e);
}
} }
}); });
......
...@@ -19,6 +19,8 @@ package org.apache.hadoop.metadata.web.listeners; ...@@ -19,6 +19,8 @@ package org.apache.hadoop.metadata.web.listeners;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
...@@ -120,7 +122,11 @@ public class LoginProcessor { ...@@ -120,7 +122,11 @@ public class LoginProcessor {
* @throws ConfigurationException * @throws ConfigurationException
*/ */
protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException { protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
return new PropertiesConfiguration("application.properties"); try {
return PropertiesUtil.getApplicationProperties();
} catch (MetadataException e) {
throw new ConfigurationException(e);
}
} }
/** /**
......
...@@ -28,23 +28,16 @@ import org.mortbay.jetty.security.SslSocketConnector; ...@@ -28,23 +28,16 @@ import org.mortbay.jetty.security.SslSocketConnector;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/** /**
* This is a jetty server which requires client auth via certificates. * This is a jetty server which requires client auth via certificates.
*/ */
public class SecureEmbeddedServer extends EmbeddedServer { public class SecureEmbeddedServer extends EmbeddedServer {
public static final String KEYSTORE_FILE_KEY = "keystore.file";
public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String KEYSTORE_PASSWORD_KEY = "keystore.password";
public static final String TRUSTSTORE_FILE_KEY = "truststore.file";
public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
private static final Logger LOG = LoggerFactory.getLogger(SecureEmbeddedServer.class); private static final Logger LOG = LoggerFactory.getLogger(SecureEmbeddedServer.class);
public SecureEmbeddedServer(int port, String path) throws IOException { public SecureEmbeddedServer(int port, String path) throws IOException {
......
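Editor's note on the SecureEmbeddedServer hunk above: the keystore/truststore constants removed here now live in a shared SecurityProperties holder that the server, CredentialProviderUtility and the security ITs import statically. A sketch of what that holder contains, restricted to the names and values copied from the lines deleted above (the actual SecurityProperties source is not part of this excerpt, and it is shown here as an interface of constants only for brevity):

    // Sketch only; identifiers and values taken verbatim from the constants removed above.
    public interface SecurityProperties {
        String KEYSTORE_FILE_KEY = "keystore.file";
        String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
        String KEYSTORE_PASSWORD_KEY = "keystore.password";
        String TRUSTSTORE_FILE_KEY = "truststore.file";
        String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
        String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
        String SERVER_CERT_PASSWORD_KEY = "password";
        String CLIENT_AUTH_KEY = "client.auth.enabled";
        String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
    }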
...@@ -31,6 +31,8 @@ import java.io.IOException; ...@@ -31,6 +31,8 @@ import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.*; import java.util.*;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/** /**
* *
*/ */
...@@ -75,11 +77,11 @@ public class CredentialProviderUtilityIT { ...@@ -75,11 +77,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0); CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry = CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY); provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY); entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY); entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
} }
...@@ -138,11 +140,11 @@ public class CredentialProviderUtilityIT { ...@@ -138,11 +140,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0); CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry = CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY); provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY); entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY); entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
} }
...@@ -192,11 +194,11 @@ public class CredentialProviderUtilityIT { ...@@ -192,11 +194,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0); CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry = CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY); provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY); entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY); entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry); assertCredentialEntryCorrect(entry);
} }
...@@ -260,11 +262,11 @@ public class CredentialProviderUtilityIT { ...@@ -260,11 +262,11 @@ public class CredentialProviderUtilityIT {
char[] newpass = "newpass".toCharArray(); char[] newpass = "newpass".toCharArray();
CredentialProvider.CredentialEntry entry = CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY); provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass); assertCredentialEntryCorrect(entry, newpass);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY); entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass); assertCredentialEntryCorrect(entry, newpass);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY); entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass); assertCredentialEntryCorrect(entry, newpass);
} }
} }
...@@ -19,7 +19,7 @@ package org.apache.hadoop.metadata.web.filters; ...@@ -19,7 +19,7 @@ package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.metadata.web.BaseSecurityTest; import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer; import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server; import org.mortbay.jetty.Server;
import org.testng.Assert; import org.testng.Assert;
...@@ -59,6 +59,9 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest { ...@@ -59,6 +59,9 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
@Test @Test
public void testKerberosBasedLogin() throws Exception { public void testKerberosBasedLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
setupKDCAndPrincipals(); setupKDCAndPrincipals();
TestEmbeddedServer server = null; TestEmbeddedServer server = null;
...@@ -102,6 +105,12 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest { ...@@ -102,6 +105,12 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
server.getServer().stop(); server.getServer().stop();
kdc.stop(); kdc.stop();
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
} else {
System.clearProperty("metadata.conf");
}
} }
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
package org.apache.hadoop.metadata.web.filters; package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.metadata.web.BaseSecurityTest; import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer; import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server; import org.mortbay.jetty.Server;
import org.testng.Assert; import org.testng.Assert;
...@@ -45,6 +45,8 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest { ...@@ -45,6 +45,8 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
@Test @Test
public void testSimpleLogin() throws Exception { public void testSimpleLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
generateSimpleLoginConfiguration(); generateSimpleLoginConfiguration();
TestEmbeddedServer server = new TestEmbeddedServer(23001, "webapp/target/metadata-governance"); TestEmbeddedServer server = new TestEmbeddedServer(23001, "webapp/target/metadata-governance");
...@@ -71,6 +73,11 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest { ...@@ -71,6 +73,11 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
Assert.assertEquals(connection.getResponseCode(), 200); Assert.assertEquals(connection.getResponseCode(), 200);
} finally { } finally {
server.getServer().stop(); server.getServer().stop();
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
} else {
System.clearProperty("metadata.conf");
}
} }
......
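Editor's note on the two authentication-filter ITs above: both now pin the metadata.conf system property to the working directory for the duration of the test and restore (or clear) the previous value afterwards. A reusable form of that save/restore pattern, as a sketch; the helper name is illustrative and the tests inline this logic around their server setup and teardown.

    // Illustrative helper mirroring the save/restore logic added in the ITs above.
    static void withSystemProperty(String key, String value, Runnable body) {
        String original = System.getProperty(key);
        System.setProperty(key, value);
        try {
            body.run();
        } finally {
            if (original != null) {
                System.setProperty(key, original);
            } else {
                System.clearProperty(key);
            }
        }
    }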
...@@ -18,12 +18,10 @@ package org.apache.hadoop.metadata.web.listeners; ...@@ -18,12 +18,10 @@ package org.apache.hadoop.metadata.web.listeners;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.metadata.web.BaseSecurityTest; import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.Test; import org.testng.annotations.Test;
...@@ -99,15 +97,6 @@ public class LoginProcessorIT extends BaseSecurityTest { ...@@ -99,15 +97,6 @@ public class LoginProcessorIT extends BaseSecurityTest {
Assert.assertNotNull(kdc.getRealm()); Assert.assertNotNull(kdc.getRealm());
File keytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab"); File keytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
String dgiServerPrincipal = Shell.WINDOWS ? "dgi/127.0.0.1" : "dgi/localhost";
StringBuilder jaas = new StringBuilder(1024);
jaas.append(createJAASEntry("Client", "dgi", keytabFile));
jaas.append(createJAASEntry("Server", dgiServerPrincipal, keytabFile));
File jaasFile = new File(kdcWorkDir, "jaas.txt");
FileUtils.write(jaasFile, jaas.toString());
bindJVMtoJAASFile(jaasFile);
return keytabFile; return keytabFile;
} }
......
...@@ -28,6 +28,8 @@ import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
+import org.apache.hadoop.metadata.typesystem.types.ClassType;
+import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
...@@ -59,9 +61,12 @@ public abstract class BaseResourceIT {
}
protected void createType(TypesDef typesDef) throws Exception {
+HierarchicalTypeDefinition<ClassType> sampleType = typesDef.classTypesAsJavaList().get(0);
+if (serviceClient.getType(sampleType.typeName) == null ) {
String typesAsJSON = TypesSerialization.toJson(typesDef);
createType(typesAsJSON);
}
+}
protected void createType(String typesAsJSON) throws Exception {
WebResource resource = service
...
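The BaseResourceIT change makes type registration idempotent: the first class type is looked up by name and the definition is only posted when the lookup returns nothing. A compact sketch of that guard against a stand-in client interface; the interface is hypothetical, introduced only to keep the example self-contained:

// Illustrative sketch of the "create only if absent" guard added to createType().
public final class IdempotentTypeCreateExample {

    /** Hypothetical stand-in for the metadata REST client used by the tests. */
    interface TypeClient {
        String getType(String typeName);      // assumed to return null for unknown types
        void createType(String typesAsJson);
    }

    static void createTypeIfAbsent(TypeClient client, String typeName, String typesAsJson) {
        if (client.getType(typeName) == null) {
            client.createType(typesAsJson);   // register only when the server does not know the type
        }
    }
}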
...@@ -150,6 +150,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String definition = response.getString(MetadataServiceClient.RESULTS);
Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition);
+InstanceSerialization.fromJsonReferenceable(definition, true);
}
private ClientResponse addProperty(String guid, String property, String value) {
...@@ -419,7 +420,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
new AttributeDefinition("serde2",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("database",
-DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
+DATABASE_TYPE, Multiplicity.REQUIRED, true, null),
+new AttributeDefinition("compressed",
+DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, true, null));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("classification",
...@@ -460,6 +463,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
+tableInstance.set("compressed", false);
Struct traitInstance = (Struct) tableInstance.getTrait("classification");
traitInstance.set("tag", "foundation_etl");
...
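Two things happen in the EntityJerseyResourceIT hunks: the table type gains an optional boolean "compressed" attribute, and the fetched definition is now parsed back with InstanceSerialization.fromJsonReferenceable instead of only being logged, so a malformed payload fails the test. A minimal sketch of that parse-back idea using the Jettison JSON API the test already imports; the field name checked here is illustrative, not taken from the project:

import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

// Illustrative only: verify a returned definition is well-formed instead of merely logging it.
public final class DefinitionRoundTripExample {

    static JSONObject parseDefinition(String definition) throws JSONException {
        JSONObject parsed = new JSONObject(definition);      // throws if the payload is not valid JSON
        if (!parsed.has("values")) {                         // "values" is an assumed field name
            throw new JSONException("definition has no 'values' block: " + definition);
        }
        return parsed;
    }

    public static void main(String[] args) throws JSONException {
        System.out.println(parseDefinition("{\"values\":{\"compressed\":false}}"));
    }
}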
...@@ -24,12 +24,14 @@ import org.testng.annotations.Test;
import java.net.HttpURLConnection;
import java.net.URL;
+import static org.apache.hadoop.metadata.security.SecurityProperties.*;
public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase{
@Test
public void testServerConfiguredUsingCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
-configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
...
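The SecureEmbeddedServerIT change is a static import of the SecurityProperties constants, so the credential-provider path is set through the shared key rather than a constant nested in SecureEmbeddedServer. A minimal sketch of that configuration step; the property-key value and the JCEKS provider URL below are assumptions chosen for illustration:

import org.apache.commons.configuration.PropertiesConfiguration;

// Illustrative only: point the test configuration at a credential store location.
public final class SecureServerConfigExample {

    // Assumed to mirror SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH.
    static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";

    public static void main(String[] args) {
        PropertiesConfiguration configuration = new PropertiesConfiguration();
        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH,
                "jceks://file/tmp/test.jceks");               // placeholder provider URL
        System.out.println(configuration.getString(CERT_STORES_CREDENTIAL_PROVIDER_PATH));
    }
}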
...@@ -45,6 +45,8 @@ import java.net.URL;
import java.nio.file.Files;
import java.util.List;
+import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
*
*/
...@@ -69,7 +71,7 @@ public class SecureEmbeddedServerITBase {
return false;
}
});
-System.setProperty("javax.net.ssl.trustStore", SecureEmbeddedServer.DEFAULT_KEYSTORE_FILE_LOCATION);
+System.setProperty("javax.net.ssl.trustStore", DEFAULT_KEYSTORE_FILE_LOCATION);
System.setProperty("javax.net.ssl.trustStorePassword", "keypass");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
...@@ -122,7 +124,7 @@ public class SecureEmbeddedServerITBase {
public void testMissingEntriesInCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
-configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
...@@ -147,7 +149,7 @@ public class SecureEmbeddedServerITBase {
@Test
public void runOtherSuitesAgainstSecureServer() throws Exception {
final PropertiesConfiguration configuration = new PropertiesConfiguration();
-configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
...@@ -198,15 +200,15 @@ public class SecureEmbeddedServerITBase {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
-SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY, storepass);
+KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
-SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, trustpass);
+TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
-SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY, certpass);
+SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
...
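The setupCredentials() hunk above writes three password entries into a Hadoop credential provider and flushes it so the server-side checks can read them back. A hedged, stand-alone sketch of that setup using Hadoop's CredentialProviderFactory API; the alias strings and the provider URL are assumptions standing in for the SecurityProperties constants:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

// Illustrative only: populate a JCEKS credential store with the entries the secure server reads.
public final class CredentialSetupExample {

    public static void main(String[] args) throws Exception {
        String providerUrl = "jceks://file/tmp/test.jceks";   // placeholder store location

        Configuration conf = new Configuration(false);
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);

        CredentialProvider provider =
                CredentialProviderFactory.getProviders(conf).get(0);

        char[] pass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
        provider.createCredentialEntry("keystore.password", pass);    // assumed KEYSTORE_PASSWORD_KEY alias
        provider.createCredentialEntry("truststore.password", pass);  // assumed TRUSTSTORE_PASSWORD_KEY alias
        provider.createCredentialEntry("password", pass);             // assumed SERVER_CERT_PASSWORD_KEY alias
        provider.flush();                                             // persist so later lookups see the entries
    }
}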