dataplatform / atlas

Commit c87130eb authored Apr 01, 2015 by Jon Maron
kerberos service login and http authentication filter
parent 4e8718a6
Showing 10 changed files with 824 additions and 79 deletions (+824, -79)
addons/hive-bridge/pom.xml  (+7, -0)
webapp/pom.xml  (+41, -7)
webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuthenticationFilter.java  (+0, -54)
webapp/src/main/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationFilter.java  (+101, -0)
webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/GuiceServletConfig.java  (+44, -18)
webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/LoginProcessor.java  (+139, -0)
webapp/src/test/java/org/apache/hadoop/metadata/web/BaseSecurityTest.java  (+127, -0)
webapp/src/test/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationKerberosFilterIT.java  (+165, -0)
webapp/src/test/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationSimpleFilterIT.java  (+86, -0)
webapp/src/test/java/org/apache/hadoop/metadata/web/listeners/LoginProcessorIT.java  (+114, -0)
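For orientation before the individual diffs, the sketch below collects the authentication-related application.properties keys that the new filter, LoginProcessor, and integration tests in this commit read. The property names are taken from the diff; the class name, realm, principals, and keytab paths are placeholder assumptions for illustration only.

// Illustrative only: property names come from MetadataAuthenticationFilter,
// LoginProcessor, and the ITs in this commit; the values are assumed examples.
import java.util.Properties;

public class ExampleAuthProperties {
    public static Properties kerberosExample() {
        Properties props = new Properties();
        // HTTP authentication filter (SPNEGO) settings
        props.setProperty("metadata.http.authentication.enabled", "true");
        props.setProperty("metadata.http.authentication.type", "kerberos");
        props.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/_HOST@EXAMPLE.COM");
        props.setProperty("metadata.http.authentication.kerberos.keytab", "/etc/security/keytabs/spnego.service.keytab");
        // service (process) login settings read by LoginProcessor
        props.setProperty("metadata.authentication.method", "kerberos");
        props.setProperty("metadata.authentication.principal", "dgi/_HOST@EXAMPLE.COM");
        props.setProperty("metadata.authentication.keytab", "/etc/security/keytabs/dgi.service.keytab");
        return props;
    }
}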
addons/hive-bridge/pom.xml

@@ -92,6 +92,13 @@
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
        </dependency>

webapp/pom.xml

@@ -60,6 +60,27 @@
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-minikdc</artifactId>
            <version>${hadoop.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
            <scope>test</scope>
        </dependency>
        <!-- supports simple auth handler -->
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.2.5</version>
        </dependency>
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
        </dependency>

@@ -156,6 +177,13 @@
            <groupId>org.mortbay.jetty</groupId>
            <artifactId>jsp-2.1</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.4</version>
        </dependency>
    </dependencies>
    <build>

@@ -284,13 +312,13 @@
                    <!--only skip int tests -->
                    <connectors>
                        <!--
                        <connector implementation="org.mortbay.jetty.security.SslSocketConnector">
                            <port>21443</port>
                            <maxIdleTime>60000</maxIdleTime>
                            <keystore>${project.build.directory}/../../webapp/target/metadata.keystore</keystore>
                            <keyPassword>metadata-passwd</keyPassword>
                            <password>metadata-passwd</password>
                        </connector>
                        -->
                        <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
                            <port>21000</port>

@@ -339,6 +367,12 @@
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.felix</groupId>
                <artifactId>maven-bundle-plugin</artifactId>
                <inherited>true</inherited>
                <extensions>true</extensions>
            </plugin>
        </plugins>
    </build>
</project>

webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuthenticationFilter.java (deleted, 100755 → 0; content shown at parent 4e8718a6)

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.filters;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import java.io.IOException;

/**
 * This enforces authentication as part of the filter before processing the request.
 * todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}.
 */
public class AuthenticationFilter implements Filter {
    private static final Logger LOG = LoggerFactory.getLogger(AuthenticationFilter.class);

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        LOG.info("AuthenticationFilter initialization started");
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        chain.doFilter(request, response);
    }

    @Override
    public void destroy() {
        // do nothing
    }
}
webapp/src/main/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationFilter.java (new file, 0 → 100644)

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.filters;

import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Properties;

/**
 * This enforces authentication as part of the filter before processing the request.
 * todo: Subclass of {@link org.apache.hadoop.security.authentication.server.AuthenticationFilter}.
 */
@Singleton
public class MetadataAuthenticationFilter extends AuthenticationFilter {
    private static final Logger LOG = LoggerFactory.getLogger(MetadataAuthenticationFilter.class);
    static final String PREFIX = "metadata.http.authentication.";
    static final String BIND_ADDRESS = "bind.address";

    @Override
    protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
        PropertiesConfiguration configuration;
        try {
            configuration = new PropertiesConfiguration("application.properties");
        } catch (ConfigurationException e) {
            throw new ServletException(e);
        }

        Properties config = new Properties();
        config.put(AuthenticationFilter.COOKIE_PATH, "/");

        // add any config passed in as init parameters
        Enumeration<String> enumeration = filterConfig.getInitParameterNames();
        while (enumeration.hasMoreElements()) {
            String name = enumeration.nextElement();
            config.put(name, filterConfig.getInitParameter(name));
        }

        // transfer application.properties config items starting with defined prefix
        Iterator<String> itor = configuration.getKeys();
        while (itor.hasNext()) {
            String name = itor.next();
            if (name.startsWith(PREFIX)) {
                String value = configuration.getString(name);
                name = name.substring(PREFIX.length());
                config.put(name, value);
            }
        }

        // Resolve _HOST into bind address
        String bindAddress = config.getProperty(BIND_ADDRESS);
        if (bindAddress == null) {
            LOG.info("No host name configured. Defaulting to local host name.");
            try {
                bindAddress = InetAddress.getLocalHost().getHostName();
            } catch (UnknownHostException e) {
                throw new ServletException("Unable to obtain host name", e);
            }
        }
        String principal = config.getProperty(KerberosAuthenticationHandler.PRINCIPAL);
        if (principal != null) {
            try {
                principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
            } catch (IOException ex) {
                throw new RuntimeException("Could not resolve Kerberos principal name: " + ex.toString(), ex);
            }
            config.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
        }

        return config;
    }
}
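The filter's _HOST handling delegates to Hadoop's SecurityUtil.getServerPrincipal(), which substitutes the resolved bind address for the _HOST token. A minimal sketch of that substitution, with an assumed principal and host name (the filter itself uses the configured principal and the bind.address property, or the local host name):

// Minimal sketch, assuming hypothetical values; not part of this commit.
import java.io.IOException;
import org.apache.hadoop.security.SecurityUtil;

public class ServerPrincipalExample {
    public static void main(String[] args) throws IOException {
        String configured = "HTTP/_HOST@EXAMPLE.COM";          // assumed configured principal
        String host = "host1.example.com";                      // assumed bind address
        // _HOST is replaced with the supplied host name
        String resolved = SecurityUtil.getServerPrincipal(configured, host);
        System.out.println(resolved);  // HTTP/host1.example.com@EXAMPLE.COM
    }
}

LoginProcessor.getServerPrincipal() (below) performs the same substitution for the service-login principal, using the local host name.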
webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/GuiceServletConfig.java

@@ -24,16 +24,21 @@ import com.google.inject.servlet.GuiceServletContextListener;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.JerseyServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.web.filters.AuditFilter;
import org.apache.hadoop.metadata.web.filters.MetadataAuthenticationFilter;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletException;
import java.util.HashMap;
import java.util.Map;

@@ -42,6 +47,8 @@ public class GuiceServletConfig extends GuiceServletContextListener {
    private static final Logger LOG = LoggerFactory.getLogger(GuiceServletConfig.class);
    private static final String GUICE_CTX_PARAM = "guice.packages";
    static final String HTTP_AUTHENTICATION_ENABLED = "metadata.http.authentication.enabled";
    private Injector injector;

    @Override
    protected Injector getInjector() {

@@ -52,24 +59,39 @@ public class GuiceServletConfig extends GuiceServletContextListener {
         * .11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary
         * .html
         */

[previous version, removed by this commit:]

        Injector injector = Guice.createInjector(new RepositoryMetadataModule(),
                new JerseyServletModule() {
                    @Override
                    protected void configureServlets() {
                        filter("/*").through(AuditFilter.class);

                        String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
                        LOG.info("Jersey loading from packages: " + packages);

                        Map<String, String> params = new HashMap<>();
                        params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
                        serve("/api/metadata/*").with(GuiceContainer.class, params);
                    }
                });

        LOG.info("Guice modules loaded");

[new version, added by this commit:]

        if (injector == null) {
            injector = Guice.createInjector(new RepositoryMetadataModule(),
                    new JerseyServletModule() {
                        @Override
                        protected void configureServlets() {
                            filter("/*").through(AuditFilter.class);

                            try {
                                configureAuthenticationFilter();
                            } catch (ConfigurationException e) {
                                LOG.warn("Unable to add and configure authentication filter", e);
                            }

                            String packages = getServletContext().getInitParameter(GUICE_CTX_PARAM);
                            LOG.info("Jersey loading from packages: " + packages);

                            Map<String, String> params = new HashMap<>();
                            params.put(PackagesResourceConfig.PROPERTY_PACKAGES, packages);
                            serve("/api/metadata/*").with(GuiceContainer.class, params);
                        }

                        private void configureAuthenticationFilter() throws ConfigurationException {
                            PropertiesConfiguration configuration =
                                    new PropertiesConfiguration("application.properties");
                            if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
                                filter("/*").through(MetadataAuthenticationFilter.class);
                            }
                        }
                    });

            LOG.info("Guice modules loaded");
        }

        return injector;
    }

@@ -78,6 +100,10 @@ public class GuiceServletConfig extends GuiceServletContextListener {
    public void contextInitialized(ServletContextEvent servletContextEvent) {
        super.contextInitialized(servletContextEvent);

        // perform login operations
        LoginProcessor loginProcessor = new LoginProcessor();
        loginProcessor.login();

        restoreTypeSystem();
    }

webapp/src/main/java/org/apache/hadoop/metadata/web/listeners/LoginProcessor.java (new file, 0 → 100644)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.listeners;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetAddress;

/**
 * A class capable of performing a simple or kerberos login.
 */
public class LoginProcessor {
    private static final Logger LOG = LoggerFactory.getLogger(LoginProcessor.class);
    public static final String METADATA_AUTHENTICATION_PREFIX = "metadata.authentication.";
    public static final String AUTHENTICATION_METHOD = METADATA_AUTHENTICATION_PREFIX + "method";
    public static final String AUTHENTICATION_PRINCIPAL = METADATA_AUTHENTICATION_PREFIX + "principal";
    public static final String AUTHENTICATION_KEYTAB = METADATA_AUTHENTICATION_PREFIX + "keytab";

    /**
     * Perform a SIMPLE login based on established OS identity or a kerberos based login using the configured
     * principal and keytab (via application.properties).
     */
    public void login() {
        // first, let's see if we're running in a hadoop cluster and have the env configured
        boolean isHadoopCluster = isHadoopCluster();
        Configuration hadoopConfig = isHadoopCluster ? getHadoopConfiguration() : new Configuration(false);
        PropertiesConfiguration configuration = null;
        try {
            configuration = getPropertiesConfiguration();
        } catch (ConfigurationException e) {
            LOG.warn("Error reading application configuration", e);
        }
        if (!isHadoopCluster) {
            // need to read the configured authentication choice and create the UGI configuration
            setupHadoopConfiguration(hadoopConfig, configuration);
        }
        doServiceLogin(hadoopConfig, configuration);
    }

    protected void doServiceLogin(Configuration hadoopConfig, PropertiesConfiguration configuration) {
        UserGroupInformation.setConfiguration(hadoopConfig);

        UserGroupInformation ugi = null;
        UserGroupInformation.AuthenticationMethod authenticationMethod =
                SecurityUtil.getAuthenticationMethod(hadoopConfig);
        try {
            if (authenticationMethod == UserGroupInformation.AuthenticationMethod.SIMPLE) {
                UserGroupInformation.loginUserFromSubject(null);
            } else if (authenticationMethod == UserGroupInformation.AuthenticationMethod.KERBEROS) {
                UserGroupInformation.loginUserFromKeytab(
                        getServerPrincipal(configuration.getString(AUTHENTICATION_PRINCIPAL)),
                        configuration.getString(AUTHENTICATION_KEYTAB));
            }

            LOG.info("Logged in user {}", UserGroupInformation.getLoginUser());
        } catch (IOException e) {
            throw new IllegalStateException(String.format("Unable to perform %s login.", authenticationMethod), e);
        }
    }

    protected void setupHadoopConfiguration(Configuration hadoopConfig, PropertiesConfiguration configuration) {
        String authMethod;
        authMethod = configuration != null ? configuration.getString(AUTHENTICATION_METHOD) : null;
        // getString may return null, and would like to log the nature of the default setting
        if (authMethod == null) {
            LOG.info("No authentication method configured. Defaulting to simple authentication");
            authMethod = "simple";
        }
        SecurityUtil.setAuthenticationMethod(
                UserGroupInformation.AuthenticationMethod.valueOf(authMethod.toUpperCase()), hadoopConfig);
    }

    /**
     * Return a server (service) principal.  The token "_HOST" in the principal will be replaced with the local host
     * name (e.g. dgi/_HOST will be changed to dgi/localHostName)
     * @param principal the input principal containing an option "_HOST" token
     * @return the service principal.
     * @throws IOException
     */
    private String getServerPrincipal(String principal) throws IOException {
        return SecurityUtil.getServerPrincipal(principal, InetAddress.getLocalHost().getHostName());
    }

    /**
     * Returns a Hadoop configuration instance.
     * @return the configuration.
     */
    protected Configuration getHadoopConfiguration() {
        return new Configuration();
    }

    /**
     * Returns the metadata application configuration.
     * @return the metadata configuration.
     * @throws ConfigurationException
     */
    protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
        return new PropertiesConfiguration("application.properties");
    }

    /**
     * Uses a hadoop shell to discern whether a hadoop cluster is available/configured.
     * @return true if a hadoop cluster is detected.
     */
    protected boolean isHadoopCluster() {
        boolean isHadoopCluster = false;
        try {
            isHadoopCluster = Shell.getHadoopHome() != null;
        } catch (IOException e) {
            // ignore - false is default setting
        }
        return isHadoopCluster;
    }
}
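LoginProcessor drives the UGI setup through SecurityUtil rather than writing Hadoop keys directly. A minimal sketch of what setupHadoopConfiguration() amounts to for the kerberos method, assuming an empty in-memory Configuration (no cluster config files on the classpath); shown only to make the wiring explicit:

// Minimal sketch under assumed configuration; not part of this commit.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

public class AuthMethodExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);  // assumed: no hadoop cluster config loaded
        // mirrors AuthenticationMethod.valueOf(authMethod.toUpperCase()) for "kerberos"
        SecurityUtil.setAuthenticationMethod(
                UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
        // the UGI authentication key now reflects the chosen method
        System.out.println(conf.get("hadoop.security.authentication"));  // kerberos
    }
}

GuiceServletConfig.contextInitialized() above triggers this path by calling new LoginProcessor().login() before the type system is restored.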
webapp/src/test/java/org/apache/hadoop/metadata/web/BaseSecurityTest.java (new file, 0 → 100644)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.zookeeper.Environment;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.util.Locale;
import java.util.Properties;

/**
 *
 */
public class BaseSecurityTest {
    private static final String JAAS_ENTRY =
            "%s { \n"
            + " %s required\n" // kerberos module
            + " keyTab=\"%s\"\n"
            + " debug=true\n"
            + " principal=\"%s\"\n"
            + " useKeyTab=true\n"
            + " useTicketCache=false\n"
            + " doNotPrompt=true\n"
            + " storeKey=true;\n"
            + "}; \n";
    protected MiniKdc kdc;

    protected String getWarPath() {
        return String.format("/target/metadata-webapp-%s.war",
                System.getProperty("release.version", "0.1-incubating-SNAPSHOT"));
    }

    protected void generateTestProperties(Properties props) throws ConfigurationException, IOException {
        PropertiesConfiguration config =
                new PropertiesConfiguration(System.getProperty("user.dir") + "/../src/conf/application.properties");
        for (String propName : props.stringPropertyNames()) {
            config.setProperty(propName, props.getProperty(propName));
        }
        File file = new File(System.getProperty("user.dir"), "application.properties");
        file.deleteOnExit();
        Writer fileWriter = new FileWriter(file);
        config.save(fileWriter);
    }

    protected void startEmbeddedServer(Server server) throws Exception {
        WebAppContext webapp = new WebAppContext();
        webapp.setContextPath("/");
        webapp.setWar(System.getProperty("user.dir") + getWarPath());
        server.setHandler(webapp);

        server.start();
    }

    protected File startKDC() throws Exception {
        File target = Files.createTempDirectory("sectest").toFile();
        File kdcWorkDir = new File(target, "kdc");
        Properties kdcConf = MiniKdc.createConf();
        kdcConf.setProperty(MiniKdc.DEBUG, "true");
        kdc = new MiniKdc(kdcConf, kdcWorkDir);
        kdc.start();

        assert kdc.getRealm() != null;
        return kdcWorkDir;
    }

    public String createJAASEntry(String context, String principal, File keytab) {
        String keytabpath = keytab.getAbsolutePath();
        // fix up for windows; no-op on unix
        keytabpath = keytabpath.replace('\\', '/');
        return String.format(Locale.ENGLISH, JAAS_ENTRY, context, getKerberosAuthModuleForJVM(),
                keytabpath, principal);
    }

    protected String getKerberosAuthModuleForJVM() {
        if (System.getProperty("java.vendor").contains("IBM")) {
            return "com.ibm.security.auth.module.Krb5LoginModule";
        } else {
            return "com.sun.security.auth.module.Krb5LoginModule";
        }
    }

    protected void bindJVMtoJAASFile(File jaasFile) {
        String path = jaasFile.getAbsolutePath();
        System.setProperty(Environment.JAAS_CONF_KEY, path);
    }

    protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
        File keytab = new File(kdcWorkDir, filename);
        kdc.createPrincipal(keytab, principal, principal + "/localhost", principal + "/127.0.0.1");
        return keytab;
    }
}
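For reference, a sketch of roughly what createJAASEntry() renders, assuming a hypothetical principal and keytab path and the non-IBM login module selected by getKerberosAuthModuleForJVM(). The integration tests below append such entries to a jaas.txt file and bind the JVM to it via bindJVMtoJAASFile().

// Minimal sketch with assumed inputs; not part of this commit.
package org.apache.hadoop.metadata.web;

import java.io.File;

public class JaasEntryExample extends BaseSecurityTest {
    public static void main(String[] args) {
        // Hypothetical principal and keytab path, for illustration only.
        String entry = new JaasEntryExample().createJAASEntry(
                "Server", "HTTP/localhost@EXAMPLE.COM", new File("/tmp/spnego.service.keytab"));
        System.out.println(entry);
        // Renders roughly as:
        // Server {
        //  com.sun.security.auth.module.Krb5LoginModule required
        //  keyTab="/tmp/spnego.service.keytab"
        //  debug=true
        //  principal="HTTP/localhost@EXAMPLE.COM"
        //  useKeyTab=true
        //  useTicketCache=false
        //  doNotPrompt=true
        //  storeKey=true;
        // };
    }
}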
webapp/src/test/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationKerberosFilterIT.java (new file, 0 → 100644)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.filters;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.annotations.Test;

import javax.security.auth.Subject;
import javax.security.auth.callback.*;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.Properties;

/**
 *
 */
public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
    public static final String TEST_USER_JAAS_SECTION = "TestUser";
    public static final String TESTUSER = "testuser";
    public static final String TESTPASS = "testpass";

    private File userKeytabFile;
    private File httpKeytabFile;

    class TestEmbeddedServer extends EmbeddedServer {
        public TestEmbeddedServer(int port, String path) throws IOException {
            super(port, path);
        }

        Server getServer() {
            return server;
        }
    }

    @Test
    public void testKerberosBasedLogin() throws Exception {
        setupKDCAndPrincipals();
        TestEmbeddedServer server = null;

        try {
            // setup the application.properties file
            generateKerberosTestProperties();

            // need to create the web application programmatically in order to control the injection of the test
            // application properties
            server = new TestEmbeddedServer(23000, "webapp/target/metadata-governance");

            startEmbeddedServer(server.getServer());

            final URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
            // attempt to hit server and get rejected
            URL url = new URL("http://localhost:23000/");
            HttpURLConnection connection = (HttpURLConnection) connectionFactory.openConnection(url, false);
            connection.setRequestMethod("GET");
            connection.connect();

            assert connection.getResponseCode() == 401;

            // need to populate the ticket cache with a local user, so logging in...
            Subject subject = loginTestUser();

            Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws Exception {
                    // attempt to hit server and get rejected
                    URL url = new URL("http://localhost:23000/");
                    HttpURLConnection connection = (HttpURLConnection) connectionFactory.openConnection(url, true);
                    connection.setRequestMethod("GET");
                    connection.connect();

                    assert connection.getResponseCode() == 200;

                    return null;
                }
            });
        } finally {
            server.getServer().stop();
            kdc.stop();
        }
    }

    protected Subject loginTestUser() throws LoginException, IOException {
        LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() {
            @Override
            public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
                for (int i = 0; i < callbacks.length; i++) {
                    if (callbacks[i] instanceof PasswordCallback) {
                        PasswordCallback passwordCallback = (PasswordCallback) callbacks[i];
                        passwordCallback.setPassword(TESTPASS.toCharArray());
                    }
                    if (callbacks[i] instanceof NameCallback) {
                        NameCallback nameCallback = (NameCallback) callbacks[i];
                        nameCallback.setName(TESTUSER);
                    }
                }
            }
        });
        // attempt authentication
        lc.login();
        return lc.getSubject();
    }

    protected void generateKerberosTestProperties() throws IOException, ConfigurationException {
        Properties props = new Properties();
        props.setProperty("metadata.http.authentication.enabled", "true");
        props.setProperty("metadata.http.authentication.type", "kerberos");
        props.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
        props.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
        props.setProperty("metadata.http.authentication.kerberos.name.rules",
                "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");

        generateTestProperties(props);
    }

    public void setupKDCAndPrincipals() throws Exception {
        // set up the KDC
        File kdcWorkDir = startKDC();

        userKeytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
        httpKeytabFile = createKeytab(kdc, kdcWorkDir, "HTTP", "spnego.service.keytab");

        // create a test user principal
        kdc.createPrincipal(TESTUSER, TESTPASS);

        StringBuilder jaas = new StringBuilder(1024);
        jaas.append("TestUser {\n"
                + "    com.sun.security.auth.module.Krb5LoginModule required\nuseTicketCache=true;\n"
                + "};\n");
        jaas.append(createJAASEntry("Client", "dgi", userKeytabFile));
        jaas.append(createJAASEntry("Server", "HTTP", httpKeytabFile));

        File jaasFile = new File(kdcWorkDir, "jaas.txt");
        FileUtils.write(jaasFile, jaas.toString());

        bindJVMtoJAASFile(jaasFile);
    }
}
webapp/src/test/java/org/apache/hadoop/metadata/web/filters/MetadataAuthenticationSimpleFilterIT.java (new file, 0 → 100644)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.filters;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.annotations.Test;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Properties;

/**
 *
 */
public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
    class TestEmbeddedServer extends EmbeddedServer {
        public TestEmbeddedServer(int port, String path) throws IOException {
            super(port, path);
        }

        Server getServer() {
            return server;
        }
    }

    @Test
    public void testSimpleLogin() throws Exception {
        generateSimpleLoginConfiguration();

        TestEmbeddedServer server = new TestEmbeddedServer(23001, "webapp/target/metadata-governance");
        try {
            startEmbeddedServer(server.getServer());

            URL url = new URL("http://localhost:23001");
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.connect();
            try {
                assert connection.getResponseCode() == 403;
            } catch (Exception e) {
                e.printStackTrace();
            }

            url = new URL("http://localhost:23001/?user.name=testuser");
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.connect();

            assert connection.getResponseCode() == 200;
        } finally {
            server.getServer().stop();
        }
    }

    protected void generateSimpleLoginConfiguration() throws IOException, ConfigurationException {
        Properties config = new Properties();
        config.setProperty("metadata.http.authentication.enabled", "true");
        config.setProperty("metadata.http.authentication.type", "simple");

        generateTestProperties(config);
    }
}
webapp/src/test/java/org/apache/hadoop/metadata/web/listeners/LoginProcessorIT.java (new file, 0 → 100644)

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metadata.web.listeners;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.testng.annotations.Test;

import java.io.File;

/**
 *
 */
public class LoginProcessorIT extends BaseSecurityTest {
    protected static final String kerberosRule = "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT";

    @Test
    public void testDefaultSimpleLogin() throws Exception {
        LoginProcessor processor = new LoginProcessor() {
            @Override
            protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
                return new PropertiesConfiguration();
            }
        };
        processor.login();

        assert UserGroupInformation.getCurrentUser() != null;
        assert !UserGroupInformation.isLoginKeytabBased();
        assert !UserGroupInformation.isSecurityEnabled();
    }

    @Test
    public void testKerberosLogin() throws Exception {
        final File keytab = setupKDCAndPrincipals();

        LoginProcessor processor = new LoginProcessor() {
            @Override
            protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
                PropertiesConfiguration config = new PropertiesConfiguration();
                config.setProperty("metadata.authentication.method", "kerberos");
                config.setProperty("metadata.authentication.principal", "dgi@EXAMPLE.COM");
                config.setProperty("metadata.authentication.keytab", keytab.getAbsolutePath());
                return config;
            }

            @Override
            protected Configuration getHadoopConfiguration() {
                Configuration config = new Configuration(false);
                config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                config.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
                config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL, kerberosRule);

                return config;
            }

            @Override
            protected boolean isHadoopCluster() {
                return true;
            }
        };
        processor.login();

        assert UserGroupInformation.getLoginUser().getShortUserName().endsWith("dgi");
        assert UserGroupInformation.getCurrentUser() != null;
        assert UserGroupInformation.isLoginKeytabBased();
        assert UserGroupInformation.isSecurityEnabled();

        kdc.stop();
    }

    private File setupKDCAndPrincipals() throws Exception {
        // set up the KDC
        File kdcWorkDir = startKDC();

        assert kdc.getRealm() != null;

        File keytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");

        String dgiServerPrincipal = Shell.WINDOWS ? "dgi/127.0.0.1" : "dgi/localhost";

        StringBuilder jaas = new StringBuilder(1024);
        jaas.append(createJAASEntry("Client", "dgi", keytabFile));
        jaas.append(createJAASEntry("Server", dgiServerPrincipal, keytabFile));

        File jaasFile = new File(kdcWorkDir, "jaas.txt");
        FileUtils.write(jaasFile, jaas.toString());

        bindJVMtoJAASFile(jaasFile);

        return keytabFile;
    }
}