Commit ddb14b54 by Shwetha GS

ATLAS-457 Upgrade to 0.9 version of Kafka dependency (yhemanth via shwethags)

parent e06b1ee2
@@ -43,6 +43,7 @@ import org.apache.zookeeper.server.ServerCnxnFactory;
 import org.apache.zookeeper.server.ZooKeeperServer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import scala.Option;
 import java.io.File;
 import java.io.IOException;
@@ -117,7 +118,7 @@ public class KafkaNotification extends AbstractNotification implements Service {
                 "org.apache.kafka.common.serialization.StringDeserializer");
         properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                 "org.apache.kafka.common.serialization.StringDeserializer");
-        properties.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY, "roundrobin");
+        properties.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, "roundrobin");
         properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "smallest");
     }
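Kafka 0.9 renamed the new consumer's `ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY` constant to `PARTITION_ASSIGNMENT_STRATEGY_CONFIG`; that rename is the only change in this hunk. A minimal sketch of assembling the same properties against the 0.9 client jar (the class name and bootstrap address are illustrative, not from the commit; the strategy and offset values mirror the hunk above):

```java
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;

public class ConsumerProps {
    // Illustrative helper, not from the commit: assembles 0.9-style
    // consumer properties matching the hunk above.
    static Properties consumerProperties() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Renamed in 0.9: PARTITION_ASSIGNMENT_STRATEGY -> PARTITION_ASSIGNMENT_STRATEGY_CONFIG
        properties.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, "roundrobin");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "smallest");
        return properties;
    }
}
```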
@@ -303,7 +304,8 @@ public class KafkaNotification extends AbstractNotification implements Service {
         brokerConfig.setProperty("log.dirs", constructDir("kafka").getAbsolutePath());
         brokerConfig.setProperty("log.flush.interval.messages", String.valueOf(1));
-        kafkaServer = new KafkaServer(new KafkaConfig(brokerConfig), new SystemTime());
+        kafkaServer = new KafkaServer(KafkaConfig.fromProps(brokerConfig), new SystemTime(),
+                Option.apply(this.getClass().getName()));
         kafkaServer.startup();
         LOG.debug("Embedded kafka server started with broker config {}", brokerConfig);
     }
......
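In 0.9, `kafka.server.KafkaServer` gained a third constructor parameter, a `scala.Option<String>` thread-name prefix, and its config is now built via `KafkaConfig.fromProps`; that is why the commit adds the `scala.Option` import. A minimal sketch of constructing that argument from Java (assumes scala-library is on the classpath; the prefix string is illustrative):

```java
import scala.Option;

public class OptionFromJava {
    public static void main(String[] args) {
        // Kafka 0.9's KafkaServer takes Option<String> as its third
        // argument (a thread-name prefix); from Java it is built with
        // Option.apply rather than Scala's Some/None literals.
        Option<String> prefix = Option.apply("embedded-kafka"); // Some("embedded-kafka")
        Option<String> none = Option.<String>apply(null);       // apply(null) yields None
        System.out.println(prefix + " / " + none);
    }
}
```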
@@ -342,7 +342,7 @@
         <hadoop.version>2.7.0</hadoop.version>
         <hbase.version>1.1.2</hbase.version>
         <solr.version>5.1.0</solr.version>
-        <kafka.version>0.8.2.0</kafka.version>
+        <kafka.version>0.9.0.0</kafka.version>
         <!-- scala versions -->
         <scala.version>2.10.4</scala.version>
         <scala.binary.version>2.10</scala.binary.version>
......
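The `kafka.version` property above is what the project's Kafka dependency declarations resolve against; since Kafka's broker artifact is suffixed with the Scala binary version, it pairs with the `scala.binary.version` property below it. An illustrative dependency block of the usual shape (not quoted from this pom, whose dependency sections are collapsed in the diff):

```xml
<!-- Illustrative sketch: how kafka.version and scala.binary.version are
     typically consumed; the actual declaration is not shown in this diff. -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_${scala.binary.version}</artifactId>
    <version>${kafka.version}</version>
</dependency>
```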
@@ -7,6 +7,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
 ALL CHANGES:
+ATLAS-457 Upgrade to 0.9 version of Kafka dependency (yhemanth via shwethags)
 ATLAS-398 Delete trait that exists but not linked to entity results in "400 Bad request". It should result "404 not found" (ndjouhr via shwethags)
 ATLAS-372 Expose entity deletion through REST API (dkantor via shwethags)
 ATLAS-452 Exceptions while running HiveHookIT#testAlterTableRename (shwethags)
......
@@ -100,6 +100,16 @@ public class BaseSecurityTest {
     protected void bindJVMtoJAASFile(File jaasFile) {
         String path = jaasFile.getAbsolutePath();
         System.setProperty(Environment.JAAS_CONF_KEY, path);
+        disableZookeeperSecurity();
     }
+
+    /* We only want Atlas to work in secure mode for the tests
+     * for otherwise a lot more configuration is required to
+     * make other components like Kafka run in secure mode.
+     */
+    private void disableZookeeperSecurity() {
+        System.setProperty("zookeeper.sasl.client", "false");
+        System.setProperty("zookeeper.sasl.clientconfig", "");
+    }
     protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
......
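The two properties set in `disableZookeeperSecurity()` are JVM-wide switches that the ZooKeeper client reads before opening a connection, so they must be set ahead of any client creation. A standalone sketch of the same toggle (the class name is hypothetical; not part of the commit):

```java
public class ZkSaslToggle {
    // Sketch of the commit's approach: the ZooKeeper client consults these
    // system properties, so setting them JVM-wide before the first
    // connection skips SASL authentication for test runs.
    static void disableZookeeperSecurity() {
        System.setProperty("zookeeper.sasl.client", "false");  // do not attempt SASL on ZK connections
        System.setProperty("zookeeper.sasl.clientconfig", ""); // no JAAS login context for ZK
    }

    public static void main(String[] args) {
        disableZookeeperSecurity();
        // ... start embedded ZooKeeper/Kafka for the test here ...
    }
}
```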