
Commit 7884e82

max5599 authored and manub committed
Make EmbeddedKafkaConfig a trait to allow other implementation (#85)
* Make EmbeddedKafkaConfig a trait with a basic implementation
* Use def instead of val in trait definition
1 parent da49cee commit 7884e82
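
What the change enables is supplying a custom EmbeddedKafkaConfig instead of the single case class. A minimal sketch against the trait introduced in this commit; the class name, ports and broker property below are illustrative, not part of the commit:

import net.manub.embeddedkafka.EmbeddedKafkaConfig

// Hypothetical custom implementation of the new trait; because the trait
// declares its members as def, they can be backed by constructor vals here.
class StaticPortsConfig(override val kafkaPort: Int,
                        override val zooKeeperPort: Int) extends EmbeddedKafkaConfig {
  override def customBrokerProperties: Map[String, String] =
    Map("auto.create.topics.enable" -> "false") // illustrative broker override
  override def customProducerProperties: Map[String, String] = Map.empty
  override def customConsumerProperties: Map[String, String] = Map.empty
}

object StaticPortsConfig {
  // Expose an instance as the implicit config that the library's
  // withRunningKafka-style methods resolve.
  implicit val kafkaConfig: EmbeddedKafkaConfig = new StaticPortsConfig(7001, 7000)
}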

File tree

3 files changed: +45 -29 lines changed


embedded-kafka/src/main/scala/net/manub/embeddedkafka/EmbeddedKafka.scala

Lines changed: 19 additions & 22 deletions
@@ -5,30 +5,20 @@ import java.util.Properties
 import java.util.concurrent.Executors
 
 import kafka.admin.AdminUtils
-import kafka.server.KafkaConfig._
 import kafka.server.{KafkaConfig, KafkaServer}
 import kafka.utils.ZkUtils
 import org.apache.kafka.clients.consumer.{KafkaConsumer, OffsetAndMetadata}
-import org.apache.kafka.clients.producer.{
-  KafkaProducer,
-  ProducerConfig,
-  ProducerRecord
-}
+import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
+import org.apache.kafka.common.serialization.{Deserializer, Serializer, StringDeserializer, StringSerializer}
 import org.apache.kafka.common.{KafkaException, TopicPartition}
-import org.apache.kafka.common.serialization.{
-  Deserializer,
-  Serializer,
-  StringDeserializer,
-  StringSerializer
-}
 import org.apache.zookeeper.server.{ServerCnxnFactory, ZooKeeperServer}
 import org.scalatest.Suite
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
 import scala.concurrent.duration._
-import scala.concurrent.{ExecutionContext, TimeoutException}
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, TimeoutException}
 import scala.language.{higherKinds, postfixOps}
 import scala.reflect.io.Directory
 import scala.util.Try
@@ -118,7 +108,7 @@ object EmbeddedKafka extends EmbeddedKafkaSupport {
 
 sealed trait EmbeddedKafkaSupport {
   private val executorService = Executors.newFixedThreadPool(2)
-  implicit private val executionContext =
+  implicit private val executionContext: ExecutionContextExecutorService =
     ExecutionContext.fromExecutorService(executorService)
 
   val zkSessionTimeoutMs = 10000
@@ -136,7 +126,7 @@ sealed trait EmbeddedKafkaSupport {
     withRunningZooKeeper(config.zooKeeperPort) { zkPort =>
       withTempDir("kafka") { kafkaLogsDir =>
         val broker =
-          startKafka(config.copy(zooKeeperPort = zkPort), kafkaLogsDir)
+          startKafka(config.kafkaPort, zkPort, config.customBrokerProperties, kafkaLogsDir)
         try {
           body
         } finally {
@@ -162,11 +152,11 @@ sealed trait EmbeddedKafkaSupport {
     withRunningZooKeeper(config.zooKeeperPort) { zkPort =>
       withTempDir("kafka") { kafkaLogsDir =>
         val broker: KafkaServer =
-          startKafka(config.copy(zooKeeperPort = zkPort), kafkaLogsDir)
+          startKafka(config.kafkaPort, zkPort, config.customBrokerProperties, kafkaLogsDir)
         val kafkaPort =
           broker.boundPort(broker.config.listeners.head.listenerName)
         val actualConfig =
-          config.copy(kafkaPort = kafkaPort, zooKeeperPort = zkPort)
+          EmbeddedKafkaConfigImpl(kafkaPort, zkPort, config.customBrokerProperties, config.customProducerProperties, config.customConsumerProperties)
         try {
           body(actualConfig)
         } finally {
@@ -556,10 +546,12 @@ sealed trait EmbeddedKafkaSupport {
     factory
   }
 
-  def startKafka(config: EmbeddedKafkaConfig,
-                 kafkaLogDir: Directory): KafkaServer = {
-    val zkAddress = s"localhost:${config.zooKeeperPort}"
-    val listener = s"PLAINTEXT://localhost:${config.kafkaPort}"
+  private def startKafka(kafkaPort: Int,
+                         zooKeeperPort: Int,
+                         customBrokerProperties: Map[String, String],
+                         kafkaLogDir: Directory) = {
+    val zkAddress = s"localhost:$zooKeeperPort"
+    val listener = s"PLAINTEXT://localhost:$kafkaPort"
 
     val properties = new Properties
     properties.setProperty("zookeeper.connect", zkAddress)
@@ -577,7 +569,7 @@ sealed trait EmbeddedKafkaSupport {
     // The total memory used for log deduplication across all cleaner threads, keep it small to not exhaust suite memory
     properties.setProperty("log.cleaner.dedupe.buffer.size", "1048577")
 
-    config.customBrokerProperties.foreach {
+    customBrokerProperties.foreach {
       case (key, value) => properties.setProperty(key, value)
     }
 
@@ -586,6 +578,11 @@ sealed trait EmbeddedKafkaSupport {
     broker
   }
 
+  def startKafka(config: EmbeddedKafkaConfig,
+                 kafkaLogDir: Directory): KafkaServer = {
+    startKafka(config.kafkaPort, config.zooKeeperPort, config.customBrokerProperties, kafkaLogDir)
+  }
+
   /**
     * Creates a topic with a custom configuration
     *

embedded-kafka/src/main/scala/net/manub/embeddedkafka/EmbeddedKafkaConfig.scala

Lines changed: 25 additions & 6 deletions
@@ -1,11 +1,30 @@
 package net.manub.embeddedkafka
 
-case class EmbeddedKafkaConfig(kafkaPort: Int = 6001,
-                               zooKeeperPort: Int = 6000,
-                               customBrokerProperties: Map[String, String] = Map.empty,
-                               customProducerProperties: Map[String, String] = Map.empty,
-                               customConsumerProperties: Map[String, String] = Map.empty)
+trait EmbeddedKafkaConfig {
+  def kafkaPort: Int
+  def zooKeeperPort: Int
+  def customBrokerProperties: Map[String, String]
+  def customProducerProperties: Map[String, String]
+  def customConsumerProperties: Map[String, String]
+}
+
+case class EmbeddedKafkaConfigImpl(
+    kafkaPort: Int,
+    zooKeeperPort: Int,
+    customBrokerProperties: Map[String, String],
+    customProducerProperties: Map[String, String],
+    customConsumerProperties: Map[String, String]
+) extends EmbeddedKafkaConfig
 
 object EmbeddedKafkaConfig {
-  implicit val defaultConfig = EmbeddedKafkaConfig()
+  implicit val defaultConfig: EmbeddedKafkaConfig = apply()
+
+  def apply(
+      kafkaPort: Int = 6001,
+      zooKeeperPort: Int = 6000,
+      customBrokerProperties: Map[String, String] = Map.empty,
+      customProducerProperties: Map[String, String] = Map.empty,
+      customConsumerProperties: Map[String, String] = Map.empty
+  ) : EmbeddedKafkaConfig =
+    EmbeddedKafkaConfigImpl(kafkaPort, zooKeeperPort, customBrokerProperties, customProducerProperties, customConsumerProperties)
 }
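
Because the companion apply keeps the old parameter names and defaults, existing call sites that built the config directly should keep compiling unchanged; the static type they get back is now the trait, backed by EmbeddedKafkaConfigImpl. A hedged sketch of such a call site (port and property values are illustrative):

import net.manub.embeddedkafka.EmbeddedKafkaConfig

object CustomConfigExample {
  // Same call shape as before the change; only the static type differs.
  implicit val customConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(
      kafkaPort = 12345,
      customBrokerProperties = Map("log.retention.ms" -> "60000")
    )
}

What does go away is the case-class copy method on the config type, which is why the spec change below rebuilds its expected config through EmbeddedKafkaConfigImpl rather than _.copy.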

embedded-kafka/src/test/scala/net/manub/embeddedkafka/EmbeddedKafkaWithRunningKafkaOnFoundPortSpec.scala

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ class EmbeddedKafkaWithRunningKafkaOnFoundPortSpec
       // Confirm both actual configs are running on separate non-zero ports, but otherwise equal
       allConfigs.map(_.kafkaPort).distinct should have size 3
       allConfigs.map(_.zooKeeperPort).distinct should have size 3
-      allConfigs.map(_.copy(kafkaPort = 0, zooKeeperPort = 0)).distinct should have size 1
+      allConfigs.map(config => EmbeddedKafkaConfigImpl(kafkaPort = 0, zooKeeperPort = 0, config.customBrokerProperties, config.customProducerProperties, config.customConsumerProperties)).distinct should have size 1
       actualConfig2
     }
     bothKafkaAndZkAreNotAvailable(actualConfig2)

0 commit comments
