Skip to content

Commit 7b4cfb4

Browse files
authored
Merge pull request #54 from Yolean/addon-rest-new-build
Use config files and kafka-jre based build for Confluent Platform services
2 parents 713743e + ac5c75a commit 7b4cfb4

File tree

4 files changed

+118
-29
lines changed

4 files changed

+118
-29
lines changed

confluent-config.yml

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
kind: ConfigMap
2+
metadata:
3+
name: confluent-config
4+
namespace: kafka
5+
apiVersion: v1
6+
data:
7+
schema-registry.properties: |-
8+
# Copyright 2014 Confluent Inc.
9+
#
10+
# Licensed under the Apache License, Version 2.0 (the "License");
11+
# you may not use this file except in compliance with the License.
12+
# You may obtain a copy of the License at
13+
#
14+
# http://www.apache.org/licenses/LICENSE-2.0
15+
#
16+
# Unless required by applicable law or agreed to in writing, software
17+
# distributed under the License is distributed on an "AS IS" BASIS,
18+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19+
# See the License for the specific language governing permissions and
20+
# limitations under the License.
21+
22+
listeners=http://0.0.0.0:80
23+
kafkastore.connection.url=zookeeper:2181
24+
kafkastore.topic=_schemas
25+
debug=false
26+
27+
kafka-rest.properties: |-
28+
##
29+
# Copyright 2015 Confluent Inc.
30+
#
31+
# Licensed under the Apache License, Version 2.0 (the "License");
32+
# you may not use this file except in compliance with the License.
33+
# You may obtain a copy of the License at
34+
#
35+
# http://www.apache.org/licenses/LICENSE-2.0
36+
#
37+
# Unless required by applicable law or agreed to in writing, software
38+
# distributed under the License is distributed on an "AS IS" BASIS,
39+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
40+
# See the License for the specific language governing permissions and
41+
# limitations under the License.
42+
##
43+
44+
#id=kafka-rest-test-server
45+
listeners=http://0.0.0.0:80
46+
bootstrap.servers=kafka-0.broker.kafka.svc.cluster.local:9092,kafka-1.broker.kafka.svc.cluster.local:9092,kafka-2.broker.kafka.svc.cluster.local:9092
47+
zookeeper.connect=zookeeper:2181
48+
schema.registry.url=http://schemas.kafka.svc.cluster.local:80
49+
#
50+
# Configure interceptor classes for sending consumer and producer metrics to Confluent Control Center
51+
# Make sure that monitoring-interceptors-<version>.jar is on the Java class path
52+
#consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
53+
#producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
54+
55+
log4j.properties: |-
56+
log4j.rootLogger=INFO, stdout
57+
58+
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
59+
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
60+
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
61+
62+
log4j.logger.kafka=ERROR, stdout
63+
log4j.logger.org.apache.zookeeper=ERROR, stdout
64+
log4j.logger.org.apache.kafka=ERROR, stdout
65+
log4j.logger.org.I0Itec.zkclient=ERROR, stdout
66+
log4j.additivity.kafka.server=false
67+
log4j.additivity.kafka.consumer.ZookeeperConsumerConnector=false

schemas.yml

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,14 +11,20 @@ spec:
1111
app: schema-registry
1212
spec:
1313
containers:
14-
- name: cp-schema-registry
15-
image: confluentinc/cp-schema-registry@sha256:ac1eb34d9a60ce8904eb1bc01fd94bf1f6513924ca507734679d4b513133714c
14+
- name: cp
15+
image: solsson/kafka-cp@sha256:a22047b9e8bf4b8badfd2fbba47f2d1acdcbb84dfb03c61a15e1ac203036cedf
1616
env:
17-
- name: SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL
18-
value: zookeeper:2181
19-
- name: SCHEMA_REGISTRY_HOST_NAME
20-
value: schemas
21-
- name: SCHEMA_REGISTRY_LISTENERS
22-
value: http://0.0.0.0:80
17+
- name: SCHEMA_REGISTRY_LOG4J_OPTS
18+
value: -Dlog4j.configuration=file:/etc/schema-registry/log4j.properties
19+
command:
20+
- schema-registry-start
21+
- /etc/schema-registry/schema-registry.properties
2322
ports:
2423
- containerPort: 80
24+
volumeMounts:
25+
- name: config
26+
mountPath: /etc/schema-registry
27+
volumes:
28+
- name: config
29+
configMap:
30+
name: confluent-config

rest.yml

Lines changed: 14 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -11,16 +11,20 @@ spec:
1111
app: kafka-rest
1212
spec:
1313
containers:
14-
- name: cp-kafka-rest
15-
image: confluentinc/cp-kafka-rest@sha256:aa213c1a67eae6ce9836b52a9b5ecee4d6a0b44f2b9cc69f4e4de85131462f1d
14+
- name: cp
15+
image: solsson/kafka-cp@sha256:a22047b9e8bf4b8badfd2fbba47f2d1acdcbb84dfb03c61a15e1ac203036cedf
1616
env:
17-
- name: KAFKA_REST_ZOOKEEPER_CONNECT
18-
value: zookeeper:2181
19-
- name: KAFKA_REST_HOST_NAME
20-
value: rest
21-
- name: KAFKA_REST_LISTENERS
22-
value: http://0.0.0.0:80
23-
- name: KAFKA_REST_SCHEMA_REGISTRY_URL
24-
value: http://schemas.kafka.svc.cluster.local:80
17+
- name: KAFKAREST_LOG4J_OPTS
18+
value: -Dlog4j.configuration=file:/etc/kafka-rest/log4j.properties
19+
command:
20+
- kafka-rest-start
21+
- /etc/kafka-rest/kafka-rest.properties
2522
ports:
2623
- containerPort: 80
24+
volumeMounts:
25+
- name: config
26+
mountPath: /etc/kafka-rest
27+
volumes:
28+
- name: config
29+
configMap:
30+
name: confluent-config

test/rest-curl.yml

Lines changed: 23 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -12,31 +12,43 @@ data:
1212
# Keep starting up until rest proxy is up and running
1313
curl --retry 10 --retry-delay 30 --retry-connrefused -I -s $REST
1414
15+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/brokers | egrep '."brokers":.0'
1516
16-
curl -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
17+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
1718
echo ""
1819
19-
curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
20+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
2021
echo ""
2122
2223
curl -X POST \
2324
-H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" \
2425
--data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date -u -Iseconds)\"}]}" \
25-
$REST/topics/$TOPIC -v --max-time 30 \
26-
|| echo " (timeout might be ok because we only want to send one message)"
27-
# TODO why does the above block?
28-
26+
$REST/topics/$TOPIC
27+
echo ""
2928
30-
curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
29+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
3130
echo ""
3231
33-
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' $REST/consumers/my_json_consumer -v;
32+
curl -X POST \
33+
-H "Content-Type: application/vnd.kafka.v2+json" \
34+
--data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' \
35+
$REST/consumers/my_json_consumer
36+
echo ""
3437
35-
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data "{\"topics\":[\"$TOPIC\"]}" $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription -v;
38+
curl -X POST \
39+
-H "Content-Type: application/vnd.kafka.v2+json" \
40+
--data "{\"topics\":[\"$TOPIC\"]}" \
41+
$REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription \
42+
-w "%{http_code}"
43+
echo ""
3644
37-
curl -X GET -H "Accept: application/vnd.kafka.json.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance/records -v;
45+
curl -X GET \
46+
-H "Accept: application/vnd.kafka.json.v2+json" \
47+
$REST/consumers/my_json_consumer/instances/my_consumer_instance/records
3848
39-
curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance -v;
49+
curl -X DELETE \
50+
-H "Content-Type: application/vnd.kafka.v2+json" \
51+
$REST/consumers/my_json_consumer/instances/my_consumer_instance
4052
4153
tail -f /tmp/testlog
4254

0 commit comments

Comments (0)