Skip to content

Commit ea1acda

Browse files
authored
Merge pull request #102 from Yolean/1.8-confluent-rest
Schema Registry and REST Proxy as opt-in folder
2 parents 85ef561 + 4d26f51 commit ea1acda

File tree

7 files changed

+385
-0
lines changed

7 files changed

+385
-0
lines changed

avro-tools/avro-tools-config.yml

Lines changed: 43 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,43 @@
1+
kind: ConfigMap
2+
metadata:
3+
name: avro-tools-config
4+
namespace: kafka
5+
apiVersion: v1
6+
data:
7+
schema-registry.properties: |-
8+
port=80
9+
listeners=http://0.0.0.0:80
10+
kafkastore.bootstrap.servers=PLAINTEXT://bootstrap.kafka:9092
11+
kafkastore.topic=_schemas
12+
debug=false
13+
14+
# https://github.com/Landoop/schema-registry-ui#prerequisites
15+
access.control.allow.methods=GET,POST,PUT,OPTIONS
16+
access.control.allow.origin=*
17+
18+
kafka-rest.properties: |-
19+
#id=kafka-rest-test-server
20+
listeners=http://0.0.0.0:80
21+
bootstrap.servers=PLAINTEXT://bootstrap.kafka:9092
22+
schema.registry.url=http://avro-schemas.kafka:80
23+
24+
# https://github.com/Landoop/kafka-topics-ui#common-issues
25+
access.control.allow.methods=GET,POST,PUT,DELETE,OPTIONS
26+
access.control.allow.origin=*
27+
28+
log4j.properties: |-
29+
log4j.rootLogger=INFO, stdout
30+
31+
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
32+
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
33+
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
34+
35+
log4j.logger.kafka=WARN, stdout
36+
log4j.logger.org.apache.zookeeper=WARN, stdout
37+
log4j.logger.org.apache.kafka=WARN, stdout
38+
log4j.logger.org.I0Itec.zkclient=WARN, stdout
39+
log4j.additivity.kafka.server=false
40+
log4j.additivity.kafka.consumer.ZookeeperConsumerConnector=false
41+
42+
log4j.logger.org.apache.kafka.clients.Metadata=DEBUG, stdout
43+
log4j.logger.org.apache.kafka.clients.consumer.internals.AbstractCoordinator=INFO, stdout

avro-tools/rest-service.yml

Lines changed: 10 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,10 @@
1+
apiVersion: v1
2+
kind: Service
3+
metadata:
4+
name: avro-rest
5+
namespace: kafka
6+
spec:
7+
ports:
8+
- port: 80
9+
selector:
10+
app: rest-proxy

avro-tools/rest.yml

Lines changed: 46 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,46 @@
1+
apiVersion: apps/v1beta2
2+
kind: Deployment
3+
metadata:
4+
name: avro-rest
5+
namespace: kafka
6+
spec:
7+
replicas: 1
8+
selector:
9+
matchLabels:
10+
app: rest-proxy
11+
strategy:
12+
type: RollingUpdate
13+
rollingUpdate:
14+
maxUnavailable: 0
15+
maxSurge: 1
16+
template:
17+
metadata:
18+
labels:
19+
app: rest-proxy
20+
spec:
21+
containers:
22+
- name: cp
23+
image: solsson/kafka-cp@sha256:2797da107f477ede2e826c29b2589f99f22d9efa2ba6916b63e07c7045e15044
24+
env:
25+
- name: KAFKAREST_LOG4J_OPTS
26+
value: -Dlog4j.configuration=file:/etc/kafka-rest/log4j.properties
27+
command:
28+
- kafka-rest-start
29+
- /etc/kafka-rest/kafka-rest.properties
30+
readinessProbe:
31+
httpGet:
32+
path: /
33+
port: 80
34+
livenessProbe:
35+
httpGet:
36+
path: /
37+
port: 80
38+
ports:
39+
- containerPort: 80
40+
volumeMounts:
41+
- name: config
42+
mountPath: /etc/kafka-rest
43+
volumes:
44+
- name: config
45+
configMap:
46+
name: avro-tools-config

avro-tools/schemas-service.yml

Lines changed: 10 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,10 @@
1+
apiVersion: v1
2+
kind: Service
3+
metadata:
4+
name: avro-schemas
5+
namespace: kafka
6+
spec:
7+
ports:
8+
- port: 80
9+
selector:
10+
app: schema-registry

avro-tools/schemas.yml

Lines changed: 47 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,47 @@
1+
apiVersion: apps/v1beta2
2+
kind: Deployment
3+
metadata:
4+
name: avro-schemas
5+
namespace: kafka
6+
spec:
7+
replicas: 1
8+
selector:
9+
matchLabels:
10+
app: schema-registry
11+
strategy:
12+
type: RollingUpdate
13+
rollingUpdate:
14+
maxUnavailable: 0
15+
maxSurge: 1
16+
template:
17+
metadata:
18+
labels:
19+
app: schema-registry
20+
spec:
21+
containers:
22+
- name: cp
23+
image: solsson/kafka-cp@sha256:2797da107f477ede2e826c29b2589f99f22d9efa2ba6916b63e07c7045e15044
24+
env:
25+
- name: SCHEMA_REGISTRY_LOG4J_OPTS
26+
value: -Dlog4j.configuration=file:/etc/schema-registry/log4j.properties
27+
command:
28+
- schema-registry-start
29+
- /etc/schema-registry/schema-registry.properties
30+
readinessProbe:
31+
httpGet:
32+
path: /
33+
port: 80
34+
livenessProbe:
35+
httpGet:
36+
path: /
37+
port: 80
38+
initialDelaySeconds: 60
39+
ports:
40+
- containerPort: 80
41+
volumeMounts:
42+
- name: config
43+
mountPath: /etc/schema-registry
44+
volumes:
45+
- name: config
46+
configMap:
47+
name: avro-tools-config

avro-tools/test/70rest-test1.yml

Lines changed: 43 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,43 @@
1+
apiVersion: batch/v1
2+
kind: Job
3+
metadata:
4+
name: rest-test1
5+
namespace: kafka
6+
spec:
7+
backoffLimit: 1
8+
template:
9+
metadata:
10+
name: rest-test1
11+
spec:
12+
containers:
13+
- name: curl
14+
image: solsson/curl@sha256:523319afd39573746e8f5a7c98d4a6cd4b8cbec18b41eb30c8baa13ede120ce3
15+
env:
16+
- name: REST
17+
value: http://rest.kafka.svc.cluster.local
18+
- name: TOPIC
19+
value: test1
20+
command:
21+
- /bin/bash
22+
- -ce
23+
- >
24+
curl --retry 10 --retry-delay 30 --retry-connrefused -I $REST;
25+
26+
curl -H 'Accept: application/vnd.kafka.v2+json' $REST/topics;
27+
28+
curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/test1;
29+
curl -X POST -H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" --data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date)\"}]}" $REST/topics/$TOPIC -v;
30+
curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/test2;
31+
32+
curl -X POST -H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" --data '{"records":[{"value":{"foo":"bar"}}]}' $REST/topics/$TOPIC -v;
33+
34+
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' $REST/consumers/my_json_consumer -v;
35+
36+
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data "{\"topics\":[\"$TOPIC\"]}" $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription -v;
37+
38+
curl -X GET -H "Accept: application/vnd.kafka.json.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance/records -v;
39+
40+
curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance -v;
41+
42+
sleep 300
43+
restartPolicy: Never

avro-tools/test/rest-curl.yml

Lines changed: 186 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,186 @@
1+
---
2+
kind: ConfigMap
3+
metadata:
4+
name: rest-curl
5+
namespace: test-kafka
6+
apiVersion: v1
7+
data:
8+
9+
setup.sh: |-
10+
touch /tmp/testlog
11+
12+
# Keep starting up until rest proxy is up and running
13+
curl --retry 10 --retry-delay 30 --retry-connrefused -I -s $REST
14+
15+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/brokers | egrep '."brokers":.0'
16+
17+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
18+
echo ""
19+
20+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
21+
echo ""
22+
23+
curl -X POST \
24+
-H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" \
25+
--data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date -u -Iseconds)\"}]}" \
26+
$REST/topics/$TOPIC
27+
echo ""
28+
29+
curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
30+
echo ""
31+
32+
curl -X POST \
33+
-H "Content-Type: application/vnd.kafka.v2+json" \
34+
--data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' \
35+
$REST/consumers/my_json_consumer
36+
echo ""
37+
38+
curl -X POST \
39+
-H "Content-Type: application/vnd.kafka.v2+json" \
40+
--data "{\"topics\":[\"$TOPIC\"]}" \
41+
$REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription \
42+
-w "%{http_code}"
43+
echo ""
44+
45+
curl -X GET \
46+
-H "Accept: application/vnd.kafka.json.v2+json" \
47+
$REST/consumers/my_json_consumer/instances/my_consumer_instance/records
48+
49+
curl -X DELETE \
50+
-H "Content-Type: application/vnd.kafka.v2+json" \
51+
$REST/consumers/my_json_consumer/instances/my_consumer_instance
52+
53+
# schema-registry
54+
55+
curl -X GET $SCHEMAS/subjects
56+
echo ""
57+
58+
curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
59+
--data '{"schema": "{\"type\": \"string\"}"}' \
60+
$SCHEMAS/subjects/$TOPIC-key/versions
61+
echo ""
62+
63+
curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
64+
--data '{"schema": "{\"type\": \"string\"}"}' \
65+
$SCHEMAS/subjects/$TOPIC-value/versions
66+
echo ""
67+
68+
curl -X GET $SCHEMAS/schemas/ids/1
69+
echo ""
70+
71+
curl -X GET $SCHEMAS/subjects/$TOPIC-value/versions/1
72+
echo ""
73+
74+
# rest + schema
75+
# TODO new topic needed because this breaks json consumer above
76+
77+
curl -X POST -H "Content-Type: application/vnd.kafka.avro.v2+json" \
78+
-H "Accept: application/vnd.kafka.v2+json" \
79+
--data '{"value_schema": "{\"type\": \"record\", \"name\": \"User\", \"fields\": [{\"name\": \"name\", \"type\": \"string\"}]}", "records": [{"value": {"name": "testUser"}}]}' \
80+
$REST/topics/$TOPIC
81+
echo ""
82+
83+
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" \
84+
--data '{"name": "my_consumer_instance", "format": "avro", "auto.offset.reset": "earliest"}' \
85+
$REST/consumers/my_avro_consumer
86+
echo ""
87+
88+
curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" \
89+
--data "{\"topics\":[\"$TOPIC\"]}" \
90+
$REST/consumers/my_avro_consumer/instances/my_consumer_instance/subscription
91+
92+
curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" \
93+
$REST/consumers/my_avro_consumer/instances/my_consumer_instance/records
94+
95+
tail -f /tmp/testlog
96+
97+
continue.sh: |-
98+
exit 0
99+
100+
run.sh: |-
101+
exec >> /tmp/testlog
102+
exec 2>&1
103+
104+
exit 0
105+
106+
---
107+
apiVersion: batch/v1
108+
kind: Job
109+
metadata:
110+
name: rest-curl
111+
namespace: test-kafka
112+
spec:
113+
template:
114+
spec:
115+
containers:
116+
- name: topic-create
117+
image: solsson/kafka:1.0.0@sha256:17fdf1637426f45c93c65826670542e36b9f3394ede1cb61885c6a4befa8f72d
118+
command:
119+
- ./bin/kafka-topics.sh
120+
- --zookeeper
121+
- zookeeper.kafka.svc.cluster.local:2181
122+
- --create
123+
- --if-not-exists
124+
- --topic
125+
- test-rest-curl
126+
- --partitions
127+
- "1"
128+
- --replication-factor
129+
- "1"
130+
restartPolicy: Never
131+
---
132+
apiVersion: apps/v1beta2
133+
kind: Deployment
134+
metadata:
135+
name: rest-curl
136+
namespace: test-kafka
137+
spec:
138+
replicas: 1
139+
selector:
140+
matchLabels:
141+
test-target: kafka-confluent-rest
142+
test-type: readiness
143+
template:
144+
metadata:
145+
labels:
146+
test-target: kafka-confluent-rest
147+
test-type: readiness
148+
spec:
149+
containers:
150+
- name: testcase
151+
image: solsson/curl@sha256:523319afd39573746e8f5a7c98d4a6cd4b8cbec18b41eb30c8baa13ede120ce3
152+
env:
153+
- name: SCHEMAS
154+
value: http://schemas.kafka.svc.cluster.local
155+
- name: REST
156+
value: http://rest.kafka.svc.cluster.local
157+
- name: TOPIC
158+
value: test-rest-curl
159+
# Test set up
160+
command:
161+
- /bin/bash
162+
- -e
163+
- /test/setup.sh
164+
# Test run, again and again
165+
readinessProbe:
166+
exec:
167+
command:
168+
- /bin/bash
169+
- -e
170+
- /test/run.sh
171+
# We haven't worked on timing
172+
periodSeconds: 60
173+
# Test quit on nonzero exit
174+
livenessProbe:
175+
exec:
176+
command:
177+
- /bin/bash
178+
- -e
179+
- /test/continue.sh
180+
volumeMounts:
181+
- name: config
182+
mountPath: /test
183+
volumes:
184+
- name: config
185+
configMap:
186+
name: rest-curl

0 commit comments

Comments
 (0)