Merge pull request #102 from Yolean/1.8-confluent-rest
Schema Registry and REST Proxy as opt-in folder
solsson authored Feb 3, 2018
2 parents 85ef561 + 4d26f51 commit ea1acda
Showing 7 changed files with 385 additions and 0 deletions.
avro-tools/avro-tools-config.yml: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
kind: ConfigMap
metadata:
  name: avro-tools-config
  namespace: kafka
apiVersion: v1
data:
  schema-registry.properties: |-
    port=80
    listeners=http://0.0.0.0:80
    kafkastore.bootstrap.servers=PLAINTEXT://bootstrap.kafka:9092
    kafkastore.topic=_schemas
    debug=false
    # https://github.com/Landoop/schema-registry-ui#prerequisites
    access.control.allow.methods=GET,POST,PUT,OPTIONS
    access.control.allow.origin=*
  kafka-rest.properties: |-
    #id=kafka-rest-test-server
    listeners=http://0.0.0.0:80
    bootstrap.servers=PLAINTEXT://bootstrap.kafka:9092
    schema.registry.url=http://avro-schemas.kafka:80
    # https://github.com/Landoop/kafka-topics-ui#common-issues
    access.control.allow.methods=GET,POST,PUT,DELETE,OPTIONS
    access.control.allow.origin=*
  log4j.properties: |-
    log4j.rootLogger=INFO, stdout
    log4j.appender.stdout=org.apache.log4j.ConsoleAppender
    log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
    log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
    log4j.logger.kafka=WARN, stdout
    log4j.logger.org.apache.zookeeper=WARN, stdout
    log4j.logger.org.apache.kafka=WARN, stdout
    log4j.logger.org.I0Itec.zkclient=WARN, stdout
    log4j.additivity.kafka.server=false
    log4j.additivity.kafka.consumer.ZookeeperConsumerConnector=false
    log4j.logger.org.apache.kafka.clients.Metadata=DEBUG, stdout
    log4j.logger.org.apache.kafka.clients.consumer.internals.AbstractCoordinator=INFO, stdout
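
The folder is meant to be opt-in. A minimal sketch of enabling it, assuming kubectl points at a cluster where this repo's kafka namespace is already running (the label selectors come from the Deployments below; apply -f on the folder does not recurse into test/):

  kubectl apply -f avro-tools/
  kubectl -n kafka get pods -l app=schema-registry
  kubectl -n kafka get pods -l app=rest-proxy
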
avro-tools/rest-service.yml: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
apiVersion: v1
kind: Service
metadata:
  name: avro-rest
  namespace: kafka
spec:
  ports:
  - port: 80
  selector:
    app: rest-proxy
avro-tools/rest.yml: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
apiVersion: apps/v1beta2
kind: Deployment
metadata:
  name: avro-rest
  namespace: kafka
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rest-proxy
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxUnavailable: 0
      maxSurge: 1
  template:
    metadata:
      labels:
        app: rest-proxy
    spec:
      containers:
      - name: cp
        image: solsson/kafka-cp@sha256:2797da107f477ede2e826c29b2589f99f22d9efa2ba6916b63e07c7045e15044
        env:
        - name: KAFKAREST_LOG4J_OPTS
          value: -Dlog4j.configuration=file:/etc/kafka-rest/log4j.properties
        command:
        - kafka-rest-start
        - /etc/kafka-rest/kafka-rest.properties
        readinessProbe:
          httpGet:
            path: /
            port: 80
        livenessProbe:
          httpGet:
            path: /
            port: 80
        ports:
        - containerPort: 80
        volumeMounts:
        - name: config
          mountPath: /etc/kafka-rest
      volumes:
      - name: config
        configMap:
          name: avro-tools-config
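
Once the Deployment reports Ready, the proxy is reachable through the avro-rest Service above. A quick check from an ad-hoc pod, as a sketch only: the pod name rest-check is arbitrary, the curl image digest is reused from the test jobs below, and --command overrides whatever entrypoint that image has:

  kubectl -n kafka run rest-check --rm -ti --restart=Never \
    --image=solsson/curl@sha256:523319afd39573746e8f5a7c98d4a6cd4b8cbec18b41eb30c8baa13ede120ce3 \
    --command -- curl -s -H 'Accept: application/vnd.kafka.v2+json' http://avro-rest.kafka/topics
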
avro-tools/schemas-service.yml: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
apiVersion: v1
kind: Service
metadata:
  name: avro-schemas
  namespace: kafka
spec:
  ports:
  - port: 80
  selector:
    app: schema-registry
avro-tools/schemas.yml: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
apiVersion: apps/v1beta2
kind: Deployment
metadata:
  name: avro-schemas
  namespace: kafka
spec:
  replicas: 1
  selector:
    matchLabels:
      app: schema-registry
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxUnavailable: 0
      maxSurge: 1
  template:
    metadata:
      labels:
        app: schema-registry
    spec:
      containers:
      - name: cp
        image: solsson/kafka-cp@sha256:2797da107f477ede2e826c29b2589f99f22d9efa2ba6916b63e07c7045e15044
        env:
        - name: SCHEMA_REGISTRY_LOG4J_OPTS
          value: -Dlog4j.configuration=file:/etc/schema-registry/log4j.properties
        command:
        - schema-registry-start
        - /etc/schema-registry/schema-registry.properties
        readinessProbe:
          httpGet:
            path: /
            port: 80
        livenessProbe:
          httpGet:
            path: /
            port: 80
          initialDelaySeconds: 60
        ports:
        - containerPort: 80
        volumeMounts:
        - name: config
          mountPath: /etc/schema-registry
      volumes:
      - name: config
        configMap:
          name: avro-tools-config
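
The registry listens on port 80 behind the avro-schemas Service, so it answers the same subjects/versions calls that the rest-curl test below makes. A sketch from any pod inside the cluster, with test1 used here only as an example subject prefix:

  SCHEMAS=http://avro-schemas.kafka:80
  curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
    --data '{"schema": "{\"type\": \"string\"}"}' \
    $SCHEMAS/subjects/test1-value/versions
  curl $SCHEMAS/subjects
  curl $SCHEMAS/subjects/test1-value/versions/1
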
avro-tools/test/70rest-test1.yml: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
apiVersion: batch/v1
kind: Job
metadata:
  name: rest-test1
  namespace: kafka
spec:
  backoffLimit: 1
  template:
    metadata:
      name: rest-test1
    spec:
      containers:
      - name: curl
        image: solsson/curl@sha256:523319afd39573746e8f5a7c98d4a6cd4b8cbec18b41eb30c8baa13ede120ce3
        env:
        - name: REST
          value: http://rest.kafka.svc.cluster.local
        - name: TOPIC
          value: test1
        command:
        - /bin/bash
        - -ce
        - >
          curl --retry 10 --retry-delay 30 --retry-connrefused -I $REST;
          curl -H 'Accept: application/vnd.kafka.v2+json' $REST/topics;
          curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/test1;
          curl -X POST -H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" --data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date)\"}]}" $REST/topics/$TOPIC -v;
          curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/test2;
          curl -X POST -H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" --data '{"records":[{"value":{"foo":"bar"}}]}' $REST/topics/$TOPIC -v;
          curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' $REST/consumers/my_json_consumer -v;
          curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data "{\"topics\":[\"$TOPIC\"]}" $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription -v;
          curl -X GET -H "Accept: application/vnd.kafka.json.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance/records -v;
          curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance -v;
          sleep 300
      restartPolicy: Never
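
A sketch of running this one-off job and reading its output (assumes the REST proxy above is deployed; the trailing sleep 300 keeps the pod around long enough to follow the log):

  kubectl apply -f avro-tools/test/70rest-test1.yml
  kubectl -n kafka logs -f job/rest-test1
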
avro-tools/test/rest-curl.yml: 186 additions & 0 deletions
@@ -0,0 +1,186 @@
---
kind: ConfigMap
metadata:
  name: rest-curl
  namespace: test-kafka
apiVersion: v1
data:

  setup.sh: |-
    touch /tmp/testlog

    # Keep starting up until rest proxy is up and running
    curl --retry 10 --retry-delay 30 --retry-connrefused -I -s $REST

    curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/brokers | egrep '."brokers":.0'

    curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
    echo ""

    curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
    echo ""

    curl -X POST \
      -H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" \
      --data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date -u -Iseconds)\"}]}" \
      $REST/topics/$TOPIC
    echo ""

    curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
    echo ""

    curl -X POST \
      -H "Content-Type: application/vnd.kafka.v2+json" \
      --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' \
      $REST/consumers/my_json_consumer
    echo ""

    curl -X POST \
      -H "Content-Type: application/vnd.kafka.v2+json" \
      --data "{\"topics\":[\"$TOPIC\"]}" \
      $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription \
      -w "%{http_code}"
    echo ""

    curl -X GET \
      -H "Accept: application/vnd.kafka.json.v2+json" \
      $REST/consumers/my_json_consumer/instances/my_consumer_instance/records

    curl -X DELETE \
      -H "Content-Type: application/vnd.kafka.v2+json" \
      $REST/consumers/my_json_consumer/instances/my_consumer_instance

    # schema-registry

    curl -X GET $SCHEMAS/subjects
    echo ""

    curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
      --data '{"schema": "{\"type\": \"string\"}"}' \
      $SCHEMAS/subjects/$TOPIC-key/versions
    echo ""

    curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
      --data '{"schema": "{\"type\": \"string\"}"}' \
      $SCHEMAS/subjects/$TOPIC-value/versions
    echo ""

    curl -X GET $SCHEMAS/schemas/ids/1
    echo ""

    curl -X GET $SCHEMAS/subjects/$TOPIC-value/versions/1
    echo ""

    # rest + schema
    # TODO new topic needed because this breaks json consumer above

    curl -X POST -H "Content-Type: application/vnd.kafka.avro.v2+json" \
      -H "Accept: application/vnd.kafka.v2+json" \
      --data '{"value_schema": "{\"type\": \"record\", \"name\": \"User\", \"fields\": [{\"name\": \"name\", \"type\": \"string\"}]}", "records": [{"value": {"name": "testUser"}}]}' \
      $REST/topics/$TOPIC
    echo ""

    curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" \
      --data '{"name": "my_consumer_instance", "format": "avro", "auto.offset.reset": "earliest"}' \
      $REST/consumers/my_avro_consumer
    echo ""

    curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" \
      --data "{\"topics\":[\"$TOPIC\"]}" \
      $REST/consumers/my_avro_consumer/instances/my_consumer_instance/subscription

    curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" \
      $REST/consumers/my_avro_consumer/instances/my_consumer_instance/records

    tail -f /tmp/testlog

  continue.sh: |-
    exit 0

  run.sh: |-
    exec >> /tmp/testlog
    exec 2>&1
    exit 0

---
apiVersion: batch/v1
kind: Job
metadata:
  name: rest-curl
  namespace: test-kafka
spec:
  template:
    spec:
      containers:
      - name: topic-create
        image: solsson/kafka:1.0.0@sha256:17fdf1637426f45c93c65826670542e36b9f3394ede1cb61885c6a4befa8f72d
        command:
        - ./bin/kafka-topics.sh
        - --zookeeper
        - zookeeper.kafka.svc.cluster.local:2181
        - --create
        - --if-not-exists
        - --topic
        - test-rest-curl
        - --partitions
        - "1"
        - --replication-factor
        - "1"
      restartPolicy: Never
---
apiVersion: apps/v1beta2
kind: Deployment
metadata:
  name: rest-curl
  namespace: test-kafka
spec:
  replicas: 1
  selector:
    matchLabels:
      test-target: kafka-confluent-rest
      test-type: readiness
  template:
    metadata:
      labels:
        test-target: kafka-confluent-rest
        test-type: readiness
    spec:
      containers:
      - name: testcase
        image: solsson/curl@sha256:523319afd39573746e8f5a7c98d4a6cd4b8cbec18b41eb30c8baa13ede120ce3
        env:
        - name: SCHEMAS
          value: http://schemas.kafka.svc.cluster.local
        - name: REST
          value: http://rest.kafka.svc.cluster.local
        - name: TOPIC
          value: test-rest-curl
        # Test set up
        command:
        - /bin/bash
        - -e
        - /test/setup.sh
        # Test run, again and again
        readinessProbe:
          exec:
            command:
            - /bin/bash
            - -e
            - /test/run.sh
          # We haven't worked on timing
          periodSeconds: 60
        # Test quit on nonzero exit
        livenessProbe:
          exec:
            command:
            - /bin/bash
            - -e
            - /test/continue.sh
        volumeMounts:
        - name: config
          mountPath: /test
      volumes:
      - name: config
        configMap:
          name: rest-curl
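
This test follows the repo's readiness-probe pattern: setup.sh appends its output to /tmp/testlog and then tails it to keep the container alive, while the readinessProbe (run.sh) and livenessProbe (continue.sh) currently just exit 0. A sketch of running it and inspecting the log, assuming the test-kafka namespace exists; the jsonpath lookup is only one way to grab the pod name:

  kubectl apply -f avro-tools/test/rest-curl.yml
  kubectl -n test-kafka get pods -l test-target=kafka-confluent-rest
  POD=$(kubectl -n test-kafka get pods -l test-target=kafka-confluent-rest \
    -o jsonpath='{.items[0].metadata.name}')
  kubectl -n test-kafka exec $POD -- cat /tmp/testlog
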
