author    solsson <solsson@gmail.com>  2017-08-01 14:39:30 +0200
committer GitHub <noreply@github.com>  2017-08-01 14:39:30 +0200
commit 7b4cfb48d0fe25a3c76d6d183a826ec8868d70b7 (patch)
tree   05d684f8f601fee5f61ed0bace1c1433b9999cd3
parent 713743ec4469dd40b63344fe7b3d81f6dce5a643 (diff)
parent ac5c75a977c796313dc5f84831aa50529457be38 (diff)
Merge pull request #54 from Yolean/addon-rest-new-build
Use config files and kafka-jre based build for Confluent Platform services
-rw-r--r--  11confluent-config.yml  67
-rw-r--r--  61schemas.yml           22
-rw-r--r--  71rest.yml              24
-rw-r--r--  test/rest-curl.yml      34
4 files changed, 118 insertions, 29 deletions
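
To try the merged result, a minimal sketch (assuming kubectl points at a cluster where the kafka namespace, brokers and zookeeper from this repo are already running):

    kubectl apply -f 11confluent-config.yml
    kubectl apply -f 61schemas.yml
    kubectl apply -f 71rest.yml
    kubectl apply -f test/rest-curl.yml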
diff --git a/11confluent-config.yml b/11confluent-config.yml
new file mode 100644
index 0000000..12fa7db
--- /dev/null
+++ b/11confluent-config.yml
@@ -0,0 +1,67 @@
+kind: ConfigMap
+metadata:
+ name: confluent-config
+ namespace: kafka
+apiVersion: v1
+data:
+ schema-registry.properties: |-
+ # Copyright 2014 Confluent Inc.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ listeners=http://0.0.0.0:80
+ kafkastore.connection.url=zookeeper:2181
+ kafkastore.topic=_schemas
+ debug=false
+
+ kafka-rest.properties: |-
+ ##
+ # Copyright 2015 Confluent Inc.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ ##
+
+ #id=kafka-rest-test-server
+ listeners=http://0.0.0.0:80
+ bootstrap.servers=kafka-0.broker.kafka.svc.cluster.local:9092,kafka-1.broker.kafka.svc.cluster.local:9092,kafka-2.broker.kafka.svc.cluster.local:9092
+ zookeeper.connect=zookeeper:2181
+ schema.registry.url=http://schemas.kafka.svc.cluster.local:80
+ #
+ # Configure interceptor classes for sending consumer and producer metrics to Confluent Control Center
+ # Make sure that monitoring-interceptors-<version>.jar is on the Java class path
+ #consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
+ #producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
+
+ log4j.properties: |-
+ log4j.rootLogger=INFO, stdout
+
+ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+ log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+
+ log4j.logger.kafka=ERROR, stdout
+ log4j.logger.org.apache.zookeeper=ERROR, stdout
+ log4j.logger.org.apache.kafka=ERROR, stdout
+ log4j.logger.org.I0Itec.zkclient=ERROR, stdout
+ log4j.additivity.kafka.server=false
+ log4j.additivity.kafka.consumer.ZookeeperConsumerConnector=false
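
The ConfigMap above replaces the env-var-driven configuration of the upstream cp-* images with mounted .properties files. A quick way to check what the pods will see (a sketch; the app=schema-registry label comes from 61schemas.yml below):

    kubectl -n kafka get configmap confluent-config -o yaml
    POD=$(kubectl -n kafka get pods -l app=schema-registry -o jsonpath='{.items[0].metadata.name}')
    kubectl -n kafka exec $POD -- cat /etc/schema-registry/schema-registry.properties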
diff --git a/61schemas.yml b/61schemas.yml
index 20d1868..f85d74c 100644
--- a/61schemas.yml
+++ b/61schemas.yml
@@ -11,14 +11,20 @@ spec:
app: schema-registry
spec:
containers:
- - name: cp-schema-registry
- image: confluentinc/cp-schema-registry@sha256:ac1eb34d9a60ce8904eb1bc01fd94bf1f6513924ca507734679d4b513133714c
+ - name: cp
+ image: solsson/kafka-cp@sha256:a22047b9e8bf4b8badfd2fbba47f2d1acdcbb84dfb03c61a15e1ac203036cedf
env:
- - name: SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL
- value: zookeeper:2181
- - name: SCHEMA_REGISTRY_HOST_NAME
- value: schemas
- - name: SCHEMA_REGISTRY_LISTENERS
- value: http://0.0.0.0:80
+ - name: SCHEMA_REGISTRY_LOG4J_OPTS
+ value: -Dlog4j.configuration=file:/etc/schema-registry/log4j.properties
+ command:
+ - schema-registry-start
+ - /etc/schema-registry/schema-registry.properties
ports:
- containerPort: 80
+ volumeMounts:
+ - name: config
+ mountPath: /etc/schema-registry
+ volumes:
+ - name: config
+ configMap:
+ name: confluent-config
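
With the container now running schema-registry-start against the mounted properties file, a hedged smoke test (the schemas service name is assumed from the schema.registry.url in kafka-rest.properties; /subjects is a standard Schema Registry endpoint):

    kubectl -n kafka logs -l app=schema-registry --tail=20
    # from any pod inside the cluster:
    curl -s http://schemas.kafka.svc.cluster.local/subjects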
diff --git a/71rest.yml b/71rest.yml
index 8f52b42..f03f5b5 100644
--- a/71rest.yml
+++ b/71rest.yml
@@ -11,16 +11,20 @@ spec:
app: kafka-rest
spec:
containers:
- - name: cp-kafka-rest
- image: confluentinc/cp-kafka-rest@sha256:aa213c1a67eae6ce9836b52a9b5ecee4d6a0b44f2b9cc69f4e4de85131462f1d
+ - name: cp
+ image: solsson/kafka-cp@sha256:a22047b9e8bf4b8badfd2fbba47f2d1acdcbb84dfb03c61a15e1ac203036cedf
env:
- - name: KAFKA_REST_ZOOKEEPER_CONNECT
- value: zookeeper:2181
- - name: KAFKA_REST_HOST_NAME
- value: rest
- - name: KAFKA_REST_LISTENERS
- value: http://0.0.0.0:80
- - name: KAFKA_REST_SCHEMA_REGISTRY_URL
- value: http://schemas.kafka.svc.cluster.local:80
+ - name: KAFKAREST_LOG4J_OPTS
+ value: -Dlog4j.configuration=file:/etc/kafka-rest/log4j.properties
+ command:
+ - kafka-rest-start
+ - /etc/kafka-rest/kafka-rest.properties
ports:
- containerPort: 80
+ volumeMounts:
+ - name: config
+ mountPath: /etc/kafka-rest
+ volumes:
+ - name: config
+ configMap:
+ name: confluent-config
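
Same pattern for the REST proxy; note that KAFKAREST_LOG4J_OPTS (no underscore between KAFKA and REST) is deliberate, matching what the kafka-rest start scripts read. An in-cluster check, assuming a Service named rest as the old KAFKA_REST_HOST_NAME value suggests:

    kubectl -n kafka logs -l app=kafka-rest --tail=20
    # from any pod inside the cluster:
    curl -s -H 'Accept: application/vnd.kafka.v2+json' http://rest.kafka.svc.cluster.local/topics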
diff --git a/test/rest-curl.yml b/test/rest-curl.yml
index 05fc178..f8dc8de 100644
--- a/test/rest-curl.yml
+++ b/test/rest-curl.yml
@@ -12,31 +12,43 @@ data:
# Keep starting up until rest proxy is up and running
curl --retry 10 --retry-delay 30 --retry-connrefused -I -s $REST
+ curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/brokers | egrep '."brokers":.0'
- curl -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
+ curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics
echo ""
- curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
+ curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC
echo ""
curl -X POST \
-H "Content-Type: application/vnd.kafka.json.v2+json" -H "Accept: application/vnd.kafka.v2+json" \
--data "{\"records\":[{\"value\":\"Test from $HOSTNAME at $(date -u -Iseconds)\"}]}" \
- $REST/topics/$TOPIC -v --max-time 30 \
- || echo " (timeout might be ok because we only want to send one message)"
- # TODO why does the above block?
-
+ $REST/topics/$TOPIC
+ echo ""
- curl --retry 10 -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
+ curl -s -H 'Accept: application/vnd.kafka.v2+json' $REST/topics/$TOPIC/partitions
echo ""
- curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' $REST/consumers/my_json_consumer -v;
+ curl -X POST \
+ -H "Content-Type: application/vnd.kafka.v2+json" \
+ --data '{"name": "my_consumer_instance", "format": "json", "auto.offset.reset": "earliest"}' \
+ $REST/consumers/my_json_consumer
+ echo ""
- curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --data "{\"topics\":[\"$TOPIC\"]}" $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription -v;
+ curl -X POST \
+ -H "Content-Type: application/vnd.kafka.v2+json" \
+ --data "{\"topics\":[\"$TOPIC\"]}" \
+ $REST/consumers/my_json_consumer/instances/my_consumer_instance/subscription \
+ -w "%{http_code}"
+ echo ""
- curl -X GET -H "Accept: application/vnd.kafka.json.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance/records -v;
+ curl -X GET \
+ -H "Accept: application/vnd.kafka.json.v2+json" \
+ $REST/consumers/my_json_consumer/instances/my_consumer_instance/records
- curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" $REST/consumers/my_json_consumer/instances/my_consumer_instance -v;
+ curl -X DELETE \
+ -H "Content-Type: application/vnd.kafka.v2+json" \
+ $REST/consumers/my_json_consumer/instances/my_consumer_instance
tail -f /tmp/testlog
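
Since the script ends with tail -f /tmp/testlog, the container stays up and streams the test output to stdout, so it can be followed with kubectl logs ($REST and $TOPIC are presumably defined earlier in this ConfigMap, outside the hunk shown). A sketch, with a hypothetical pod name:

    kubectl apply -f test/rest-curl.yml
    kubectl -n kafka get pods
    kubectl -n kafka logs -f <rest-curl-pod-name>   # hypothetical; copy the real name from get pods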