diff --git a/Dockerfile.testing b/Dockerfile.testing
new file mode 100644
index 00000000..c5b47aff
--- /dev/null
+++ b/Dockerfile.testing
@@ -0,0 +1,81 @@
+# === BUILDER IMAGE ===
+FROM alpine:3.12 as builder
+USER root
+WORKDIR /asn1_codec
+VOLUME ["/asn1_codec_share"]
+
+# add build dependencies
+RUN apk add --upgrade --no-cache --virtual .build-deps \
+ cmake \
+ g++ \
+ make \
+ bash \
+ automake \
+ libtool \
+ autoconf \
+ librdkafka \
+ librdkafka-dev \
+ flex \
+ bison
+
+# Install pugixml
+ADD ./pugixml /asn1_codec/pugixml
+RUN cd /asn1_codec/pugixml && mkdir -p build && cd build && cmake .. && make && make install
+
+# Build and install asn1c submodule
+ADD ./usdot-asn1c /asn1_codec/asn1c
+RUN cd asn1c && (test -f configure || autoreconf -iv) && ./configure && make && make install
+
+# Make generated files available to the build & compile example
+ENV LD_LIBRARY_PATH=/usr/local/lib
+ADD ./asn1c_combined /asn1_codec/asn1c_combined
+RUN cd /asn1_codec/asn1c_combined && bash doIt.sh
+
+# Remove any lingering .asn files
+RUN rm -rf /asn1_codec/asn1c_combined/j2735-asn-files
+RUN rm -rf /asn1_codec/asn1c_combined/semi-asn-files
+RUN rm -rf /asn1_codec/asn1c_combined/scms-asn-files
+
+# Remove duplicate files
+RUN rm -rf /asn1_codec/asn1c_combined/generated-files
+
+# add the source and build files
+ADD CMakeLists.txt /asn1_codec
+ADD ./config /asn1_codec/config
+ADD ./include /asn1_codec/include
+ADD ./src /asn1_codec/src
+ADD ./kafka-test /asn1_codec/kafka-test
+ADD ./unit-test-data /asn1_codec/unit-test-data
+ADD ./data /asn1_codec/data
+ADD ./run_acm.sh /asn1_codec
+ADD ./docker-test /asn1_codec/docker-test
+
+RUN echo "export LD_LIBRARY_PATH=/usr/local/lib" >> ~/.profile
+RUN echo "export LD_LIBRARY_PATH=/usr/local/lib" >> ~/.bashrc
+RUN echo "export CC=gcc" >> ~/.profile
+RUN echo "export CC=gcc" >> ~/.bashrc
+
+# Build acm.
+RUN mkdir -p /build && cd /build && cmake /asn1_codec && make
+
+# === RUNTIME IMAGE ===
+FROM alpine:3.12
+USER root
+WORKDIR /asn1_codec
+VOLUME ["/asn1_codec_share"]
+
+# add runtime dependencies
+RUN apk add --upgrade --no-cache \
+ bash \
+ python3 \
+ librdkafka \
+ librdkafka-dev
+
+# copy the built files from the builder
+COPY --from=builder /asn1_codec /asn1_codec
+COPY --from=builder /build /build
+
+# run ACM
+RUN chmod 755 /asn1_codec/run_acm.sh
+CMD ["/asn1_codec/run_acm.sh"]
diff --git a/config/test/c1.properties b/config/test/c1.properties
index 80b25239..5d0e6111 100644
--- a/config/test/c1.properties
+++ b/config/test/c1.properties
@@ -1,16 +1,15 @@
# Kafka group.
group.id=0
-# ACM operation type specification; default is decode if left out.
-acm.type=decode
-# acm.type=encode
+# ACM operation type specification; default is decode if left out. This gets set in the `standalone.sh` script.
+acm.type=encode
# Path (relative or absolute) to the ACM error reporting XML template.
acm.error.template=./config/Output.error.xml
# Kafka topics for ASN.1 Parsing
-asn1.topic.consumer=j2735asn1per
-asn1.topic.producer=j2735asn1xer
+asn1.topic.consumer=topic.Asn1EncoderInput
+asn1.topic.producer=topic.Asn1EncoderOutput
# Amount of time to wait when no message is available (milliseconds)
# This is a Kafka configuration parameter that we are using for the
@@ -20,10 +19,8 @@ asn1.consumer.timeout.ms=5000
# For testing purposes, use one partition.
asn1.kafka.partition=0
-# The host ip address for the Broker.
-# metadata.broker.list=160.91.216.129:9092
+# The host ip address for the Broker. This gets set in the `standalone.sh` script.
metadata.broker.list=172.17.0.1:9092
-# metadata.broker.list=localhost:9092
# specify the compression codec for all data generated: none, gzip, snappy, lz4
compression.type=none
diff --git a/data/README.md b/data/README.md
index 9c966767..d02b6af6 100644
--- a/data/README.md
+++ b/data/README.md
@@ -42,7 +42,9 @@ The data files in this directory are referenced in the following files:
| InputData.encoding.tim.odetimpayload.xml | src/tests.cpp | Encode TIM with payload type 'OdeTimPayload' |
| InputData.encoding.tim.odeasdpayload.xml | src/tests.cpp | Encode TIM with payload type 'OdeAsdPayload' |
| InputData.decoding.bsm.xml | src/tests.cpp | Decode BSM |
-| producer_test_xml.txt | do_kafka_test.sh | ./test-scripts/standalone.sh config/test/c1.properties data/producer_test_xml.txt encode 0 |
+| producer_test1_xml.txt | do_kafka_test.sh | Encode AdvisorySituationData containing HEX-encoded BSM |
+| producer_test2_xml.txt | do_kafka_test.sh | Encode Ieee1609Dot2Data containing HEX-encoded BSM |
+| producer_test3_xml.txt | do_kafka_test.sh | Encode AdvisorySituationData containing Ieee1609Dot2Data containing HEX-encoded BSM |
| InputData.Ieee1609Dot2Data.packed.xml | testing.md | Testing Documentation |
| j2735.MessageFrame.Bsm.xml | data/README.md | Building Test Data Files |
| j2735.MessageFrame.Bsm.uper | data/README.md | Building Test Data Files |
diff --git a/data/producer_test1_xml.txt b/data/producer_test1_xml.txt
new file mode 100644
index 00000000..30de0e97
--- /dev/null
+++ b/data/producer_test1_xml.txt
@@ -0,0 +1 @@
+us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514AdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-104111312008478278212017121174720181211747001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
\ No newline at end of file
diff --git a/data/producer_test2_xml.txt b/data/producer_test2_xml.txt
new file mode 100644
index 00000000..5df2eb62
--- /dev/null
+++ b/data/producer_test2_xml.txt
@@ -0,0 +1 @@
+us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514Ieee1609Dot2DataIeee1609Dot2DataCOERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData3001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
\ No newline at end of file
diff --git a/data/producer_test3_xml.txt b/data/producer_test3_xml.txt
new file mode 100644
index 00000000..37369682
--- /dev/null
+++ b/data/producer_test3_xml.txt
@@ -0,0 +1 @@
+us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514Ieee1609Dot2DataIeee1609Dot2DataCOERAdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-1041113120084782782120171211747201812117473001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
\ No newline at end of file
diff --git a/data/producer_test_xml.txt b/data/producer_test_xml.txt
deleted file mode 100644
index 19918d0d..00000000
--- a/data/producer_test_xml.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514MessageFrameMessageFrameUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData2088BE A1 00 0059299411642143-1048434120188222552556553501529012720012001-12701000000013013107120471668032713107120471757010321310712047187101236161204719180211874720472142016057162047222701827858204723540194212812047268901655111020472786014881138204728440140310902047285502320520474905013107146204758030131071131071204761070128131071204761650327670
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514AdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-104111312008478278212017121174720181211747001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514Ieee1609Dot2DataIeee1609Dot2DataCOERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData3001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514MessageFrameMessageFrameUPERIeee1609Dot2DataIeee1609Dot2DataCOERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData32088BE A1 00 0059299411642143-1048434120188222552556553501529012720012001-12701000000013013107120471668032713107120471757010321310712047187101236161204719180211874720472142016057162047222701827858204723540194212812047268901655111020472786014881138204728440140310902047285502320520474905013107146204758030131071131071204761070128131071204761650327670
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514Ieee1609Dot2DataIeee1609Dot2DataCOERAdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-1041113120084782782120171211747201812117473001480AD562FA8400039E8E717090F9665FE1BACC37FFFFFFFF0003BBAFDFA1FA1007FFF8000000000020214C1C100417FFFFFFE824E100A3FFFFFFFE8942102047FFFFFFE922A1026A40143FFE95D610423405D7FFEA75610322C0599FFEADFA10391C06B5FFEB7E6103CB40A03FFED2121033BC08ADFFED9A6102E8408E5FFEDE2E102BDC0885FFEDF0A1000BC019BFFF7F321FFFFC005DFFFC55A1FFFFFFFFFFFFDD1A100407FFFFFFFE1A2FFFE0000
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514MessageFrameMessageFrameUPERAdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-1041113120084782782120171211747201812117472088BE A1 00 0059299411642143-1048434120188222552556553501529012720012001-12701000000013013107120471668032713107120471757010321310712047187101236161204719180211874720472142016057162047222701827858204723540194212812047268901655111020472786014881138204728440140310902047285502320520474905013107146204758030131071131071204761070128131071204761650327670
-us.dot.its.jpo.ode.model.OdeAsdPayloadc05b35e8-2245-4c7e-9291-049ce912256510002017-11-14T14:46:09.736Z[UTC]3falsefalse21044.998459-111.04081741.104674-104.111312oneweek127.0.0.1v3userpassword02000127.0.0.2v3userpassword11000127.0.0.3v3userpassword11000008331117812017-12-01T17:47:11-05:002018-12-01T17:47:11-05:1514MessageFrameMessageFrameUPERIeee1609Dot2DataIeee1609Dot2DataCOERAdvisorySituationDataAdvisorySituationDataUPERus.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData156500000000084782783449984590-1110408170411046740-10411131200847827821201712117472018121174732088BE A1 00 0059299411642143-1048434120188222552556553501529012720012001-12701000000013013107120471668032713107120471757010321310712047187101236161204719180211874720472142016057162047222701827858204723540194212812047268901655111020472786014881138204728440140310902047285502320520474905013107146204758030131071131071204761070128131071204761650327670
diff --git a/do_kafka_test.sh b/do_kafka_test.sh
index 6f7e82ec..6ac18503 100755
--- a/do_kafka_test.sh
+++ b/do_kafka_test.sh
@@ -1,18 +1,176 @@
#!/bin/bash
-./start_kafka.sh
-# Wait until Kafka creates our topics.
-while true; do
- ntopics=$(docker exec asn1codec_kafka_1 /opt/kafka/bin/kafka-topics.sh --list --zookeeper 172.17.0.1 | wc -l)
+# This script tests the ACM against a kafka cluster. It starts the Kafka cluster with `start_kafka.sh`,
+# ensures that the topics are created, builds the ACM docker image, initiates tests using
+# `test-scripts/standalone.sh`, and then stops the Kafka cluster with `stop_kafka.sh`.
- if [[ $ntopics == "2" ]]; then
- echo 'Found 2 topics:'
- docker exec asn1codec_kafka_1 /opt/kafka/bin/kafka-topics.sh --list --zookeeper 172.17.0.1 2> /dev/null
+CYAN='\033[0;36m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+ACM_CONTAINER_NAME=test_acm_instance
+ACM_IMAGE_TAG=do-kafka-test-acm-image
+ACM_IMAGE_NAME=asn1_codec-acm
+
+numTests=3
+numFailures=0 # used to keep track of the number of failed tests for the summary
+
+setup() {
+ if [ -z "$DOCKER_HOST_IP" ]
+ then
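+ # use the first private-network address (172.16-31.x or 192.168.x) reported by ifconfig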
+ export DOCKER_HOST_IP=$(ifconfig | grep -A 1 'inet ' | grep -v 'inet6\|127.0.0.1' | awk '{print $2}' | grep -E '^172\.1[6-9]\.|^172\.2[0-9]\.|^172\.3[0-1]\.|^192\.168\.' | head -n 1)
+ fi
+ if [ -z "$DOCKER_HOST_IP" ]
+ then
+ echo "DOCKER_HOST_IP is not set and could not be determined. Exiting."
+ exit 1
+ fi
+
+ # print setup info
+ echo "== Setup Info =="
+ echo "DOCKER_HOST_IP: $DOCKER_HOST_IP"
+ echo "KAFKA_CONTAINER_NAME is resolved dynamically"
+ echo "ACM_CONTAINER_NAME: $ACM_CONTAINER_NAME"
+ echo "ACM_IMAGE_TAG: $ACM_IMAGE_TAG"
+ echo "ACM_IMAGE_NAME: $ACM_IMAGE_NAME"
+ echo "========="
+
+ ./start_kafka.sh
+}
+
+waitForKafkaToCreateTopics() {
+ maxAttempts=100
+ attempts=0
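+ # resolve the Kafka container name dynamically, since docker compose prefixes it with the project name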
+ KAFKA_CONTAINER_NAME=$(docker ps --format '{{.Names}}' | grep kafka)
+ while true; do
+ attempts=$((attempts+1))
+ if [ $(docker ps | grep $KAFKA_CONTAINER_NAME | wc -l) = "0" ]; then
+ echo "Kafka container '$KAFKA_CONTAINER_NAME' is not running. Exiting."
+ ./stop_kafka.sh
+ exit 1
+ fi
+
+ ltopics=$(docker exec $KAFKA_CONTAINER_NAME /opt/kafka/bin/kafka-topics.sh --list --zookeeper 172.17.0.1)
+ allTopicsCreated=true
+ if [ $(echo $ltopics | grep "topic.Asn1DecoderInput" | wc -l) = "0" ]; then
+ allTopicsCreated=false
+ elif [ $(echo $ltopics | grep "topic.Asn1DecoderOutput" | wc -l) = "0" ]; then
+ allTopicsCreated=false
+ elif [ $(echo $ltopics | grep "topic.Asn1EncoderInput" | wc -l) = "0" ]; then
+ allTopicsCreated=false
+ elif [ $(echo $ltopics | grep "topic.Asn1EncoderOutput" | wc -l) = "0" ]; then
+ allTopicsCreated=false
+ fi
- break
+ if [ $allTopicsCreated = true ]; then
+ echo "Kafka has created all required topics"
+ break
+ fi
+
+ sleep 1
+
+ if [ $attempts -ge $maxAttempts ]; then
+ echo "Kafka has not created all required topics after $maxAttempts attempts. Exiting."
+ ./stop_kafka.sh
+ exit 1
+ fi
+ done
+}
+
+buildACMImage() {
+ echo "== Building ACM Image =="
+ docker build . -t $ACM_IMAGE_NAME:$ACM_IMAGE_TAG -f Dockerfile.testing
+}
+
+run_test() {
+ testNumber=$1
+ description=$2
+ dataFile=$3
+
+ echo ""
+ echo -e $YELLOW"Running test $testNumber/$numTests - $description"$NC
+ echo ""
+ ./test-scripts/standalone.sh config/test/c1.properties $dataFile encode $offset
+ if [ $? -eq 0 ]; then
+ echo -e $GREEN"Test $testNumber passed"$NC
+ else
+ echo -e $RED"Test $testNumber failed"$NC
+ numFailures=$((numFailures+1))
+ fi
+
+ offset=$((offset+1))
+}
+
+run_tests() {
+ echo "== Running Tests =="
+
+ offset=0
+ run_test 1 "Encode AdvisorySituationData containing HEX-encoded BSM" data/producer_test1_xml.txt
+ run_test 2 "Encode Ieee1609Dot2Data containing HEX-encoded BSM" data/producer_test2_xml.txt
+ run_test 3 "Encode AdvisorySituationData containing Ieee1609Dot2Data containing HEX-encoded BSM" data/producer_test3_xml.txt
+}
+
+cleanup() {
+ echo "== Cleaning Up =="
+ ./stop_kafka.sh
+}
+
+run() {
+ numberOfSteps=5
+ echo ""
+ echo -e $CYAN"Step 1/$numberOfSteps: Set up test environment"$NC
+ setup
+
+ echo ""
+ echo -e $CYAN"Step 2/$numberOfSteps: Wait for Kafka to create topics"$NC
+ waitForKafkaToCreateTopics
+
+ echo ""
+ echo -e $CYAN"Step 3/$numberOfSteps: Build ACM image"$NC
+ buildACMImage
+
+ echo ""
+ echo -e $CYAN"Step 4/$numberOfSteps: Run tests"$NC
+ run_tests
+
+ echo ""
+ echo -e $CYAN"Step 5/$numberOfSteps: Clean up test environment"$NC
+ cleanup
+
+ printTestSummary
+}
+
+printTestSummary() {
+ echo ""
+ echo -e $CYAN"== Tests Summary =="$NC
+ if [ $numFailures -eq 0 ]; then
+ echo -e $GREEN"All tests passed"$NC
+ else
+ echo -e $RED"$numFailures/$numTests tests failed"$NC
fi
+ echo ""
+}
+
+echo ""
- sleep 1
-done
+run
-./test-scripts/standalone.sh config/test/c1.properties data/producer_test_xml.txt encode 0
+echo ""
\ No newline at end of file
diff --git a/docker-compose-kafka.yml b/docker-compose-kafka.yml
new file mode 100644
index 00000000..e2640a9d
--- /dev/null
+++ b/docker-compose-kafka.yml
@@ -0,0 +1,16 @@
+version: '2'
+services:
+ zookeeper:
+ image: wurstmeister/zookeeper
+ ports:
+ - "2181:2181"
+ kafka:
+ image: wurstmeister/kafka
+ ports:
+ - "9092:9092"
+ environment:
+ KAFKA_ADVERTISED_HOST_NAME: ${DOCKER_HOST_IP}
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
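+ # each topic entry is "name:partitions:replicas"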
+ KAFKA_CREATE_TOPICS: "j2735asn1xer:1:1,j2735asn1per:1:1,topic.Asn1DecoderInput:1:1,topic.Asn1DecoderOutput:1:1,topic.Asn1EncoderInput:1:1,topic.Asn1EncoderOutput:1:1"
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 0df84617..935a661a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -11,7 +11,7 @@ services:
environment:
KAFKA_ADVERTISED_HOST_NAME: ${DOCKER_HOST_IP}
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
- KAFKA_CREATE_TOPICS: "j2735asn1xer:1:1,j2735asn1per:1:1,topic.Asn1DecoderInput:1:1, topic.Asn1DecoderOutput:1:1"
+ KAFKA_CREATE_TOPICS: "j2735asn1xer:1:1,j2735asn1per:1:1,topic.Asn1DecoderInput:1:1,topic.Asn1DecoderOutput:1:1,topic.Asn1EncoderInput:1:1,topic.Asn1EncoderOutput:1:1"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
asn1_codec:
diff --git a/docker-test/do_test.sh b/docker-test/do_test.sh
index fe87d09c..c2447196 100755
--- a/docker-test/do_test.sh
+++ b/docker-test/do_test.sh
@@ -1,28 +1,64 @@
#!/bin/bash
+
+# This script assumes that the ACM is already running in a container and is configured to point to a local Kafka cluster.
+# It prepares input data with `test_in.py` and produces it to the ACM's input topic with `kafka_tool`. It then consumes
+# the output topic with `kafka_tool` and checks the result with `test_out.py`. If the output does not contain the
+# expected data, or if `kafka_tool` runs into a problem, the script exits with an error that is reported back to the caller.
+
+# This script is used by `standalone.sh`
+
export LD_LIBRARY_PATH=/usr/local/lib
-broker=172.17.0.1:9092
+echo ""
+
+DOCKER_HOST_IP=$2
+
+# if DOCKER_HOST_IP is not set, warn and exit
+if [ -z "$DOCKER_HOST_IP" ]
+then
+ echo "DOCKER_HOST_IP is not set. Exiting."
+ exit 1
+fi
echo "**************************"
echo "Producing Data ..."
echo "**************************"
-cat /asn1_codec_data/test.data | /asn1_codec/docker-test/test_in.py | /build/kafka-test/kafka_tool -P -b $broker -p 0 -t j2735asn1per 2> priv.err
+# Produce data with test_in.py and pipe it to kafka_tool, which sends the data to the topic.
+cat /asn1_codec_data/test.data | /asn1_codec/docker-test/test_in.py | /build/kafka-test/kafka_tool -P -b $DOCKER_HOST_IP:9092 -p 0 -t topic.Asn1EncoderInput 2> prod.err
+if [ $? -ne 0 ]; then
+ cat prod.err
+ echo "Failed to produce data. Exiting."
+ exit 1
+fi
# Start the DI consumer.
offset=$1
echo "**************************"
-echo "Consuming Data at offset "$offset "..."
+echo "Consuming Data at offset "$offset "from topic.Asn1EncoderOutput targeting "$DOCKER_HOST_IP" ..."
echo "**************************"
while true; do
- /build/kafka-test/kafka_tool -C -b $broker -p 0 -t j2735asn1xer -e -o $offset 2> con.err | /asn1_codec/docker-test/test_out.py > tmp.out
+ # Consume data from the topic with kafka_tool and pipe it to test_out.py, then write the output to tmp.out
+ /build/kafka-test/kafka_tool -C -b $DOCKER_HOST_IP:9092 -p 0 -t topic.Asn1EncoderOutput -e -o $offset 2> con.err | /asn1_codec/docker-test/test_out.py > tmp.out
+ if [ $? -ne 0 ]; then
+ cat con.err
+ echo "Failed to consume data. Exiting."
+ exit 1
+ fi
+ # Check if the output is not empty
lines=$(cat tmp.out | wc -l)
+ # If the output is not empty, print the output and break the loop
if [[ $lines != "0" ]]; then
cat tmp.out
break
fi
+
+ # If the output is empty, print the error message and retry
+ cat con.err
done
+
+echo ""
\ No newline at end of file
diff --git a/docker-test/test_in.py b/docker-test/test_in.py
index ec26fb4b..87f92a43 100755
--- a/docker-test/test_in.py
+++ b/docker-test/test_in.py
@@ -1,21 +1,32 @@
#!/usr/bin/env python3
+# This script reads XML data from stdin, extracts the encodings from the metadata, and writes the data to stdout.
+
+# This script is used by `do_test.sh` to prepare the input data and pipe it during data processing.
+
from __future__ import print_function
-import xml.etree.ElementTree as et
+import xml.etree.ElementTree as elementTree
import sys
-for l in sys.stdin:
+exceptionOccurred = False
+
+for line in sys.stdin:
try:
- root = et.fromstring(l.strip())
+ rootElement = elementTree.fromstring(line.strip())
print('Encodings', file=sys.stderr)
- for encoding in root.findall('./metadata/encodings/encodings'):
+ for encoding in rootElement.findall('./metadata/encodings/encodings'):
print(' Name: {}'.format(encoding[0].text), file=sys.stderr)
print(' Type: {}'.format(encoding[2].text), file=sys.stderr)
except Exception as e:
+ print("Exception occurred in 'test_in.py'...")
+ exceptionOccurred = True
continue
- sys.stdout.write(l)
+ sys.stdout.write(line)
+
+if exceptionOccurred:
+ sys.exit(1)
\ No newline at end of file
diff --git a/docker-test/test_out.py b/docker-test/test_out.py
index 0430383e..d5cc6ddd 100755
--- a/docker-test/test_out.py
+++ b/docker-test/test_out.py
@@ -1,17 +1,23 @@
#!/usr/bin/env python3
+# This script reads XML data from stdin, prints the payload data to stdout, and writes any exceptions to stderr.
+
+# This script is used by `do_test.sh` to process the output data and pipe it during data processing.
+
from __future__ import print_function
-import xml.etree.ElementTree as et
+import xml.etree.ElementTree as elementTree
import sys
+exceptionOccurred = False
+
for l in sys.stdin:
try:
- root = et.fromstring(l.strip())
+ rootElement = elementTree.fromstring(l.strip())
print('****PayloadData****')
- data = list(root.findall('./payload/data'))[0]
+ data = list(rootElement.findall('./payload/data'))[0]
for frame in data:
name = frame.tag
@@ -21,9 +27,14 @@
print(frame[0].text)
else:
print('**Type: {} XML**'.format(name))
- print(et.tostring(frame))
+ print(elementTree.tostring(frame, encoding='unicode'))
print('****')
except Exception as e:
+ print("Exception occurred in 'test_out.py'...")
+ exceptionOccurred = True
continue
+
+if exceptionOccurred:
+ sys.exit(1)
\ No newline at end of file
diff --git a/docs/testing.md b/docs/testing.md
index bc1bd0e8..287455e3 100644
--- a/docs/testing.md
+++ b/docs/testing.md
@@ -1,7 +1,8 @@
# Testing the ACM
-There are currently two ways to test the capabilities of the ACM.
+There are currently three ways to test the capabilities of the ACM.
- [Unit Testing](#unit-testing)
+- [Kafka Test Script](#kafka-test-script)
- [Standalone Operation / Testing](#standalone-testing)
## Unit Testing
@@ -34,6 +35,26 @@ $ cd /build
$ ./acm_tests
```
+## Kafka Test Script
+The [do_kafka_test.sh](../do_kafka_test.sh) script performs integration tests of the ACM against a live Kafka instance. To execute the tests, it relies on the following scripts: `standalone.sh`, `do_test.sh`, `test_in.py`, and `test_out.py`.
+
+To ensure proper execution, run this script outside of the dev container, where Docker is available. The script spins up a standalone Kafka instance and cannot reach the Docker daemon from within the dev container.
+
+Note that this script and the scripts it depends on must use the LF end-of-line sequence. These include the following:
+- do_kafka_test.sh
+- standalone.sh
+- do_test.sh
+- test_in.py
+- test_out.py
+
+The DOCKER_HOST_IP environment variable must be set to the IP address of the host machine; the script will not function without it. It can be set with the following command:
+
+```
+export DOCKER_HOST_IP=$(ifconfig | zgrep -m 1 -oP '(?<=inet\s)\d+(\.\d+){3}')
+```
+
+If not set, the script will attempt to resolve the IP address and will exit if it is unable to do so.
+
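+With the variable set, the full test suite can be run from the repository root. A minimal invocation (`do_kafka_test.sh` takes no arguments; the IP below is an example value, substitute your host machine's address):
+
+```
+$ export DOCKER_HOST_IP=192.168.0.10
+$ ./do_kafka_test.sh
+```
+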
## Standalone Operation / Testing
If the `-F` option is used, the ACM will assume its first operand is a filename, attempt to open that file, and decode or
diff --git a/kafka-test/README.md b/kafka-test/README.md
new file mode 100644
index 00000000..b40cf4d2
--- /dev/null
+++ b/kafka-test/README.md
@@ -0,0 +1,2 @@
+# Kafka Test
+This project is used by the `do_test.sh` script (called by `standalone.sh`) as part of the `do_kafka_test.sh` script to test integration with Kafka.
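+
+The `kafka_tool` binary built by this project can also produce to and consume from a topic directly. A typical round trip, using the same flags `do_test.sh` passes (the broker address is an example):
+
+```
+# produce stdin to a topic
+echo 'test' | /build/kafka-test/kafka_tool -P -b 172.17.0.1:9092 -p 0 -t topic.Asn1EncoderInput
+# consume from offset 0, exiting at the end of the partition
+/build/kafka-test/kafka_tool -C -b 172.17.0.1:9092 -p 0 -t topic.Asn1EncoderOutput -e -o 0
+```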
\ No newline at end of file
diff --git a/kafka-test/src/rdkafka_example.cpp b/kafka-test/src/rdkafka_example.cpp
index 882264db..352ea118 100644
--- a/kafka-test/src/rdkafka_example.cpp
+++ b/kafka-test/src/rdkafka_example.cpp
@@ -113,6 +113,8 @@ static void metadata_print (const std::string &topic,
static bool run = true;
static bool exit_eof = false;
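+// Stop the consume loop after a fixed number of timed-out polls so a test run cannot hang indefinitely.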
+static int numTimeouts = 0;
+static int maxTimeouts = 10;
static void sigterm (int sig) {
run = false;
@@ -182,6 +184,8 @@ class MyHashPartitionerCb : public RdKafka::PartitionerCb {
void msg_consume(RdKafka::Message* message, void* opaque) {
switch (message->err()) {
case RdKafka::ERR__TIMED_OUT:
+ std::cerr << "RdKafka::ERR__TIMED_OUT" << std::endl;
+ numTimeouts++;
break;
case RdKafka::ERR_NO_ERROR:
@@ -213,6 +217,11 @@ void msg_consume(RdKafka::Message* message, void* opaque) {
std::cerr << "Consume failed: " << message->errstr() << std::endl;
run = false;
}
+
+ if (numTimeouts >= maxTimeouts) {
+ std::cerr << "Maximum number of timeouts reached" << std::endl;
+ run = false;
+ }
}
diff --git a/start_kafka.sh b/start_kafka.sh
index 1ebad0cb..0cfdbd13 100755
--- a/start_kafka.sh
+++ b/start_kafka.sh
@@ -1,7 +1,6 @@
#!/bin/bash
-# Start the docker services.
-docker-compose stop
-docker-compose rm -f -v
-docker-compose up --build -d
-docker ps
+./stop_kafka.sh
+
+# start kafka
+docker compose -f docker-compose-kafka.yml up -d
\ No newline at end of file
diff --git a/stop_kafka.sh b/stop_kafka.sh
new file mode 100644
index 00000000..cf9f625b
--- /dev/null
+++ b/stop_kafka.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# stop kafka
+docker compose -f docker-compose-kafka.yml down --remove-orphans
\ No newline at end of file
diff --git a/test-scripts/standalone.sh b/test-scripts/standalone.sh
index c9fe585c..97cbbc07 100755
--- a/test-scripts/standalone.sh
+++ b/test-scripts/standalone.sh
@@ -1,8 +1,48 @@
#!/bin/bash
+# This script runs the standalone test for the ACM. It starts the ACM in a new container, produces
+# and consumes the test data with `do_test.sh`, and then stops the ACM container. Finally, it
+# reports the result of the test back to the caller.
+
+# This script is used by `do_kafka_test.sh`
+
+ACM_CONTAINER_NAME=test_acm_instance
+ACM_IMAGE_TAG=do-kafka-test-acm-image
+ACM_IMAGE_NAME=asn1_codec-acm
+
+startACMContainer() {
+ # make sure the Docker host IP can be pinged before starting the container
+ if ! ping -c 1 $DOCKER_HOST_IP &> /dev/null; then
+ echo "Docker host ip $DOCKER_HOST_IP is not pingable. Exiting."
+ exit 1
+ fi
+ echo "Starting ACM in new container '$ACM_CONTAINER_NAME'"
+ docker run --name $ACM_CONTAINER_NAME --env ACM_LOG_TO_CONSOLE=true --env ACM_LOG_LEVEL=DEBUG -v /tmp/docker-test/asn1-codec/data:/asn1_codec_data -it --rm -p '8080:8080' -d $ACM_IMAGE_NAME:$ACM_IMAGE_TAG /asn1_codec/docker-test/acm_standalone.sh $TYPE
+
+ echo "Waiting for $ACM_CONTAINER_NAME to spin up"
+ # while num lines of docker logs is less than 100, sleep 1
+ secondsToWait=10
+ sleep $secondsToWait # TODO: use check like log count to determine when to stop waiting
+ echo "$ACM_CONTAINER_NAME is ready after $secondsToWait seconds"
+
+ if [ $(docker ps | grep $ACM_CONTAINER_NAME | wc -l) == "0" ]; then
+ echo "ACM container '$ACM_CONTAINER_NAME' is not running. Exiting."
+ exit 1
+ fi
+}
+
+stopACMContainer() {
+ echo "Stopping and removing existing container if it exists"
+ docker stop $ACM_CONTAINER_NAME > /dev/null 2>&1
+ docker rm $ACM_CONTAINER_NAME > /dev/null 2>&1
+}
+
# There are two input files: CONFIG, TEST_DATA.
-# Offset is the offset in the topic that will be consumed and displayed in the
-# output
+# Offset is the offset in the topic that will be consumed and displayed in the output
# Type is the type of operation (encode or decode)
USAGE="standalone.sh [CONFIG] [TEST_FILE] [TYPE] [OFFSET]"
@@ -41,9 +81,22 @@ fi
mkdir -p /tmp/docker-test/asn1-codec/data
# Copy the config to the test data.
-# TODO replace map file line: sed -i '/TEXT_TO_BE_REPLACED/c\This line is removed by the admin.' /tmp/foo
cp $1 /tmp/docker-test/asn1-codec/data/config.properties
+# identify operating system
+OS=$(uname)
+if [ $OS = "Darwin" ]; then
+ # change metadata.broker.list to value of DOCKER_HOST_IP
+ sed -i '' "s/metadata.broker.list=.*/metadata.broker.list=$DOCKER_HOST_IP:9092/" /tmp/docker-test/asn1-codec/data/config.properties
+ # change acm.type to encode or decode depending on the type
+ sed -i '' "s/acm.type=.*/acm.type=$TYPE/" /tmp/docker-test/asn1-codec/data/config.properties
+else
+ # change metadata.broker.list to value of DOCKER_HOST_IP
+ sed -i "s/metadata.broker.list=.*/metadata.broker.list=$DOCKER_HOST_IP:9092/" /tmp/docker-test/asn1-codec/data/config.properties
+ # change acm.type to encode or decode depending on the type
+ sed -i "s/acm.type=.*/acm.type=$TYPE/" /tmp/docker-test/asn1-codec/data/config.properties
+fi
+
# Copy the data.
cp $2 /tmp/docker-test/asn1-codec/data/test.data
@@ -51,11 +104,21 @@ echo "**************************"
echo "Running standalone test with "$1 $2 $3 $4
echo "**************************"
-# Start the PPM in a new container.
-docker run --name acm_kafka -v /tmp/docker-test/asn1-codec/data:/asn1_codec_data -it --rm -p '8080:8080' -d asn1codec_asn1_codec:latest /asn1_codec/docker-test/acm_standalone.sh $TYPE > /dev/null
+# Stop and remove existing container if it exists
+stopACMContainer
+
+# Start the ACM in a new container.
+startACMContainer
+
+# Produce and consume the test data.
+docker exec $ACM_CONTAINER_NAME /asn1_codec/docker-test/do_test.sh $OFFSET $DOCKER_HOST_IP
+
+# return 1 if the test fails
+if [ $? -ne 0 ]; then
+ docker stop $ACM_CONTAINER_NAME > /dev/null
+ exit 1
+fi
-sleep 10
+docker stop $ACM_CONTAINER_NAME > /dev/null
-# Produce the test data.
-docker exec acm_kafka /asn1_codec/docker-test/do_test.sh $OFFSET
-docker stop acm_kafka > /dev/null
+echo ""