Skip to content

Commit

Permalink
Updating dependencies
Browse files Browse the repository at this point in the history
  • Loading branch information
bendbennett committed Dec 1, 2024
1 parent c0365b6 commit c68be5e
Show file tree
Hide file tree
Showing 43 changed files with 2,253 additions and 2,180 deletions.
14 changes: 6 additions & 8 deletions .env.dist
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
MYSQL_DBNAME=go-api-demo
MYSQL_DBNAME=go_api_demo
MYSQL_HOST=localhost
MYSQL_PASSWORD=password
MYSQL_PORT=3306
Expand All @@ -7,12 +7,10 @@ MYSQL_USER=user
STORAGE_TYPE=sql
STORAGE_QUERY_TIMEOUT=3s

METRICS_ENABLED=true
METRICS_COLLECTION_INTERVAL=5s

LOGGING_PRODUCTION=false

TRACING_ENABLED=true
TELEMETRY_ENABLED=true
TELEMETRY_METRICS_COLLECTION_INTERVAL=5s

JAEGER_REPORTER_LOG_SPANS=false
JAEGER_SAMPLER_TYPE=const
Expand All @@ -22,13 +20,13 @@ JAEGER_SERVICE_NAME=go-api-demo
KAFKA_BROKERS=localhost:9092

KAFKA_USER_CONSUMER_CACHE_GROUP_ID=user-consumer-cache-group-id
KAFKA_USER_CONSUMER_CACHE_TOPIC=go_api_demo_db.go-api-demo.users
KAFKA_USER_CONSUMER_CACHE_TOPIC=mysql.go_api_demo.users
KAFKA_USER_CONSUMER_CACHE_MAX_WAIT=1s
KAFKA_USER_CONSUMER_CACHE_REBALANCE_TIMEOUT"=1s
KAFKA_USER_CONSUMER_CACHE_REBALANCE_TIMEOUT=1s
KAFKA_USER_CONSUMER_CACHE_IS_ENABLED=true

KAFKA_USER_CONSUMER_SEARCH_GROUP_ID=user-consumer-search-group-id
KAFKA_USER_CONSUMER_SEARCH_TOPIC=go_api_demo_db.go-api-demo.users
KAFKA_USER_CONSUMER_SEARCH_TOPIC=mysql.go_api_demo.users
KAFKA_USER_CONSUMER_SEARCH_MAX_WAIT=1s
KAFKA_USER_CONSUMER_SEARCH_REBALANCE_TIMEOUT=1s
KAFKA_USER_CONSUMER_SEARCH_IS_ENABLED=true
Expand Down
12 changes: 12 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# See GitHub's documentation for more information on this file:
# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "gomod"
directory: "/"
schedule:
interval: "daily"
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
bin/go-api-demo
generated/*.protoset
test/coverage.txt
.vscode
10 changes: 7 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
SERVICE_NAME=go-api-demo
MYSQL_DSN=mysql://user:password@tcp(127.0.0.1:3306)/go-api-demo
MYSQL_DSN=mysql://user:password@tcp(127.0.0.1:3306)/go_api_demo
MYSQL_MIGRATION_PATH=internal/storage/mysql/migrations
export HOST_IP=${shell ipconfig getifaddr en0}

Expand Down Expand Up @@ -63,11 +63,15 @@ endif

.PHONY: docker-up
docker-up: .env
docker compose -f docker/dev/docker-compose.yml up -d
docker compose -f docker/dev/docker-compose.yml build --no-cache
docker image prune -f
docker compose -f docker/dev/docker-compose.yml up -d --force-recreate

.PHONY: docker-down
docker-down:
docker rm --force -v go-api-demo-connect go-api-demo-grafana go-api-demo-prometheus go-api-demo-jaeger go-api-demo-redis go-api-demo-elastic go-api-demo-zookeeper go-api-demo-kowl go-api-demo-db go-api-demo-kafka go-api-demo-schema-registry
docker rm --force -v go-api-demo-connect go-api-demo-grafana go-api-demo-prometheus \
go-api-demo-jaeger go-api-demo-redis go-api-demo-elastic go-api-demo-zookeeper \
go-api-demo-kowl go-api-demo-db go-api-demo-kafka go-api-demo-schema-registry
docker compose -f docker/dev/docker-compose.yml down

################################################################################
Expand Down
49 changes: 33 additions & 16 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ is used as a basis for event-driven cache and search engine population whilst av
| [v0.10.0](#v0.10.0) | <ul><li>Adds tracing for Redis, Elasticsearch and the Kafka consumers.</li><li>Switch to <a href="https://github.com/segmentio/kafka-go">kafka-go</a>.</li><li>Adds basic metrics and dashboard for the Kafka consumers.</li></ul> |
| [v0.11.0](#v0.11.0) | <ul><li>Uses 2 Kafka partitions for CDC for MySQL _users_ table.</li><li>Uses 2 consumers for populating Elasticsearch.</li></ul> |
| [v0.12.0](#v0.12.0) | Uses Avro Schema for serialization and deserialization. |
| [v0.13.0](#v0.13.0) | Updating dependencies. |

### Set-up

Expand Down Expand Up @@ -57,9 +58,21 @@ The _Makefile_ contains commands for building, running and testing the API.
* `make test` runs the linter then the tests (see [Tests](#tests)).
* `make migrate-up` runs the database migrations for MySQL (see [v0.3.0](#v0.3.0)).

### <a name="setup"></a>Setup

There is a delay between running `make docker-up` and all necessary infrastructure being
available to run the tests and the API. This delay can result in errors when running
the integration tests. Execute `make run` once the tests run successfully.

```
make docker-up
make test
make run
```

### <a name="tests"></a>Tests

Install [testify](https://github.com/stretchr/testify#installation) then run
Install [testify](https://github.com/stretchr/testify#installation) then run:

```
make docker-up
Expand All @@ -76,20 +89,24 @@ Alternatively, requests can be issued using cURL and
[gRPCurl](https://github.com/fullstorydev/grpcurl) (see [v0.2.0](#v0.2.0),
[v0.3.0](#v0.3.0), [v0.4.0](#v0.4.0)).

## <a name="v0.13.0"></a>v0.13.0

Version `0.13.0` has been updated to use more recent versions of packages and Docker images. A switch from OpenTracing to OpenTelemetry has also been implemented.

## <a name="v0.12.0"></a>v0.12.0

Uses Avro Schema for the serialization of users during CDC and deserialization when
the Kafka consumers populate Redis and Elasticsearch.

### Set-up

There is a delay between running docker-up and all necessary infrastructure being
available to run the tests and the API. This delay can result in errors when running
the integration tests. Execute `make run` once the tests run successfully.
Review the notes for [setup](#setup).

make docker-up
make test
make run
```
make docker-up
make test
make run
```

The schema are visible through Kowl.

Expand All @@ -103,20 +120,20 @@ Adds 2 Kafka consumers for populating Elasticsearch.

### Set-up

There is a delay between running docker-up and all necessary infrastructure being
available to run the tests and the API. This delay can result in errors when running
the integration tests. Execute `make run` once the tests run successfully.
Review the notes for [setup](#setup).

make docker-up
make test
make run
```
make docker-up
make test
make run
```

Running (see [docker](#k6_docker_post) or [local](#k6_local_post)) the [k6](https://k6.io/) script will send 50
requests per second (RPS) to the `POST /user` endpoint for 5 minutes.

#### <a name="k6_docker_post"></a>Docker

docker run -e HOST=host.docker.internal -i loadimpact/k6 run - <k6/post.js
docker run -e HOST=host.docker.internal -i grafana/k6 run - <k6/post.js

#### <a name="k6_local_post"></a>Local

Expand Down Expand Up @@ -185,7 +202,7 @@ requests per second (RPS) to the `POST /user` endpoint for 5 minutes.

#### <a name="k6_docker_post"></a>Docker

docker run -e HOST=host.docker.internal -i loadimpact/k6 run - <k6/post.js
docker run -e HOST=host.docker.internal -i grafana/k6 run - <k6/post.js

#### <a name="k6_local_post"></a>Local

Expand Down Expand Up @@ -545,7 +562,7 @@ for the [Grafana](http://localhost:3456) dashboard.

#### <a name="k6_docker"></a>Docker

docker run -e HOST=host.docker.internal -i loadimpact/k6 run - <k6/get.js
docker run -e HOST=host.docker.internal -i grafana/k6 run - <k6/get.js

#### <a name="k6_local"></a>Local

Expand Down
25 changes: 23 additions & 2 deletions docker/connect/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,27 @@
FROM debezium/connect:1.7
FROM debezium/connect:3.0.0.Final

COPY docker/connect/entrypoint-wrap.sh /entrypoint-wrap.sh
COPY docker/connect/debezium-mysql.json /debezium-mysql.json
COPY docker/connect/entrypoint-wrap.sh /entrypoint-wrap.sh

ENV KAFKA_CONNECT_PLUGINS_DIR=$KAFKA_HOME/connect \
EXTERNAL_LIBS_DIR=$KAFKA_HOME/external_libs \
CONNECT_PLUGIN_PATH=$KAFKA_CONNECT_PLUGINS_DIR \
MAVEN_DEP_DESTINATION=$KAFKA_HOME/libs \
CONFLUENT_VERSION=7.7.1 \
AVRO_VERSION=1.12.0 \
GUAVA_VERSION=33.3.1-jre \
GUAVA_FAILURE_ACCESS_VERSION=1.0.2

RUN docker-maven-download confluent kafka-connect-avro-converter "$CONFLUENT_VERSION" 262d3d4e880e78fa6bb00469fdf10489 && \
docker-maven-download confluent kafka-connect-avro-data "$CONFLUENT_VERSION" a0b8b5ac8782fc815b059965ae634706 && \
docker-maven-download confluent kafka-avro-serializer "$CONFLUENT_VERSION" c335c87e25ae347086631092a07d62bd && \
docker-maven-download confluent kafka-schema-serializer "$CONFLUENT_VERSION" 56c7911fa0561ed839fa8d9c491cc5f4 && \
docker-maven-download confluent kafka-schema-registry-client "$CONFLUENT_VERSION" bac9144955be1c0c371e6654f6572a35 && \
docker-maven-download confluent kafka-schema-converter "$CONFLUENT_VERSION" da55aa8de34c16dff1b8537aff958fe2 && \
docker-maven-download confluent common-config "$CONFLUENT_VERSION" 03052d98b8ff2c72f5f57b45c00becb4 && \
docker-maven-download confluent common-utils "$CONFLUENT_VERSION" 2ea73bcfa8c0c719d150bfacdd419ae5 && \
docker-maven-download central org/apache/avro avro "$AVRO_VERSION" 21fa3115ff1dc131ca6432bc73927fa5 && \
docker-maven-download central com/google/guava guava "$GUAVA_VERSION" 7b7d80d99af4181db55b00dad50a91bb && \
docker-maven-download central com/google/guava failureaccess "$GUAVA_FAILURE_ACCESS_VERSION" 3f75955b49b6758fd6d1e1bd9bf777b3

CMD ["/entrypoint-wrap.sh"]
19 changes: 8 additions & 11 deletions docker/connect/debezium-mysql.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,27 +11,24 @@
"database.password": "root",
"database.server.id": "100000",
"database.server.name": "go_api_demo_db",
"database.include.list": "go-api-demo",
"database.include.list": "go_api_demo",
"database.history.kafka.bootstrap.servers": "go-api-demo-kafka:9092",
"database.history.kafka.topic": "schema-changes.go-api-demo",
"database.history.kafka.topic": "mysql.database_history.go_api_demo",
"database.allowPublicKeyRetrieval":"true",

"topic.prefix": "mysql",

"topic.creation.default.replication.factor": 1,
"topic.creation.default.partitions": 1,
"topic.creation.default.cleanup.policy": "compact",
"topic.creation.default.compression.type": "lz4",

"topic.creation.groups": "users",

"topic.creation.users.include": "go_api_demo_db\\.go-api-demo\\.users",
"topic.creation.users.replication.factor": 1,
"topic.creation.users.partitions": 2,
"topic.creation.users.cleanup.policy": "compact",
"topic.creation.users.compression.type": "producer",

"key.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"key.converter.schema.registry.url": "http://go-api-demo-schema-registry:8081",
"value.converter.schema.registry.url": "http://go-api-demo-schema-registry:8081"
"value.converter.schema.registry.url": "http://go-api-demo-schema-registry:8081",

"schema.history.internal.kafka.bootstrap.servers":"go-api-demo-kafka:9092",
"schema.history.internal.kafka.topic":"mysql.schema_history.go_api_demo"
}
}
10 changes: 10 additions & 0 deletions docker/connect/entrypoint-wrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,16 @@ bash -c '
uri=http://go-api-demo-connect:8083/connectors
text_break="\n=============\n"
domain=http://go-api-demo-schema-registry:8081
echo -e "\n${text_break}Waiting for Schema Registry to start listening on ${domain}${text_break}"
while [ $(curl -s -o /dev/null -w %{http_code} ${domain}) -ne 200 ] ; do
echo -e "\n${text_break}$(date) Schema Registry listener HTTP state: ${http_code}" \
$(curl -s -o /dev/null -w %{http_code} ${domain})" (waiting for 200)${text_break}"
sleep 5
done
echo -e "\n${text_break}Waiting for Kafka Connect to start listening on ${uri}${text_break}"
while [ $(curl -s -o /dev/null -w %{http_code} ${uri}) -ne 200 ] ; do
Expand Down
Loading

0 comments on commit c68be5e

Please sign in to comment.