From 436f15e460f0917ab71791dc8d1299f22f104051 Mon Sep 17 00:00:00 2001 From: Cerchie Date: Tue, 10 Dec 2024 08:24:40 -0700 Subject: [PATCH 1/2] migrate to new docker compose cmd --- docker-compose.yml | 266 +++++++++--------- scripts/sbc/add-broker.sh | 2 +- .../sbc/{docker-compose.yml => compose.yaml} | 47 ++-- .../sbc/validate_sbc_add_broker_completed.sh | 2 +- ...alidate_sbc_add_broker_plan_computation.sh | 2 +- .../validate_sbc_add_broker_reassignment.sh | 2 +- .../sbc/validate_sbc_kill_broker_completed.sh | 4 +- .../sbc/validate_sbc_kill_broker_started.sh | 2 +- scripts/start.sh | 28 +- scripts/validate/validate_rest_proxy.sh | 24 +- 10 files changed, 186 insertions(+), 193 deletions(-) rename scripts/sbc/{docker-compose.yml => compose.yaml} (86%) diff --git a/docker-compose.yml b/docker-compose.yml index 4eed76cd9..9812be32b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,18 +1,17 @@ -# docker-compose supports environment variable substitution with the ${VARIABLE-NAME} syntax. +# docker compose supports environment variable substitution with the ${VARIABLE-NAME} syntax. # Environment variables can be sourced in a variety of ways. One of those ways is through -# a well known '.env' file located in the same folder as the docker-compose.yml file. See the Docker +# a well known '.env' file located in the same folder as the compose.yaml file. See the Docker # documentation for details: https://docs.docker.com/compose/environment-variables/#the-env-file -# +# # This feature is being used to parameterize some values within this file. In this directory is also # a .env file, which is actually a symbolic link to /utils/config.env. That file -# contains values which get substituted here when docker-compose parses this file. +# contains values which get substituted here when docker compose parses this file. # -# If you'd like to view the docker-compose.yml file rendered with its environment variable substitutions -# you can execute the `docker-compose config` command. Take note that some demos provide additional -# environment variable values by exporting them in a script prior to running `docker-compose up`. +# If you'd like to view the compose.yaml file rendered with its environment variable substitutions +# you can execute the `docker compose config` command. Take note that some demos provide additional +# environment variable values by exporting them in a script prior to running `docker compose up`. --- services: - zookeeper: image: ${REPOSITORY}/cp-zookeeper:${CONFLUENT_DOCKER_TAG} restart: always @@ -68,11 +67,11 @@ services: hostname: openldap container_name: openldap environment: - LDAP_ORGANISATION: "ConfluentDemo" - LDAP_DOMAIN: "confluentdemo.io" - LDAP_BASE_DN: "dc=confluentdemo,dc=io" + LDAP_ORGANISATION: "ConfluentDemo" + LDAP_DOMAIN: "confluentdemo.io" + LDAP_BASE_DN: "dc=confluentdemo,dc=io" volumes: - - ./scripts/security/ldap_users:/container/service/slapd/assets/config/bootstrap/ldif/custom + - ./scripts/security/ldap_users:/container/service/slapd/assets/config/bootstrap/ldif/custom command: "--copy-service --loglevel debug" kafka1: @@ -93,7 +92,7 @@ services: - ./scripts/security/keypair:/tmp/conf - ./scripts/helper:/tmp/helper - ./scripts/security:/etc/kafka/secrets - command: "bash -c 'if [ ! 
-f /etc/kafka/secrets/kafka.kafka1.keystore.jks ]; then echo \"ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)\" && exit 1 ; else /etc/confluent/docker/run ; fi'" + command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka1.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi''' ports: - 8091:8091 - 9091:9091 @@ -102,7 +101,7 @@ services: - 12091:12091 environment: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2182 - KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: 'true' + KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: "true" # Broker uses TLSv1.2 by-default for ZooKeeper TLS connections # See note for ZOOKEEPER_SSL_ENABLED_PROTOCOLS regarding TLS 1.3 support @@ -117,11 +116,11 @@ services: KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka1.truststore.jks KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_TYPE: JKS - KAFKA_ZOOKEEPER_SET_ACL: 'true' + KAFKA_ZOOKEEPER_SET_ACL: "true" KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:SASL_PLAINTEXT,TOKEN:SASL_SSL,SSL:SSL,CLEAR:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka1:9091,TOKEN://kafka1:10091,SSL://kafka1:11091,CLEAR://kafka1:12091 - + KAFKA_SUPER_USERS: User:admin;User:mds;User:superUser;User:ANONYMOUS KAFKA_LOG4J_LOGGERS: "kafka.authorizer.logger=INFO" KAFKA_LOG4J_ROOT_LOGLEVEL: INFO @@ -135,19 +134,19 @@ services: KAFKA_LISTENER_NAME_INTERNAL_SASL_ENABLED_MECHANISMS: PLAIN KAFKA_LISTENER_NAME_INTERNAL_PLAIN_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.plain.PlainLoginModule required \ - username="admin" \ - password="admin-secret" \ - user_admin="admin-secret" \ - user_mds="mds-secret"; + org.apache.kafka.common.security.plain.PlainLoginModule required \ + username="admin" \ + password="admin-secret" \ + user_admin="admin-secret" \ + user_mds="mds-secret"; # Configure TOKEN listener for Confluent Platform components and impersonation KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerValidatorCallbackHandler KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerServerLoginCallbackHandler KAFKA_LISTENER_NAME_TOKEN_SASL_ENABLED_MECHANISMS: OAUTHBEARER KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - publicKeyPath="/tmp/conf/public.pem"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + publicKeyPath="/tmp/conf/public.pem"; KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2 KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 2 @@ -157,8 +156,8 @@ services: KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 2 KAFKA_CONFLUENT_BALANCER_HEAL_BROKER_FAILURE_THRESHOLD_MS: 30000 - KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false' + KAFKA_DELETE_TOPIC_ENABLE: "true" + KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false" KAFKA_DEFAULT_REPLICATION_FACTOR: 2 # Confluent Health+ @@ -176,14 +175,15 @@ services: CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 2 CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka1:9091,kafka2:9092 CONFLUENT_METRICS_REPORTER_SECURITY_PROTOCOL: SASL_PLAINTEXT - CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: 
"org.apache.kafka.common.security.plain.PlainLoginModule required \ + CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: + "org.apache.kafka.common.security.plain.PlainLoginModule required \ username=\"admin\" \ password=\"admin-secret\";" CONFLUENT_METRICS_REPORTER_SASL_MECHANISM: PLAIN CONFLUENT_METRICS_REPORTER_MAX_REQUEST_SIZE: 10485760 # To avoid race condition with control-center # CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'false' - CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'true' + CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: "true" KAFKA_SSL_KEYSTORE_FILENAME: kafka.kafka1.keystore.jks KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka1_keystore_creds @@ -199,7 +199,7 @@ services: # Schema Validation KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: https://schemaregistry:8085 KAFKA_CONFLUENT_BASIC_AUTH_CREDENTIALS_SOURCE: USER_INFO - KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: 'superUser:superUser' + KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: "superUser:superUser" KAFKA_CONFLUENT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka1.truststore.jks KAFKA_CONFLUENT_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf @@ -261,7 +261,7 @@ services: KAFKA_KAFKA_REST_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka1.truststore.jks KAFKA_KAFKA_REST_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_KAFKA_REST_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: BASIC - KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: 'restAdmin:restAdmin' + KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: "restAdmin:restAdmin" KAFKA_KAFKA_REST_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000 KAFKA_KAFKA_REST_CLIENT_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000 @@ -287,7 +287,7 @@ services: - ./scripts/security/keypair:/tmp/conf - ./scripts/helper:/tmp/helper - ./scripts/security:/etc/kafka/secrets - command: "bash -c 'if [ ! -f /etc/kafka/secrets/kafka.kafka2.keystore.jks ]; then echo \"ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)\" && exit 1 ; else /etc/confluent/docker/run ; fi'" + command: 'bash -c ''if [ ! 
-f /etc/kafka/secrets/kafka.kafka2.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi''' ports: - 8092:8092 - 9092:9092 @@ -296,7 +296,7 @@ services: - 12092:12092 environment: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2182 - KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: 'true' + KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: "true" # Broker uses TLSv1.2 by-default for ZooKeeper TLS connections # See note for ZOOKEEPER_SSL_ENABLED_PROTOCOLS regarding TLS 1.3 support @@ -311,11 +311,11 @@ services: KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka2.truststore.jks KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_TYPE: JKS - KAFKA_ZOOKEEPER_SET_ACL: 'true' + KAFKA_ZOOKEEPER_SET_ACL: "true" KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:SASL_PLAINTEXT,TOKEN:SASL_SSL,SSL:SSL,CLEAR:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka2:9092,TOKEN://kafka2:10092,SSL://kafka2:11092,CLEAR://kafka2:12092 - + KAFKA_SUPER_USERS: User:admin;User:mds;User:superUser;User:ANONYMOUS KAFKA_LOG4J_LOGGERS: "kafka.authorizer.logger=INFO" KAFKA_LOG4J_ROOT_LOGLEVEL: INFO @@ -329,19 +329,19 @@ services: KAFKA_LISTENER_NAME_INTERNAL_SASL_ENABLED_MECHANISMS: PLAIN KAFKA_LISTENER_NAME_INTERNAL_PLAIN_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.plain.PlainLoginModule required \ - username="admin" \ - password="admin-secret" \ - user_admin="admin-secret" \ - user_mds="mds-secret"; + org.apache.kafka.common.security.plain.PlainLoginModule required \ + username="admin" \ + password="admin-secret" \ + user_admin="admin-secret" \ + user_mds="mds-secret"; # Configure TOKEN listener for Confluent Platform components and impersonation KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerValidatorCallbackHandler KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerServerLoginCallbackHandler KAFKA_LISTENER_NAME_TOKEN_SASL_ENABLED_MECHANISMS: OAUTHBEARER KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - publicKeyPath="/tmp/conf/public.pem"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + publicKeyPath="/tmp/conf/public.pem"; KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2 KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 2 @@ -351,8 +351,8 @@ services: KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 2 KAFKA_CONFLUENT_BALANCER_HEAL_BROKER_FAILURE_THRESHOLD_MS: 30000 - KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false' + KAFKA_DELETE_TOPIC_ENABLE: "true" + KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false" KAFKA_DEFAULT_REPLICATION_FACTOR: 2 # Confluent Metrics Reporter @@ -360,14 +360,15 @@ services: CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 2 CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka1:9091,kafka2:9092 CONFLUENT_METRICS_REPORTER_SECURITY_PROTOCOL: SASL_PLAINTEXT - CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required \ + CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: + "org.apache.kafka.common.security.plain.PlainLoginModule required \ username=\"admin\" \ password=\"admin-secret\";" CONFLUENT_METRICS_REPORTER_SASL_MECHANISM: PLAIN 
CONFLUENT_METRICS_REPORTER_MAX_REQUEST_SIZE: 10485760 # To avoid race condition with control-center # CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'false' - CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'true' + CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: "true" KAFKA_SSL_KEYSTORE_FILENAME: kafka.kafka2.keystore.jks KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka2_keystore_creds @@ -383,7 +384,7 @@ services: # Schema Validation KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: https://schemaregistry:8085 KAFKA_CONFLUENT_BASIC_AUTH_CREDENTIALS_SOURCE: USER_INFO - KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: 'superUser:superUser' + KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: "superUser:superUser" KAFKA_CONFLUENT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka2.truststore.jks KAFKA_CONFLUENT_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf @@ -445,7 +446,7 @@ services: KAFKA_KAFKA_REST_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka2.truststore.jks KAFKA_KAFKA_REST_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_KAFKA_REST_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: BASIC - KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: 'restAdmin:restAdmin' + KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: "restAdmin:restAdmin" KAFKA_KAFKA_REST_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000 KAFKA_KAFKA_REST_CLIENT_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000 @@ -453,8 +454,6 @@ services: # Encrypt credentials stored in Cluster Link KAFKA_PASSWORD_ENCODER_SECRET: encoder-secret - - connect: image: localbuild/connect:${CONFLUENT_DOCKER_TAG}-${CONNECTOR_VERSION} container_name: connect @@ -475,13 +474,13 @@ services: ports: - 8083:8083 environment: - CUB_CLASSPATH: '/usr/share/java/confluent-security/connect/*:/usr/share/java/kafka/*:/usr/share/java/cp-base-new/*' + CUB_CLASSPATH: "/usr/share/java/confluent-security/connect/*:/usr/share/java/kafka/*:/usr/share/java/cp-base-new/*" CONNECT_BOOTSTRAP_SERVERS: kafka1:10091,kafka2:10092 CONNECT_LISTENERS: https://0.0.0.0:8083 CONNECT_GROUP_ID: "connect-cluster" CONNECT_PRODUCER_CLIENT_ID: "connect-worker-producer" - CONNECT_PRODUCER_ENABLE_IDEMPOTENCE: 'true' + CONNECT_PRODUCER_ENABLE_IDEMPOTENCE: "true" CONNECT_CONFIG_STORAGE_TOPIC: connect-configs CONNECT_OFFSET_STORAGE_TOPIC: connect-offsets @@ -540,43 +539,43 @@ services: CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SSL_KEY_PASSWORD: confluent # RBAC - CONNECT_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' + CONNECT_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" CONNECT_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="connectAdmin" \ - password="connectAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="connectAdmin" \ + password="connectAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; # Allow overriding configs on the connector level - CONNECT_CONNECTOR_CLIENT_CONFIG_OVERRIDE_POLICY: 'All' + CONNECT_CONNECTOR_CLIENT_CONFIG_OVERRIDE_POLICY: "All" # Producer - CONNECT_PRODUCER_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_PRODUCER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' - 
CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' + CONNECT_PRODUCER_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_PRODUCER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" + CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="connectAdmin" \ - password="connectAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="connectAdmin" \ + password="connectAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; # Consumer - CONNECT_CONSUMER_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_CONSUMER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' - CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' + CONNECT_CONSUMER_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_CONSUMER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" + CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="connectAdmin" \ - password="connectAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="connectAdmin" \ + password="connectAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; # Default admin config CONNECT_ADMIN_SECURITY_PROTOCOL: SASL_SSL - CONNECT_ADMIN_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_ADMIN_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' + CONNECT_ADMIN_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_ADMIN_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" CONNECT_ADMIN_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks CONNECT_ADMIN_SSL_TRUSTSTORE_PASSWORD: confluent CONNECT_ADMIN_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.keystore.jks @@ -586,18 +585,18 @@ services: # io.confluent.connect.security.ConnectSecurityExtension - RBAC # io.confluent.connect.secretregistry.ConnectSecretRegistryExtension - Secret Registry CONNECT_REST_EXTENSION_CLASSES: io.confluent.connect.security.ConnectSecurityExtension,io.confluent.connect.secretregistry.ConnectSecretRegistryExtension - CONNECT_REST_SERVLET_INITIALIZOR_CLASSES: 
'io.confluent.common.security.jetty.initializer.InstallBearerOrBasicSecurityHandler' + CONNECT_REST_SERVLET_INITIALIZOR_CLASSES: "io.confluent.common.security.jetty.initializer.InstallBearerOrBasicSecurityHandler" CONNECT_PUBLIC_KEY_PATH: /tmp/conf/public.pem # Used by Connect's REST layer to connect to MDS to verify tokens and authenticate clients CONNECT_CONFLUENT_METADATA_BOOTSTRAP_SERVER_URLS: https://kafka1:8091,https://kafka2:8092 - CONNECT_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: 'connectAdmin:connectAdmin' - CONNECT_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: 'BASIC' + CONNECT_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: "connectAdmin:connectAdmin" + CONNECT_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: "BASIC" # Secret Registry - CONNECT_CONFIG_PROVIDERS: 'secret' - CONNECT_CONFIG_PROVIDERS_SECRET_CLASS: 'io.confluent.connect.secretregistry.rbac.config.provider.InternalSecretConfigProvider' - CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_MASTER_ENCRYPTION_KEY: 'password1234' + CONNECT_CONFIG_PROVIDERS: "secret" + CONNECT_CONFIG_PROVIDERS_SECRET_CLASS: "io.confluent.connect.secretregistry.rbac.config.provider.InternalSecretConfigProvider" + CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_MASTER_ENCRYPTION_KEY: "password1234" CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_BOOTSTRAP_SERVERS: kafka1:10091,kafka2:10092 CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_TOPIC_REPLICATION_FACTOR: 2 CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SECURITY_PROTOCOL: SASL_SSL @@ -606,22 +605,21 @@ services: CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.keystore.jks CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SSL_KEYSTORE_PASSWORD: confluent CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SSL_KEY_PASSWORD: confluent - CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SASL_MECHANISM: 'OAUTHBEARER' - CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SASL_LOGIN_CALLBACK_HANDLER_CLASS: 'io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler' + CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SASL_MECHANISM: "OAUTHBEARER" + CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SASL_LOGIN_CALLBACK_HANDLER_CLASS: "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler" CONNECT_CONFIG_PROVIDERS_SECRET_PARAM_KAFKASTORE_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="connectAdmin" \ - password="connectAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="connectAdmin" \ + password="connectAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; CONNECT_SSL_CIPHER_SUITES: ${SSL_CIPHER_SUITES} # Reduce Connect memory utilization KAFKA_JVM_PERFORMANCE_OPTS: -server -XX:+UseG1GC -XX:GCTimeRatio=1 - -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 - -XX:MaxGCPauseMillis=10000 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent - -XX:MaxInlineLevel=15 -Djava.awt.headless=true - + -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 + -XX:MaxGCPauseMillis=10000 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent + -XX:MaxInlineLevel=15 -Djava.awt.headless=true elasticsearch: image: docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.0 @@ -654,9 +652,9 @@ services: ports: - 5601:5601 environment: - NEWSFEED_ENABLED: 'false' - TELEMETRY_OPTIN: 'false' - TELEMETRY_ENABLED: 'false' + 
NEWSFEED_ENABLED: "false" + TELEMETRY_OPTIN: "false" + TELEMETRY_ENABLED: "false" SERVER_MAXPAYLOADBYTES: 4194304 KIBANA_AUTOCOMPLETETIMEOUT: 3000 KIBANA_AUTOCOMPLETETERMINATEAFTER: 2500000 @@ -680,12 +678,12 @@ services: - 9021:9021 - 9022:9022 environment: - CUB_CLASSPATH: '/usr/share/java/confluent-control-center/*:/usr/share/java/rest-utils/*:/usr/share/java/confluent-common/*:/usr/share/java/confluent-security/kafka-rest/*:/usr/share/java/kafka-rest/:/usr/share/java/cp-base-new/*' + CUB_CLASSPATH: "/usr/share/java/confluent-control-center/*:/usr/share/java/rest-utils/*:/usr/share/java/confluent-common/*:/usr/share/java/confluent-security/kafka-rest/*:/usr/share/java/kafka-rest/:/usr/share/java/cp-base-new/*" # If using Confluent Health+ for monitoring and alerting, uncomment the line below to run in management mode. # CONTROL_CENTER_MODE_ENABLE: "management" # You can now remove all Control Center "STREAMS", "MONITORING", and "METRICS" configurations - + # general settings CONTROL_CENTER_BOOTSTRAP_SERVERS: SASL_SSL://kafka1:10091,SASL_SSL://kafka2:10092 CONTROL_CENTER_REPLICATION_FACTOR: 2 @@ -696,12 +694,11 @@ services: CONTROL_CENTER_STREAMS_SASL_MECHANISM: OAUTHBEARER CONTROL_CENTER_STREAMS_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler CONTROL_CENTER_STREAMS_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="controlcenterAdmin" \ - password="controlcenterAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="controlcenterAdmin" \ + password="controlcenterAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; CONTROL_CENTER_STREAMS_SSL_CIPHER_SUITES: ${SSL_CIPHER_SUITES} - CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_REPLICATION: 2 CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1 CONTROL_CENTER_METRICS_TOPIC_REPLICATION: 2 @@ -718,7 +715,7 @@ services: CONTROL_CENTER_STREAMS_SSL_KEYSTORE_PASSWORD: confluent CONTROL_CENTER_STREAMS_SSL_KEY_PASSWORD: confluent - # HTTP and HTTPS to Control Center UI + # HTTP and HTTPS to Control Center UI CONTROL_CENTER_REST_LISTENERS: http://0.0.0.0:9021,https://0.0.0.0:9022 CONTROL_CENTER_REST_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.controlCenterAndKsqlDBServer.truststore.jks CONTROL_CENTER_REST_SSL_TRUSTSTORE_PASSWORD: confluent @@ -747,7 +744,7 @@ services: # RBAC CONTROL_CENTER_REST_AUTHENTICATION_METHOD: BEARER PUBLIC_KEY_PATH: /tmp/conf/public.pem - + # Used by Control Center to connect to MDS to verify tokens and authenticate clients CONFLUENT_METADATA_BOOTSTRAP_SERVER_URLS: https://kafka1:8091,https://kafka2:8092 CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: controlcenterAdmin:controlcenterAdmin @@ -775,7 +772,7 @@ services: ports: - 8085:8085 environment: - CUB_CLASSPATH: '/usr/share/java/confluent-security/schema-registry/*:/usr/share/java/schema-registry/*:/usr/share/java/schema-registry-plugins/*:/usr/share/java/cp-base-new/*' + CUB_CLASSPATH: "/usr/share/java/confluent-security/schema-registry/*:/usr/share/java/schema-registry/*:/usr/share/java/schema-registry-plugins/*:/usr/share/java/cp-base-new/*" SCHEMA_REGISTRY_HOST_NAME: schemaregistry @@ -786,10 +783,10 @@ services: SCHEMA_REGISTRY_KAFKASTORE_SASL_MECHANISM: OAUTHBEARER SCHEMA_REGISTRY_KAFKASTORE_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler 
SCHEMA_REGISTRY_KAFKASTORE_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="schemaregistryUser" \ - password="schemaregistryUser" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="schemaregistryUser" \ + password="schemaregistryUser" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.schemaregistry.truststore.jks SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_PASSWORD: confluent SCHEMA_REGISTRY_KAFKASTORE_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.schemaregistry.keystore.jks @@ -801,7 +798,7 @@ services: SCHEMA_REGISTRY_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.schemaregistry.keystore.jks SCHEMA_REGISTRY_SSL_KEYSTORE_PASSWORD: confluent SCHEMA_REGISTRY_SSL_KEY_PASSWORD: confluent - SCHEMA_REGISTRY_SSL_CLIENT_AUTHENTICATION: 'NONE' + SCHEMA_REGISTRY_SSL_CLIENT_AUTHENTICATION: "NONE" SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "https" SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO @@ -809,7 +806,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC_REPLICATION_FACTOR: 2 SCHEMA_REGISTRY_SSL_CIPHER_SUITES: ${SSL_CIPHER_SUITES} - SCHEMA_REGISTRY_DEBUG: 'true' + SCHEMA_REGISTRY_DEBUG: "true" # Enable security extension for RBAC and exporter extension for schema linking SCHEMA_REGISTRY_SCHEMA_REGISTRY_RESOURCE_EXTENSION_CLASS: io.confluent.kafka.schemaregistry.security.SchemaRegistrySecurityResourceExtension,io.confluent.schema.exporter.SchemaExporterResourceExtension @@ -850,7 +847,7 @@ services: - 8088:8088 - 8089:8089 environment: - CUB_CLASSPATH: '/usr/share/java/confluent-security/ksql/*:/usr/share/java/ksqldb-server/*:/usr/share/java/cp-base-new/*' + CUB_CLASSPATH: "/usr/share/java/confluent-security/ksql/*:/usr/share/java/ksqldb-server/*:/usr/share/java/cp-base-new/*" KSQL_LOG4J_ROOT_LOGLEVEL: INFO @@ -863,16 +860,16 @@ services: KSQL_LOG4J_OPTS: "-Dlog4j.configuration=file:/tmp/helper/log4j-secure.properties" KSQL_KSQL_LOGGING_PROCESSING_TOPIC_REPLICATION_FACTOR: 2 - KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: 'true' - KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: 'true' + KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true" + KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true" - KSQL_PRODUCER_ENABLE_IDEMPOTENCE: 'true' + KSQL_PRODUCER_ENABLE_IDEMPOTENCE: "true" KSQL_BOOTSTRAP_SERVERS: kafka1:10091,kafka2:10092 KSQL_HOST_NAME: ksqldb-server KSQL_LISTENERS: "http://0.0.0.0:8088,https://0.0.0.0:8089" KSQL_CACHE_MAX_BYTES_BUFFERING: 0 - + KSQL_KSQL_SECURITY_EXTENSION_CLASS: io.confluent.ksql.security.KsqlConfluentSecurityExtension # Enable bearer token authentication which allows the identity of the ksqlDB end user to be propagated to Kafka for authorization @@ -891,7 +888,7 @@ services: KSQL_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.controlCenterAndKsqlDBServer.keystore.jks KSQL_SSL_KEYSTORE_PASSWORD: confluent KSQL_SSL_KEY_PASSWORD: confluent - # Before v6.1.0: disabling TLSv1.x is required + # Before v6.1.0: disabling TLSv1.x is required KSQL_SSL_ENABLED_PROTOCOLS: "TLSv1.3,TLSv1.2" KSQL_SSL_CIPHER_SUITES: 
"TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256" @@ -905,17 +902,17 @@ services: KSQL_SASL_MECHANISM: OAUTHBEARER KSQL_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler KSQL_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="ksqlDBAdmin" \ - password="ksqlDBAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="ksqlDBAdmin" \ + password="ksqlDBAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; KSQL_KSQL_SCHEMA_REGISTRY_BASIC_AUTH_CREDENTIALS_SOURCE: USER_INFO KSQL_KSQL_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO: ksqlDBAdmin:ksqlDBAdmin # Confluent Monitoring Interceptors for Control Center streams monitoring KSQL_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" - KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" + KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.controlCenterAndKsqlDBServer.truststore.jks KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SSL_TRUSTSTORE_PASSWORD: confluent KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.controlCenterAndKsqlDBServer.keystore.jks @@ -925,10 +922,10 @@ services: KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: OAUTHBEARER KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler KSQL_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="ksqlDBAdmin" \ - password="ksqlDBAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="ksqlDBAdmin" \ + password="ksqlDBAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; ksqldb-cli: image: ${REPOSITORY}/cp-ksqldb-cli:${CONFLUENT_DOCKER_TAG} @@ -969,7 +966,7 @@ services: KAFKA_REST_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.restproxy.keystore.jks KAFKA_REST_SSL_KEYSTORE_PASSWORD: confluent KAFKA_REST_SSL_KEY_PASSWORD: confluent - KAFKA_REST_SSL_CLIENT_AUTHENTICATION: 'NONE' + KAFKA_REST_SSL_CLIENT_AUTHENTICATION: "NONE" KAFKA_REST_CLIENT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.restproxy.truststore.jks KAFKA_REST_CLIENT_SSL_TRUSTSTORE_PASSWORD: confluent @@ -978,19 +975,18 @@ services: KAFKA_REST_CLIENT_SSL_KEY_PASSWORD: confluent # Credentials and classpath for cub kafka-ready - CUB_CLASSPATH: '/usr/share/java/confluent-security/kafka-rest/*:/usr/share/java/kafka-rest/*:/usr/share/java/cp-base-new/*' - + CUB_CLASSPATH: "/usr/share/java/confluent-security/kafka-rest/*:/usr/share/java/kafka-rest/*:/usr/share/java/cp-base-new/*" + # Enable OAuth for REST Proxy's embedded Kafka client that accesses and manages consumer groups and topics KAFKA_REST_CLIENT_SECURITY_PROTOCOL: SASL_SSL 
KAFKA_REST_CLIENT_SASL_MECHANISM: OAUTHBEARER KAFKA_REST_CLIENT_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler KAFKA_REST_CLIENT_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - username="restAdmin" \ - password="restAdmin" \ - metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + username="restAdmin" \ + password="restAdmin" \ + metadataServerUrls="https://kafka1:8091,https://kafka2:8092"; KAFKA_REST_SSL_CIPHER_SUITES: ${SSL_CIPHER_SUITES} - # Enable bearer token authentication which allows the identity of the REST Proxy end user to be propagated to Kafka for authorization KAFKA_REST_KAFKA_REST_RESOURCE_EXTENSION_CLASS: io.confluent.kafkarest.security.KafkaRestSecurityResourceExtension KAFKA_REST_REST_SERVLET_INITIALIZOR_CLASSES: io.confluent.common.security.jetty.initializer.InstallBearerOrBasicSecurityHandler @@ -1001,8 +997,7 @@ services: KAFKA_REST_CLIENT_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000 KAFKA_REST_CONFLUENT_METADATA_BOOTSTRAP_SERVER_URLS: https://kafka1:8091,https://kafka2:8092 KAFKA_REST_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: BASIC - KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: 'restAdmin:restAdmin' - + KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: "restAdmin:restAdmin" streams-demo: image: cnfldemos/cp-demo-kstreams:0.0.11 @@ -1018,4 +1013,3 @@ services: - ./env_files/streams-demo.env environment: KAFKA_REPLICATION_FACTOR: 2 - diff --git a/scripts/sbc/add-broker.sh b/scripts/sbc/add-broker.sh index e54778a6c..aa0b6cfb2 100755 --- a/scripts/sbc/add-broker.sh +++ b/scripts/sbc/add-broker.sh @@ -11,7 +11,7 @@ source ${SBCDIR}/../env.sh (cd $SBCDIR/../security && ./certs-create-per-user.sh kafka3) || exit 1 -docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml up -d kafka3 +docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml up -d kafka3 # verify SBC responds with an add-broker balance plan MAX_WAIT=120 diff --git a/scripts/sbc/docker-compose.yml b/scripts/sbc/compose.yaml similarity index 86% rename from scripts/sbc/docker-compose.yml rename to scripts/sbc/compose.yaml index e6acdcc54..4e976063a 100644 --- a/scripts/sbc/docker-compose.yml +++ b/scripts/sbc/compose.yaml @@ -1,19 +1,18 @@ -# docker-compose supports environment variable substitution with the ${VARIABLE-NAME} syntax. +# docker compose supports environment variable substitution with the ${VARIABLE-NAME} syntax. # Environment variables can be sourced in a variety of ways. One of those ways is through -# a well known '.env' file located in the same folder as the docker-compose.yml file. See the Docker +# a well known '.env' file located in the same folder as the compose.yaml file. See the Docker # documentation for details: https://docs.docker.com/compose/environment-variables/#the-env-file -# +# # This feature is being used to parameterize some values within this file. In this directory is also # a .env file, which is actually a symbolic link to /utils/config.env. That file # contains values which get substituted here when docker-compose parses this file. # -# If you'd like to view the docker-compose.yml file rendered with its environment variable substitutions -# you can execute the `docker-compose config` command. 
Take note that some demos provide additional -# environment variable values by exporting them in a script prior to running `docker-compose up`. +# If you'd like to view the compose.yaml file rendered with its environment variable substitutions +# you can execute the `docker compose config` command. Take note that some demos provide additional +# environment variable values by exporting them in a script prior to running `docker compose up`. --- version: "2.3" services: - kafka3: # Broker kafka3 is not started by-default in start scripts - it is used during the Self Balancing Cluster (SBC) demo image: ${REPOSITORY}/cp-server:${CONFLUENT_DOCKER_TAG} @@ -26,7 +25,7 @@ services: - ./scripts/security/keypair:/tmp/conf - ./scripts/helper:/tmp/helper - ./scripts/security:/etc/kafka/secrets - command: "bash -c 'if [ ! -f /etc/kafka/secrets/kafka.kafka3.keystore.jks ]; then echo \"ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)\" && exit 1 ; else /etc/confluent/docker/run ; fi'" + command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka3.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi''' ports: - 8093:8093 - 9093:9093 @@ -35,7 +34,7 @@ services: - 12093:12093 environment: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2182 - KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: 'true' + KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: "true" # Broker uses TLSv1.2 by-default for ZooKeeper TLS connections # See note for ZOOKEEPER_SSL_ENABLED_PROTOCOLS regarding TLS 1.3 support @@ -50,7 +49,7 @@ services: KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka3.truststore.jks KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_PASSWORD: confluent KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_TYPE: JKS - KAFKA_ZOOKEEPER_SET_ACL: 'true' + KAFKA_ZOOKEEPER_SET_ACL: "true" KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:SASL_PLAINTEXT,TOKEN:SASL_SSL,SSL:SSL,CLEAR:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka3:9093,TOKEN://kafka3:10093,SSL://kafka3:11093,CLEAR://kafka3:12093 @@ -68,19 +67,19 @@ services: KAFKA_LISTENER_NAME_INTERNAL_SASL_ENABLED_MECHANISMS: PLAIN KAFKA_LISTENER_NAME_INTERNAL_PLAIN_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.plain.PlainLoginModule required \ - username="admin" \ - password="admin-secret" \ - user_admin="admin-secret" \ - user_mds="mds-secret"; + org.apache.kafka.common.security.plain.PlainLoginModule required \ + username="admin" \ + password="admin-secret" \ + user_admin="admin-secret" \ + user_mds="mds-secret"; # Configure TOKEN listener for Confluent Platform components and impersonation KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerValidatorCallbackHandler KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerServerLoginCallbackHandler KAFKA_LISTENER_NAME_TOKEN_SASL_ENABLED_MECHANISMS: OAUTHBEARER KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_JAAS_CONFIG: | - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ - publicKeyPath="/tmp/conf/public.pem"; + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + publicKeyPath="/tmp/conf/public.pem"; KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2 
      KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 2
@@ -90,8 +89,8 @@ services:
       KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 2
       KAFKA_CONFLUENT_BALANCER_HEAL_BROKER_FAILURE_THRESHOLD_MS: 30000

-      KAFKA_DELETE_TOPIC_ENABLE: 'true'
-      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'
+      KAFKA_DELETE_TOPIC_ENABLE: "true"
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_DEFAULT_REPLICATION_FACTOR: 2

       # Confluent Metrics Reporter
@@ -99,14 +98,15 @@ services:
       CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 2
       CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka1:9091,kafka2:9092
       CONFLUENT_METRICS_REPORTER_SECURITY_PROTOCOL: SASL_PLAINTEXT
-      CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required \
+      CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG:
+        "org.apache.kafka.common.security.plain.PlainLoginModule required \
         username=\"admin\" \
         password=\"admin-secret\";"
       CONFLUENT_METRICS_REPORTER_SASL_MECHANISM: PLAIN
       CONFLUENT_METRICS_REPORTER_MAX_REQUEST_SIZE: 10485760
       # To avoid race condition with control-center
       # CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'false'
-      CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'true'
+      CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: "true"

       KAFKA_SSL_KEYSTORE_FILENAME: kafka.kafka3.keystore.jks
       KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka3_keystore_creds
@@ -122,7 +122,7 @@ services:
       # Schema Validation
       KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: https://schemaregistry:8085
       KAFKA_CONFLUENT_BASIC_AUTH_CREDENTIALS_SOURCE: USER_INFO
-      KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: 'superUser:superUser'
+      KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: "superUser:superUser"
       KAFKA_CONFLUENT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka3.truststore.jks
       KAFKA_CONFLUENT_SSL_TRUSTSTORE_PASSWORD: confluent
       KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf
@@ -184,7 +184,6 @@ services:
       KAFKA_KAFKA_REST_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka3.truststore.jks
       KAFKA_KAFKA_REST_SSL_TRUSTSTORE_PASSWORD: confluent
       KAFKA_KAFKA_REST_CONFLUENT_METADATA_HTTP_AUTH_CREDENTIALS_PROVIDER: BASIC
-      KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: 'restAdmin:restAdmin'
+      KAFKA_KAFKA_REST_CONFLUENT_METADATA_BASIC_AUTH_USER_INFO: "restAdmin:restAdmin"
       KAFKA_KAFKA_REST_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000
       KAFKA_KAFKA_REST_CLIENT_CONFLUENT_METADATA_SERVER_URLS_MAX_AGE_MS: 60000
-
diff --git a/scripts/sbc/validate_sbc_add_broker_completed.sh b/scripts/sbc/validate_sbc_add_broker_completed.sh
index db4acfb00..9990fc5e5 100755
--- a/scripts/sbc/validate_sbc_add_broker_completed.sh
+++ b/scripts/sbc/validate_sbc_add_broker_completed.sh
@@ -4,4 +4,4 @@ SBCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 source ${SBCDIR}/../helper/functions.sh
 source ${SBCDIR}/../env.sh

-docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml logs kafka1 kafka2 | grep "COMPLETED.*databalancer"
+docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml logs kafka1 kafka2 | grep "COMPLETED.*databalancer"
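A quick way to sanity-check the renamed compose files is to render them with the v2 CLI. This is a minimal sketch, run from the repository root, assuming the symlinked `.env` described in the file headers is in place:

    # Render compose.yaml with its .env substitutions applied; exits non-zero on YAML errors
    docker compose -f compose.yaml config > /dev/null && echo "compose.yaml renders"

    # The SBC override is only used together with the base file, as in scripts/sbc/add-broker.sh
    docker compose -f compose.yaml -f scripts/sbc/compose.yaml config kafka3 > /dev/null && echo "SBC override renders"

Both commands exercise the same substitution path that `docker compose up` uses, so they surface missing variables as warnings before any container starts.
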
diff --git a/scripts/sbc/validate_sbc_add_broker_plan_computation.sh b/scripts/sbc/validate_sbc_add_broker_plan_computation.sh
index 19b8e8080..61476d350 100755
--- a/scripts/sbc/validate_sbc_add_broker_plan_computation.sh
+++ b/scripts/sbc/validate_sbc_add_broker_plan_computation.sh
@@ -4,4 +4,4 @@ SBCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 source ${SBCDIR}/../helper/functions.sh
 source ${SBCDIR}/../env.sh

-docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml logs kafka1 kafka2 | grep "PLAN_COMPUTATION.*databalancer"
+docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml logs kafka1 kafka2 | grep "PLAN_COMPUTATION.*databalancer"
diff --git a/scripts/sbc/validate_sbc_add_broker_reassignment.sh b/scripts/sbc/validate_sbc_add_broker_reassignment.sh
index c777812ff..043748683 100755
--- a/scripts/sbc/validate_sbc_add_broker_reassignment.sh
+++ b/scripts/sbc/validate_sbc_add_broker_reassignment.sh
@@ -4,4 +4,4 @@ SBCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 source ${SBCDIR}/../helper/functions.sh
 source ${SBCDIR}/../env.sh

-docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml logs kafka1 kafka2 | grep "REASSIGNMENT.*databalancer"
+docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml logs kafka1 kafka2 | grep "REASSIGNMENT.*databalancer"
diff --git a/scripts/sbc/validate_sbc_kill_broker_completed.sh b/scripts/sbc/validate_sbc_kill_broker_completed.sh
index bf6c22229..bfc460190 100755
--- a/scripts/sbc/validate_sbc_kill_broker_completed.sh
+++ b/scripts/sbc/validate_sbc_kill_broker_completed.sh
@@ -4,6 +4,6 @@ SBCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 source ${SBCDIR}/../helper/functions.sh
 source ${SBCDIR}/../env.sh

-docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml logs kafka1 kafka2 | grep "BROKER_FAILURE.*execution finishes" || exit 1
-(docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml exec kafka1 kafka-replica-status --bootstrap-server kafka1:9091 --admin.config /etc/kafka/secrets/client_sasl_plain.config --verbose || exit 1) | grep "IsInIsr: false" && exit 1
+docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml logs kafka1 kafka2 | grep "BROKER_FAILURE.*execution finishes" || exit 1
+(docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml exec kafka1 kafka-replica-status --bootstrap-server kafka1:9091 --admin.config /etc/kafka/secrets/client_sasl_plain.config --verbose || exit 1) | grep "IsInIsr: false" && exit 1
 exit 0
diff --git a/scripts/sbc/validate_sbc_kill_broker_started.sh b/scripts/sbc/validate_sbc_kill_broker_started.sh
index 15c74a55e..b9c0e1091 100755
--- a/scripts/sbc/validate_sbc_kill_broker_started.sh
+++ b/scripts/sbc/validate_sbc_kill_broker_started.sh
@@ -4,4 +4,4 @@ SBCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 source ${SBCDIR}/../helper/functions.sh
 source ${SBCDIR}/../env.sh

-docker-compose -f $SBCDIR/../../docker-compose.yml -f $SBCDIR/docker-compose.yml logs kafka1 kafka2 | grep "BROKER_FAILURE.*started successfully"
+docker compose -f $SBCDIR/../../compose.yaml -f $SBCDIR/compose.yaml logs kafka1 kafka2 | grep "BROKER_FAILURE.*started successfully"
diff --git a/scripts/start.sh b/scripts/start.sh
index 50b5b41a1..45f8435b7 100755
--- a/scripts/start.sh
+++ b/scripts/start.sh
@@ -46,9 +46,9 @@ fi
 #-------------------------------------------------------------------------------

 # Bring up openldap
-docker-compose up --no-recreate -d openldap
+docker compose up --no-recreate -d openldap
 sleep 5
-if [[ $(docker-compose ps openldap | grep Exit) =~ "Exit" ]] ; then
+if [[ $(docker compose ps openldap | grep Exit) =~ "Exit" ]] ; then
   echo "ERROR: openldap container could not start. Troubleshoot and try again.
For troubleshooting instructions see https://docs.confluent.io/platform/current/tutorials/cp-demo/docs/troubleshooting.html" exit 1 fi @@ -56,14 +56,14 @@ fi # Bring up tools -docker-compose up --no-recreate -d tools +docker compose up --no-recreate -d tools # Add root CA to container (obviates need for supplying it at CLI login '--ca-cert-path') -docker-compose exec tools bash -c "cp /etc/kafka/secrets/snakeoil-ca-1.crt /usr/local/share/ca-certificates && /usr/sbin/update-ca-certificates" +docker compose exec tools bash -c "cp /etc/kafka/secrets/snakeoil-ca-1.crt /usr/local/share/ca-certificates && /usr/sbin/update-ca-certificates" # Bring up base kafka cluster -docker-compose up --no-recreate -d zookeeper kafka1 kafka2 +docker compose up --no-recreate -d zookeeper kafka1 kafka2 # Verify MDS has started MAX_WAIT=150 @@ -72,10 +72,10 @@ retry $MAX_WAIT host_check_up kafka1 || exit 1 retry $MAX_WAIT host_check_up kafka2 || exit 1 echo "Creating role bindings for principals" -docker-compose exec tools bash -c "/tmp/helper/create-role-bindings.sh" || exit 1 +docker compose exec tools bash -c "/tmp/helper/create-role-bindings.sh" || exit 1 # Workaround for setting min ISR on topic _confluent-metadata-auth -docker-compose exec kafka1 kafka-configs \ +docker compose exec kafka1 kafka-configs \ --bootstrap-server kafka1:12091 \ --entity-type topics \ --entity-name _confluent-metadata-auth \ @@ -86,11 +86,11 @@ docker-compose exec kafka1 kafka-configs \ # Bring up more containers -docker-compose up --no-recreate -d schemaregistry connect control-center +docker compose up --no-recreate -d schemaregistry connect control-center echo echo -e "Create topics in Kafka cluster:" -docker-compose exec tools bash -c "/tmp/helper/create-topics.sh" || exit 1 +docker compose exec tools bash -c "/tmp/helper/create-topics.sh" || exit 1 # Verify Kafka Connect Worker has started MAX_WAIT=240 @@ -130,7 +130,7 @@ echo #------------------------------------------------------------------------------- # Start more containers -docker-compose up --no-recreate -d ksqldb-server ksqldb-cli restproxy +docker compose up --no-recreate -d ksqldb-server ksqldb-cli restproxy # Verify ksqlDB server has started echo @@ -153,7 +153,7 @@ ${DIR}/consumers/listen_WIKIPEDIA_COUNT_GT_1.sh echo echo echo "Start the Kafka Streams application wikipedia-activity-monitor" -docker-compose up --no-recreate -d streams-demo +docker compose up --no-recreate -d streams-demo echo "..." @@ -161,14 +161,14 @@ echo "..." # Verify Docker containers started -if [[ $(docker-compose ps) =~ "Exit 137" ]]; then - echo -e "\nERROR: At least one Docker container did not start properly, see 'docker-compose ps'. Did you increase the memory available to Docker to at least 8 GB (default is 2 GB)?\n" +if [[ $(docker compose ps) =~ "Exit 137" ]]; then + echo -e "\nERROR: At least one Docker container did not start properly, see 'docker compose ps'. 
Did you increase the memory available to Docker to at least 8 GB (default is 2 GB)?\n" exit 1 fi echo echo -e "\nAvailable LDAP users:" -#docker-compose exec openldap ldapsearch -x -h localhost -b dc=confluentdemo,dc=io -D "cn=admin,dc=confluentdemo,dc=io" -w admin | grep uid: +#docker compose exec openldap ldapsearch -x -h localhost -b dc=confluentdemo,dc=io -D "cn=admin,dc=confluentdemo,dc=io" -w admin | grep uid: curl -u mds:mds -X POST "https://localhost:8091/security/1.0/principals/User%3Amds/roles/UserAdmin" \ -H "accept: application/json" -H "Content-Type: application/json" \ -d "{\"clusters\":{\"kafka-cluster\":\"does_not_matter\"}}" \ diff --git a/scripts/validate/validate_rest_proxy.sh b/scripts/validate/validate_rest_proxy.sh index 959824de1..852af2502 100755 --- a/scripts/validate/validate_rest_proxy.sh +++ b/scripts/validate/validate_rest_proxy.sh @@ -35,7 +35,7 @@ topic="users" subject="$topic-value" group="my_avro_consumer" -docker-compose exec tools bash -c "confluent iam rbac role-binding create \ +docker compose exec tools bash -c "confluent iam rbac role-binding create \ --principal $CLIENT_PRINCIPAL \ --role ResourceOwner \ --resource Subject:$subject \ @@ -43,54 +43,54 @@ docker-compose exec tools bash -c "confluent iam rbac role-binding create \ --schema-registry-cluster-id $SR" # Register a new Avro schema for topic 'users' -docker-compose exec schemaregistry curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{ "schema": "[ { \"type\":\"record\", \"name\":\"user\", \"fields\": [ {\"name\":\"userid\",\"type\":\"long\"}, {\"name\":\"username\",\"type\":\"string\"} ]} ]" }' -u $CLIENT_NAME:$CLIENT_NAME https://schemaregistry:8085/subjects/$subject/versions +docker compose exec schemaregistry curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{ "schema": "[ { \"type\":\"record\", \"name\":\"user\", \"fields\": [ {\"name\":\"userid\",\"type\":\"long\"}, {\"name\":\"username\",\"type\":\"string\"} ]} ]" }' -u $CLIENT_NAME:$CLIENT_NAME https://schemaregistry:8085/subjects/$subject/versions # Get the Avro schema id schemaid=$(docker exec schemaregistry curl -s -X GET --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://schemaregistry:8085/subjects/$subject/versions/1 | jq '.id') # Go through steps at https://docs.confluent.io/platform/current/tutorials/cp-demo/docs/index.html#crest-long?utm_source=github&utm_medium=demo&utm_campaign=ch.cp-demo_type.community_content.cp-demo#confluent-rest-proxy -docker-compose exec tools bash -c "confluent iam rbac role-binding create \ +docker compose exec tools bash -c "confluent iam rbac role-binding create \ --principal $CLIENT_PRINCIPAL \ --role DeveloperWrite \ --resource Topic:$topic \ --kafka-cluster-id $KAFKA_CLUSTER_ID" -docker-compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.avro.v2+json" -H "Accept: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"value_schema_id": '"$schemaid"', "records": [{"value": {"user":{"userid": 1, "username": "Bunny Smith"}}}]}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/topics/$topic +docker compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.avro.v2+json" -H "Accept: application/vnd.kafka.v2+json" 
--cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"value_schema_id": '"$schemaid"', "records": [{"value": {"user":{"userid": 1, "username": "Bunny Smith"}}}]}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/topics/$topic -docker-compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"name": "my_consumer_instance", "format": "avro", "auto.offset.reset": "earliest"}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group +docker compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"name": "my_consumer_instance", "format": "avro", "auto.offset.reset": "earliest"}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group -docker-compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"topics":["users"]}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance/subscription +docker compose exec restproxy curl -X POST -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt --data '{"topics":["users"]}' -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance/subscription -docker-compose exec tools bash -c "confluent iam rbac role-binding create \ +docker compose exec tools bash -c "confluent iam rbac role-binding create \ --principal $CLIENT_PRINCIPAL \ --role ResourceOwner \ --resource Group:$group \ --kafka-cluster-id $KAFKA_CLUSTER_ID" -docker-compose exec tools bash -c "confluent iam rbac role-binding create \ +docker compose exec tools bash -c "confluent iam rbac role-binding create \ --principal $CLIENT_PRINCIPAL \ --role DeveloperRead \ --resource Topic:$topic \ --kafka-cluster-id $KAFKA_CLUSTER_ID" # Note: Issue this command twice due to https://github.com/confluentinc/kafka-rest/issues/432 -docker-compose exec restproxy curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance/records -output=$(docker-compose exec restproxy curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance/records) +docker compose exec restproxy curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME 
+output=$(docker compose exec restproxy curl -X GET -H "Accept: application/vnd.kafka.avro.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance/records)
 
 if [[ $output =~ "Bunny Smith" ]]; then
   printf "\nPASS: Output matches expected output:\n$output"
 else
   printf "\nFAIL: Output does not match expected output:\n$output"
 fi
 
-docker-compose exec restproxy curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance
+docker compose exec restproxy curl -X DELETE -H "Content-Type: application/vnd.kafka.v2+json" --cert /etc/kafka/secrets/restproxy.certificate.pem --key /etc/kafka/secrets/restproxy.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u $CLIENT_NAME:$CLIENT_NAME https://restproxy:8086/consumers/$group/instances/my_consumer_instance
 
 #################
 
 echo -e "\n\n\nValidating the embedded REST Proxy...\n"
 
-docker-compose exec tools bash -c "confluent iam rbac role-binding create \
+docker compose exec tools bash -c "confluent iam rbac role-binding create \
 --principal User:appSA \
 --role ResourceOwner \
 --resource Topic:dev_users \

From 7e108b18172c2354a273bc33e5d4d0d2dae62e95 Mon Sep 17 00:00:00 2001
From: Cerchie
Date: Tue, 10 Dec 2024 08:51:47 -0700
Subject: [PATCH 2/2] revert unnecessary updates, update other compose

---
 .github/ISSUE_TEMPLATE/bug_report.md | 21 +++++++++---------
 docker-compose.yml => compose.yaml   |  4 ++--
 scripts/sbc/compose.yaml             | 33 ++++++++++++++--------------
 3 files changed, 29 insertions(+), 29 deletions(-)
 rename docker-compose.yml => compose.yaml (99%)

diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index bb8c4fe96..4da6f322e 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,10 +1,9 @@
 ---
 name: Bug report
 about: Create a report to help us improve
-title: ''
-labels: ''
-assignees: ''
-
+title: ""
+labels: ""
+assignees: ""
 ---
 
 **Description**
@@ -16,11 +15,13 @@ Validate every step in the troubleshooting section: https://docs.confluent.io/pl
 Identify any existing issues that seem related: https://github.com/confluentinc/cp-demo/issues?q=is%3Aissue
 
 If applicable, please include the output of:
- - `docker-compose logs `
- - any other relevant commands
+
+- `docker-compose logs `
+- any other relevant commands
 
 **Environment**
- - GitHub branch: [e.g. `6.0.1-post`, etc]
- - Operating System:
- - Version of Docker:
- - Version of Docker Compose:
+
+- GitHub branch: [e.g. `6.0.1-post`, etc]
+- Operating System:
+- Version of Docker:
+- Version of docker-compose:
diff --git a/docker-compose.yml b/compose.yaml
similarity index 99%
rename from docker-compose.yml
rename to compose.yaml
index 9812be32b..8438b85af 100644
--- a/docker-compose.yml
+++ b/compose.yaml
@@ -92,7 +92,7 @@ services:
       - ./scripts/security/keypair:/tmp/conf
       - ./scripts/helper:/tmp/helper
       - ./scripts/security:/etc/kafka/secrets
-    command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka1.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi'''
+    command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka1.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi'''
     ports:
       - 8091:8091
       - 9091:9091
@@ -287,7 +287,7 @@ services:
       - ./scripts/security/keypair:/tmp/conf
       - ./scripts/helper:/tmp/helper
       - ./scripts/security:/etc/kafka/secrets
-    command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka2.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker-compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi'''
+    command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka2.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi'''
     ports:
      - 8092:8092
      - 9092:9092
diff --git a/scripts/sbc/compose.yaml b/scripts/sbc/compose.yaml
index 4e976063a..63190ceea 100644
--- a/scripts/sbc/compose.yaml
+++ b/scripts/sbc/compose.yaml
@@ -5,7 +5,7 @@
 #
 # This feature is being used to parameterize some values within this file. In this directory is also
 # a .env file, which is actually a symbolic link to /utils/config.env. That file
-# contains values which get substituted here when docker-compose parses this file.
+# contains values which get substituted here when docker compose parses this file.
 #
 # If you'd like to view the compose.yaml file rendered with its environment variable substitutions
 # you can execute the `docker compose config` command. Take note that some demos provide additional
@@ -25,7 +25,7 @@ services:
       - ./scripts/security/keypair:/tmp/conf
       - ./scripts/helper:/tmp/helper
       - ./scripts/security:/etc/kafka/secrets
-    command: 'bash -c ''if [ ! -f /etc/kafka/secrets/kafka.kafka3.keystore.jks ]; then echo "ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker compose up -d?)" && exit 1 ; else /etc/confluent/docker/run ; fi'''
+    command: "bash -c 'if [ ! -f /etc/kafka/secrets/kafka.kafka3.keystore.jks ]; then echo \"ERROR: Did not find SSL certificates in /etc/kafka/secrets/ (did you remember to run ./scripts/start.sh instead of docker compose up -d?)\" && exit 1 ; else /etc/confluent/docker/run ; fi'"
    ports:
      - 8093:8093
      - 9093:9093
@@ -34,7 +34,7 @@
       - 12093:12093
     environment:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2182
-      KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: "true"
+      KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE: 'true'
 
       # Broker uses TLSv1.2 by-default for ZooKeeper TLS connections
       # See note for ZOOKEEPER_SSL_ENABLED_PROTOCOLS regarding TLS 1.3 support
@@ -49,7 +49,7 @@
       KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka3.truststore.jks
       KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_PASSWORD: confluent
       KAFKA_ZOOKEEPER_SSL_TRUSTSTORE_TYPE: JKS
-      KAFKA_ZOOKEEPER_SET_ACL: "true"
+      KAFKA_ZOOKEEPER_SET_ACL: 'true'
       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:SASL_PLAINTEXT,TOKEN:SASL_SSL,SSL:SSL,CLEAR:PLAINTEXT
       KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
       KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka3:9093,TOKEN://kafka3:10093,SSL://kafka3:11093,CLEAR://kafka3:12093
@@ -67,19 +67,19 @@
       KAFKA_LISTENER_NAME_INTERNAL_SASL_ENABLED_MECHANISMS: PLAIN
       KAFKA_LISTENER_NAME_INTERNAL_PLAIN_SASL_JAAS_CONFIG: |
-        org.apache.kafka.common.security.plain.PlainLoginModule required \
-        username="admin" \
-        password="admin-secret" \
-        user_admin="admin-secret" \
-        user_mds="mds-secret";
+        org.apache.kafka.common.security.plain.PlainLoginModule required \
+        username="admin" \
+        password="admin-secret" \
+        user_admin="admin-secret" \
+        user_mds="mds-secret";
 
       # Configure TOKEN listener for Confluent Platform components and impersonation
       KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerValidatorCallbackHandler
       KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS: io.confluent.kafka.server.plugins.auth.token.TokenBearerServerLoginCallbackHandler
       KAFKA_LISTENER_NAME_TOKEN_SASL_ENABLED_MECHANISMS: OAUTHBEARER
       KAFKA_LISTENER_NAME_TOKEN_OAUTHBEARER_SASL_JAAS_CONFIG: |
-        org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
-        publicKeyPath="/tmp/conf/public.pem";
+        org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
+        publicKeyPath="/tmp/conf/public.pem";
 
       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
       KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 2
@@ -89,8 +89,8 @@
       KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 2
       KAFKA_CONFLUENT_BALANCER_HEAL_BROKER_FAILURE_THRESHOLD_MS: 30000
 
-      KAFKA_DELETE_TOPIC_ENABLE: "true"
-      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_DELETE_TOPIC_ENABLE: 'true'
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'
       KAFKA_DEFAULT_REPLICATION_FACTOR: 2
 
       # Confluent Metrics Reporter
@@ -98,15 +98,14 @@
       CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 2
       CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka1:9091,kafka2:9092
       CONFLUENT_METRICS_REPORTER_SECURITY_PROTOCOL: SASL_PLAINTEXT
-      CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG:
-        "org.apache.kafka.common.security.plain.PlainLoginModule required \
+      CONFLUENT_METRICS_REPORTER_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required \
         username=\"admin\" \
         password=\"admin-secret\";"
       CONFLUENT_METRICS_REPORTER_SASL_MECHANISM: PLAIN
       CONFLUENT_METRICS_REPORTER_MAX_REQUEST_SIZE: 10485760
       # To avoid race condition with control-center
       # CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'false'
-      CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: "true"
+      CONFLUENT_METRICS_REPORTER_TOPIC_CREATE: 'true'
 
       KAFKA_SSL_KEYSTORE_FILENAME: kafka.kafka3.keystore.jks
       KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka3_keystore_creds
@@ -122,7 +121,7 @@
       # Schema Validation
       KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: https://schemaregistry:8085
       KAFKA_CONFLUENT_BASIC_AUTH_CREDENTIALS_SOURCE: USER_INFO
-      KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: "superUser:superUser"
+      KAFKA_CONFLUENT_BASIC_AUTH_USER_INFO: 'superUser:superUser'
       KAFKA_CONFLUENT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.kafka3.truststore.jks
       KAFKA_CONFLUENT_SSL_TRUSTSTORE_PASSWORD: confluent
       KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf