From 6c8a2ede6279b6eaa1eeb6fc2da97a46599cb339 Mon Sep 17 00:00:00 2001 From: Carles Arnal Date: Tue, 14 May 2024 18:08:53 +0200 Subject: [PATCH] Add examples to the registry repository --- app/.gitignore | 2 - app/script.sql | 199 +++++++ examples/README.md | 71 +++ examples/avro-bean/pom.xml | 43 ++ .../examples/avro/bean/AvroBeanExample.java | 211 ++++++++ .../examples/avro/bean/GreetingBean.java | 61 +++ .../avro-maven-with-references-auto/pom.xml | 51 ++ .../src/main/resources/schemas/Exchange.avsc | 6 + .../src/main/resources/schemas/TradeKey.avsc | 15 + .../src/main/resources/schemas/TradeRaw.avsc | 23 + .../main/resources/schemas/TradeValue.avsc | 15 + examples/avro-maven-with-references/pom.xml | 64 +++ .../src/main/resources/schemas/Exchange.avsc | 6 + .../src/main/resources/schemas/TradeKey.avsc | 15 + examples/camel-quarkus-kafka/README.md | 26 + .../kafka-registry-consumer/README.adoc | 228 ++++++++ .../kubefiles/secret-example.yml | 28 + .../kafka-registry-consumer/pom.xml | 240 +++++++++ .../src/main/docker/Dockerfile.jvm | 71 +++ .../src/main/docker/Dockerfile.native | 44 ++ .../src/main/java/org/acme/kafka/Routes.java | 35 ++ .../src/main/resources/application.properties | 39 ++ .../kafka-registry-producer/README.adoc | 10 + .../kafka-registry-producer/pom.xml | 193 +++++++ .../src/main/docker/Dockerfile.jvm | 71 +++ .../src/main/docker/Dockerfile.native | 44 ++ .../src/main/java/org/acme/kafka/Routes.java | 37 ++ .../src/main/java/org/acme/kafka/User.java | 435 ++++++++++++++++ .../src/main/resources/application.properties | 40 ++ .../kafka-registry-producer/user.avsc | 15 + examples/camel-quarkus-kafka/pom.xml | 20 + examples/cloudevents/.gitignore | 36 ++ examples/cloudevents/README.md | 122 +++++ examples/cloudevents/new-order-schema.json | 16 + examples/cloudevents/pom.xml | 154 ++++++ .../cloudevents/processed-order-schema.json | 35 ++ .../apicurio/registry/examples/NewOrder.java | 47 ++ .../registry/examples/OrdersResource.java | 108 ++++ .../registry/examples/ProcessedOrder.java | 93 ++++ .../src/main/resources/application.properties | 8 + examples/confluent-serdes/pom.xml | 60 +++ .../serdes/ConfluentSerdesExample.java | 236 +++++++++ examples/custom-resolver/pom.xml | 33 ++ .../examples/custom/resolver/Config.java | 30 ++ .../custom/resolver/CustomSchemaResolver.java | 105 ++++ .../resolver/CustomSchemaResolverExample.java | 206 ++++++++ examples/custom-strategy/pom.xml | 33 ++ .../examples/custom/strategy/Config.java | 30 ++ .../CustomArtifactResolverStrategy.java | 20 + .../strategy/CustomStrategyExample.java | 227 ++++++++ examples/debezium-openshift/README.md | 159 ++++++ examples/debezium-openshift/example-app.yaml | 132 +++++ .../example-components.yaml | 361 +++++++++++++ examples/debezium-openshift/pom.xml | 316 +++++++++++ .../apicurio/example/debezium/Operation.java | 38 ++ .../debezium/kafka/ExampleKafkaConsumer.java | 133 +++++ .../example/debezium/kafka/KafkaFactory.java | 48 ++ .../example/debezium/model/Address.java | 44 ++ .../example/debezium/model/Customer.java | 35 ++ .../example/debezium/model/Order.java | 41 ++ .../example/debezium/model/Product.java | 35 ++ .../example/debezium/model/ProductOnHand.java | 29 ++ .../apicurio/example/debezium/rest/Api.java | 37 ++ .../example/debezium/rest/ExampleRunner.java | 51 ++ .../example/debezium/sql/Database.java | 65 +++ .../example/debezium/sql/SqlParam.java | 98 ++++ .../example/debezium/sql/SqlParamType.java | 27 + .../src/main/resources/application.properties | 4 + 
examples/event-driven-architecture/README.md | 100 ++++ .../docker-compose.yaml | 131 +++++ .../studio-connector.json | 26 + examples/jsonschema-validation/pom.xml | 23 + .../validation/json/InvalidMessageBean.java | 63 +++ .../json/JsonSchemaValidationExample.java | 188 +++++++ .../examples/validation/json/MessageBean.java | 61 +++ examples/mix-avro/pom.xml | 43 ++ .../examples/mix/avro/MixAvroExample.java | 240 +++++++++ examples/pom.xml | 278 ++++++++++ examples/protobuf-bean/pom.xml | 59 +++ .../simple/protobuf/ProtobufBeanExample.java | 227 ++++++++ .../protobuf-bean/src/main/proto/person.proto | 32 ++ examples/protobuf-find-latest/pom.xml | 59 +++ .../protobuf/ProtobufFindLatestExample.java | 230 ++++++++ .../src/main/proto/person.proto | 32 ++ .../src/main/resources/person.proto | 32 ++ examples/protobuf-validation/pom.xml | 64 +++ .../protobuf/ProtobufValidationExample.java | 177 +++++++ .../src/main/proto/message_example.proto | 9 + .../src/main/proto/message_example_2.proto | 9 + examples/quarkus-auth/README.md | 13 + examples/quarkus-auth/pom.xml | 236 +++++++++ .../java/io/apicurio/example/Consumer.java | 20 + .../java/io/apicurio/example/InputEvent.java | 21 + .../java/io/apicurio/example/Producer.java | 26 + .../java/io/apicurio/example/Resource.java | 30 ++ .../apicurio/example/schema/avro/Event.java | 491 ++++++++++++++++++ .../src/main/resources/application.yaml | 74 +++ .../src/main/resources/avro/schema/event.avsc | 18 + examples/rest-client-downstream/README.md | 11 + examples/rest-client-downstream/pom.xml | 37 ++ .../apicurio/registry/examples/Constants.java | 24 + .../registry/examples/SimpleRegistryDemo.java | 64 +++ .../examples/util/RegistryDemoUtil.java | 76 +++ examples/rest-client/pom.xml | 36 ++ .../apicurio/registry/examples/Constants.java | 24 + .../examples/HeadersCustomizationExample.java | 45 ++ .../registry/examples/RegistryLoader.java | 53 ++ .../registry/examples/SimpleRegistryDemo.java | 58 +++ .../examples/SimpleRegistryDemoBasicAuth.java | 60 +++ .../examples/SimpleVertxClientExample.java | 45 ++ .../examples/util/RegistryDemoUtil.java | 76 +++ examples/serdes-with-references/pom.xml | 166 ++++++ .../AvroSerdeReferencesExample.java | 179 +++++++ ...sonSerdeReferencesDereferencedExample.java | 306 +++++++++++ .../JsonSerdeReferencesExample.java | 239 +++++++++ .../ProtobufSerdeReferencesExample.java | 186 +++++++ .../examples/references/model/Citizen.java | 113 ++++ .../references/model/CitizenIdentifier.java | 64 +++ .../examples/references/model/City.java | 78 +++ .../references/model/CityQualification.java | 44 ++ .../model/IdentifierQualification.java | 44 ++ .../references/model/Qualification.java | 60 +++ .../src/main/resources/Exchange.avsc | 6 + .../src/main/resources/TradeKey.avsc | 15 + .../src/main/resources/TradeRaw.avsc | 19 + .../src/main/resources/citizen.json | 24 + .../src/main/resources/city.json | 17 + .../main/resources/schema/sample/mode.proto | 18 + .../resources/schema/sample/table_info.proto | 27 + .../schema/sample/table_notification.proto | 22 + .../sample/table_notification_type.proto | 14 + .../main/resources/serde/json/citizen.json | 36 ++ .../resources/serde/json/qualification.json | 17 + .../resources/serde/json/sample.address.json | 100 ++++ .../resources/serde/json/types/city/city.json | 20 + .../serde/json/types/city/qualification.json | 17 + .../types/identifier/citizenIdentifier.json | 16 + .../json/types/identifier/qualification.json | 17 + examples/simple-avro-downstream/README.md | 17 + 
examples/simple-avro-downstream/pom.xml | 40 ++ .../src/main/java/avro/SimpleAvroExample.java | 210 ++++++++ examples/simple-avro-maven/pom.xml | 72 +++ .../avro/maven/SimpleAvroMavenExample.java | 251 +++++++++ .../src/main/resources/schemas/greeting.avsc | 11 + examples/simple-avro/pom.xml | 33 ++ .../simple/avro/SimpleAvroExample.java | 210 ++++++++ examples/simple-json/pom.xml | 38 ++ .../examples/simple/json/MessageBean.java | 61 +++ .../simple/json/SimpleJsonSchemaExample.java | 243 +++++++++ examples/simple-protobuf/pom.xml | 59 +++ .../protobuf/SimpleProtobufExample.java | 229 ++++++++ .../src/main/proto/person.proto | 32 ++ examples/simple-validation/pom.xml | 35 ++ .../examples/simple/json/MessageBean.java | 69 +++ .../simple/json/MessagePublisher.java | 76 +++ .../simple/json/MessageValidator.java | 68 +++ .../examples/simple/json/SimpleBroker.java | 111 ++++ .../simple/json/SimpleValidationExample.java | 90 ++++ .../src/main/schemas/message-invalid.json | 20 + .../src/main/schemas/message.json | 19 + .../src/main/scripts/.gitignore | 3 + .../src/main/scripts/consumer.sh | 34 ++ examples/tools/kafka-all/Dockerfile | 24 + .../tools/kafka-compose/kafka-compose.yaml | 32 ++ examples/tools/kafka-compose/run-kafka.sh | 1 + examples/tools/kafkasql-topic-import/pom.xml | 94 ++++ .../tools/kafkasqltopicimport/Envelope.java | 53 ++ .../kafkasqltopicimport/ImportCommand.java | 131 +++++ .../tools/kafkasqltopicimport/Main.java | 32 ++ .../tools/kafkasqltopicimport/Tuple.java | 36 ++ examples/tools/run-registry.sh | 1 + pom.xml | 11 + 172 files changed, 13539 insertions(+), 2 deletions(-) delete mode 100644 app/.gitignore create mode 100644 app/script.sql create mode 100644 examples/README.md create mode 100644 examples/avro-bean/pom.xml create mode 100644 examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java create mode 100644 examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java create mode 100644 examples/avro-maven-with-references-auto/pom.xml create mode 100644 examples/avro-maven-with-references-auto/src/main/resources/schemas/Exchange.avsc create mode 100644 examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeKey.avsc create mode 100644 examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeRaw.avsc create mode 100644 examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeValue.avsc create mode 100644 examples/avro-maven-with-references/pom.xml create mode 100644 examples/avro-maven-with-references/src/main/resources/schemas/Exchange.avsc create mode 100644 examples/avro-maven-with-references/src/main/resources/schemas/TradeKey.avsc create mode 100644 examples/camel-quarkus-kafka/README.md create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/README.adoc create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/kubefiles/secret-example.yml create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/pom.xml create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.jvm create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.native create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/java/org/acme/kafka/Routes.java create mode 100644 examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/resources/application.properties create mode 100644 
examples/camel-quarkus-kafka/kafka-registry-producer/README.adoc create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/pom.xml create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.jvm create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.native create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/Routes.java create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/User.java create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/src/main/resources/application.properties create mode 100644 examples/camel-quarkus-kafka/kafka-registry-producer/user.avsc create mode 100644 examples/camel-quarkus-kafka/pom.xml create mode 100644 examples/cloudevents/.gitignore create mode 100644 examples/cloudevents/README.md create mode 100644 examples/cloudevents/new-order-schema.json create mode 100644 examples/cloudevents/pom.xml create mode 100644 examples/cloudevents/processed-order-schema.json create mode 100644 examples/cloudevents/src/main/java/io/apicurio/registry/examples/NewOrder.java create mode 100644 examples/cloudevents/src/main/java/io/apicurio/registry/examples/OrdersResource.java create mode 100644 examples/cloudevents/src/main/java/io/apicurio/registry/examples/ProcessedOrder.java create mode 100644 examples/cloudevents/src/main/resources/application.properties create mode 100644 examples/confluent-serdes/pom.xml create mode 100644 examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java create mode 100644 examples/custom-resolver/pom.xml create mode 100644 examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/Config.java create mode 100644 examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java create mode 100644 examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java create mode 100644 examples/custom-strategy/pom.xml create mode 100644 examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/Config.java create mode 100644 examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java create mode 100644 examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java create mode 100644 examples/debezium-openshift/README.md create mode 100644 examples/debezium-openshift/example-app.yaml create mode 100644 examples/debezium-openshift/example-components.yaml create mode 100644 examples/debezium-openshift/pom.xml create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java create mode 100644 
examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java create mode 100644 examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParamType.java create mode 100644 examples/debezium-openshift/src/main/resources/application.properties create mode 100644 examples/event-driven-architecture/README.md create mode 100644 examples/event-driven-architecture/docker-compose.yaml create mode 100644 examples/event-driven-architecture/studio-connector.json create mode 100644 examples/jsonschema-validation/pom.xml create mode 100644 examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java create mode 100644 examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java create mode 100644 examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java create mode 100644 examples/mix-avro/pom.xml create mode 100644 examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java create mode 100644 examples/pom.xml create mode 100644 examples/protobuf-bean/pom.xml create mode 100644 examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java create mode 100644 examples/protobuf-bean/src/main/proto/person.proto create mode 100644 examples/protobuf-find-latest/pom.xml create mode 100644 examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java create mode 100644 examples/protobuf-find-latest/src/main/proto/person.proto create mode 100644 examples/protobuf-find-latest/src/main/resources/person.proto create mode 100644 examples/protobuf-validation/pom.xml create mode 100644 examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java create mode 100644 examples/protobuf-validation/src/main/proto/message_example.proto create mode 100644 examples/protobuf-validation/src/main/proto/message_example_2.proto create mode 100644 examples/quarkus-auth/README.md create mode 100644 examples/quarkus-auth/pom.xml create mode 100644 examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java create mode 100644 examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java create mode 100644 examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java create mode 100644 examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java create mode 100644 examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java create mode 100644 examples/quarkus-auth/src/main/resources/application.yaml create mode 100644 examples/quarkus-auth/src/main/resources/avro/schema/event.avsc create mode 100644 examples/rest-client-downstream/README.md create mode 100644 examples/rest-client-downstream/pom.xml create mode 100644 
examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java create mode 100644 examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java create mode 100644 examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java create mode 100644 examples/rest-client/pom.xml create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/HeadersCustomizationExample.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleVertxClientExample.java create mode 100644 examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java create mode 100644 examples/serdes-with-references/pom.xml create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesDereferencedExample.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CitizenIdentifier.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/City.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CityQualification.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/IdentifierQualification.java create mode 100644 examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Qualification.java create mode 100644 examples/serdes-with-references/src/main/resources/Exchange.avsc create mode 100644 examples/serdes-with-references/src/main/resources/TradeKey.avsc create mode 100644 examples/serdes-with-references/src/main/resources/TradeRaw.avsc create mode 100644 examples/serdes-with-references/src/main/resources/citizen.json create mode 100644 examples/serdes-with-references/src/main/resources/city.json create mode 100644 examples/serdes-with-references/src/main/resources/schema/sample/mode.proto create mode 100644 examples/serdes-with-references/src/main/resources/schema/sample/table_info.proto create mode 100644 examples/serdes-with-references/src/main/resources/schema/sample/table_notification.proto create mode 100644 examples/serdes-with-references/src/main/resources/schema/sample/table_notification_type.proto create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/citizen.json create mode 100644 
examples/serdes-with-references/src/main/resources/serde/json/qualification.json create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/sample.address.json create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/types/city/city.json create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/types/city/qualification.json create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/types/identifier/citizenIdentifier.json create mode 100644 examples/serdes-with-references/src/main/resources/serde/json/types/identifier/qualification.json create mode 100644 examples/simple-avro-downstream/README.md create mode 100644 examples/simple-avro-downstream/pom.xml create mode 100644 examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java create mode 100644 examples/simple-avro-maven/pom.xml create mode 100644 examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java create mode 100644 examples/simple-avro-maven/src/main/resources/schemas/greeting.avsc create mode 100644 examples/simple-avro/pom.xml create mode 100644 examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java create mode 100644 examples/simple-json/pom.xml create mode 100644 examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java create mode 100644 examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java create mode 100644 examples/simple-protobuf/pom.xml create mode 100644 examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java create mode 100644 examples/simple-protobuf/src/main/proto/person.proto create mode 100644 examples/simple-validation/pom.xml create mode 100644 examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java create mode 100644 examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java create mode 100644 examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java create mode 100644 examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java create mode 100644 examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java create mode 100644 examples/simple-validation/src/main/schemas/message-invalid.json create mode 100644 examples/simple-validation/src/main/schemas/message.json create mode 100644 examples/simple-validation/src/main/scripts/.gitignore create mode 100644 examples/simple-validation/src/main/scripts/consumer.sh create mode 100644 examples/tools/kafka-all/Dockerfile create mode 100644 examples/tools/kafka-compose/kafka-compose.yaml create mode 100755 examples/tools/kafka-compose/run-kafka.sh create mode 100644 examples/tools/kafkasql-topic-import/pom.xml create mode 100644 examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java create mode 100644 examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java create mode 100644 examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Main.java create mode 100644 
examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Tuple.java create mode 100755 examples/tools/run-registry.sh diff --git a/app/.gitignore b/app/.gitignore deleted file mode 100644 index 00d2ab71dd..0000000000 --- a/app/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/.apt_generated/ -/.apt_generated_tests/ diff --git a/app/script.sql b/app/script.sql new file mode 100644 index 0000000000..66eb632b3a --- /dev/null +++ b/app/script.sql @@ -0,0 +1,199 @@ +-- H2 2.2.224; +; +CREATE USER IF NOT EXISTS "SA" SALT '2ff99bc3a0ae4f76' HASH '3398cf73b7242d893c56e3c7617e8e205bb00834cfe6ca71e9934d8890c77ca7' ADMIN; +CREATE MEMORY TABLE "PUBLIC"."APICURIO"( + "PROPNAME" CHARACTER VARYING(255) NOT NULL, + "PROPVALUE" CHARACTER VARYING(255) +); +ALTER TABLE "PUBLIC"."APICURIO" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_6" PRIMARY KEY("PROPNAME"); +-- 1 +/- SELECT COUNT(*) FROM PUBLIC.APICURIO; +INSERT INTO "PUBLIC"."APICURIO" VALUES +('db_version', '100'); +CREATE MEMORY TABLE "PUBLIC"."SEQUENCES"( + "SEQNAME" CHARACTER VARYING(32) NOT NULL, + "SEQVALUE" BIGINT NOT NULL +); +ALTER TABLE "PUBLIC"."SEQUENCES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_63" PRIMARY KEY("SEQNAME"); +-- 3 +/- SELECT COUNT(*) FROM PUBLIC.SEQUENCES; +INSERT INTO "PUBLIC"."SEQUENCES" VALUES +('contentId', 265), +('globalId', 395), +('commentId', 3); +CREATE MEMORY TABLE "PUBLIC"."CONFIG"( + "PROPNAME" CHARACTER VARYING(255) NOT NULL, + "PROPVALUE" CHARACTER VARYING(1024) NOT NULL, + "MODIFIEDON" BIGINT NOT NULL +); +ALTER TABLE "PUBLIC"."CONFIG" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_7" PRIMARY KEY("PROPNAME"); +-- 1 +/- SELECT COUNT(*) FROM PUBLIC.CONFIG; +INSERT INTO "PUBLIC"."CONFIG" VALUES +('apicurio.storage.read-only.enabled', 'false', 1715687568633); +CREATE INDEX "PUBLIC"."IDX_CONFIG_1" ON "PUBLIC"."CONFIG"("MODIFIEDON" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."ACLS"( + "PRINCIPALID" CHARACTER VARYING(256) NOT NULL, + "ROLE" CHARACTER VARYING(32) NOT NULL, + "PRINCIPALNAME" CHARACTER VARYING(256) +); +ALTER TABLE "PUBLIC"."ACLS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_1" PRIMARY KEY("PRINCIPALID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.ACLS; +CREATE MEMORY TABLE "PUBLIC"."DOWNLOADS"( + "DOWNLOADID" CHARACTER VARYING(128) NOT NULL, + "EXPIRES" BIGINT NOT NULL, + "CONTEXT" CHARACTER VARYING(1024) +); +ALTER TABLE "PUBLIC"."DOWNLOADS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_F" PRIMARY KEY("DOWNLOADID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.DOWNLOADS; +CREATE HASH INDEX "PUBLIC"."IDX_DOWN_1" ON "PUBLIC"."DOWNLOADS"("EXPIRES" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."GLOBAL_RULES"( + "TYPE" CHARACTER VARYING(32) NOT NULL, + "CONFIGURATION" CHARACTER VARYING NOT NULL +); +ALTER TABLE "PUBLIC"."GLOBAL_RULES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_A" PRIMARY KEY("TYPE"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.GLOBAL_RULES; +CREATE MEMORY TABLE "PUBLIC"."CONTENT"( + "CONTENTID" BIGINT NOT NULL, + "CANONICALHASH" CHARACTER VARYING(64) NOT NULL, + "CONTENTHASH" CHARACTER VARYING(64) NOT NULL, + "CONTENT" BINARY VARYING NOT NULL, + "REFS" CHARACTER VARYING +); +ALTER TABLE "PUBLIC"."CONTENT" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_638" PRIMARY KEY("CONTENTID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.CONTENT; +CREATE HASH INDEX "PUBLIC"."IDX_CONTENT_1" ON "PUBLIC"."CONTENT"("CANONICALHASH" NULLS FIRST); +CREATE HASH INDEX "PUBLIC"."IDX_CONTENT_2" ON "PUBLIC"."CONTENT"("CONTENTHASH" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."CONTENT_REFERENCES"( + "CONTENTID" BIGINT NOT NULL, + "GROUPID" CHARACTER VARYING(512), + 
"ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "VERSION" CHARACTER VARYING(256), + "NAME" CHARACTER VARYING(512) NOT NULL +); +ALTER TABLE "PUBLIC"."CONTENT_REFERENCES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_5" PRIMARY KEY("CONTENTID", "NAME"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.CONTENT_REFERENCES; +CREATE MEMORY TABLE "PUBLIC"."GROUPS"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "DESCRIPTION" CHARACTER VARYING(1024), + "ARTIFACTSTYPE" CHARACTER VARYING(32), + "OWNER" CHARACTER VARYING(256), + "CREATEDON" TIMESTAMP NOT NULL, + "MODIFIEDBY" CHARACTER VARYING(256), + "MODIFIEDON" TIMESTAMP, + "LABELS" CHARACTER VARYING +); +ALTER TABLE "PUBLIC"."GROUPS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_7D" PRIMARY KEY("GROUPID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC."GROUPS"; +CREATE MEMORY TABLE "PUBLIC"."GROUP_LABELS"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "LABELKEY" CHARACTER VARYING(256) NOT NULL, + "LABELVALUE" CHARACTER VARYING(512) +); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.GROUP_LABELS; +CREATE INDEX "PUBLIC"."IDX_GLABELS_1" ON "PUBLIC"."GROUP_LABELS"("LABELKEY" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_GLABELS_2" ON "PUBLIC"."GROUP_LABELS"("LABELVALUE" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."GROUP_RULES"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "TYPE" CHARACTER VARYING(32) NOT NULL, + "CONFIGURATION" CHARACTER VARYING(1024) NOT NULL +); +ALTER TABLE "PUBLIC"."GROUP_RULES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_F8" PRIMARY KEY("GROUPID", "TYPE"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.GROUP_RULES; +CREATE MEMORY TABLE "PUBLIC"."ARTIFACTS"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "TYPE" CHARACTER VARYING(32) NOT NULL, + "OWNER" CHARACTER VARYING(256), + "CREATEDON" TIMESTAMP NOT NULL, + "MODIFIEDBY" CHARACTER VARYING(256), + "MODIFIEDON" TIMESTAMP, + "NAME" CHARACTER VARYING(512), + "DESCRIPTION" CHARACTER VARYING(1024), + "LABELS" CHARACTER VARYING +); +ALTER TABLE "PUBLIC"."ARTIFACTS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_C" PRIMARY KEY("GROUPID", "ARTIFACTID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.ARTIFACTS; +CREATE HASH INDEX "PUBLIC"."IDX_ARTIFACTS_0" ON "PUBLIC"."ARTIFACTS"("TYPE" NULLS FIRST); +CREATE HASH INDEX "PUBLIC"."IDX_ARTIFACTS_1" ON "PUBLIC"."ARTIFACTS"("OWNER" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_ARTIFACTS_2" ON "PUBLIC"."ARTIFACTS"("CREATEDON" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_ARTIFACTS_3" ON "PUBLIC"."ARTIFACTS"("NAME" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_ARTIFACTS_4" ON "PUBLIC"."ARTIFACTS"("DESCRIPTION" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."ARTIFACT_LABELS"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "LABELKEY" CHARACTER VARYING(256) NOT NULL, + "LABELVALUE" CHARACTER VARYING(512) +); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.ARTIFACT_LABELS; +CREATE INDEX "PUBLIC"."IDX_ALABELS_1" ON "PUBLIC"."ARTIFACT_LABELS"("LABELKEY" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_ALABELS_2" ON "PUBLIC"."ARTIFACT_LABELS"("LABELVALUE" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."ARTIFACT_RULES"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "TYPE" CHARACTER VARYING(32) NOT NULL, + "CONFIGURATION" CHARACTER VARYING(1024) NOT NULL +); +ALTER TABLE "PUBLIC"."ARTIFACT_RULES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_5E" PRIMARY KEY("GROUPID", "ARTIFACTID", "TYPE"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.ARTIFACT_RULES; +CREATE MEMORY TABLE "PUBLIC"."VERSIONS"( + "GLOBALID" BIGINT NOT NULL, + "GROUPID" CHARACTER 
VARYING(512) NOT NULL, + "ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "VERSION" CHARACTER VARYING(256), + "VERSIONORDER" INTEGER NOT NULL, + "STATE" CHARACTER VARYING(64) NOT NULL, + "NAME" CHARACTER VARYING(512), + "DESCRIPTION" CHARACTER VARYING(1024), + "OWNER" CHARACTER VARYING(256), + "CREATEDON" TIMESTAMP NOT NULL, + "LABELS" CHARACTER VARYING, + "CONTENTID" BIGINT NOT NULL +); +ALTER TABLE "PUBLIC"."VERSIONS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_B" PRIMARY KEY("GLOBALID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.VERSIONS; +CREATE INDEX "PUBLIC"."IDX_VERSIONS_1" ON "PUBLIC"."VERSIONS"("VERSION" NULLS FIRST); +CREATE HASH INDEX "PUBLIC"."IDX_VERSIONS_2" ON "PUBLIC"."VERSIONS"("STATE" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_VERSIONS_3" ON "PUBLIC"."VERSIONS"("NAME" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_VERSIONS_4" ON "PUBLIC"."VERSIONS"("DESCRIPTION" NULLS FIRST); +CREATE HASH INDEX "PUBLIC"."IDX_VERSIONS_5" ON "PUBLIC"."VERSIONS"("OWNER" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_VERSIONS_6" ON "PUBLIC"."VERSIONS"("CREATEDON" NULLS FIRST); +CREATE HASH INDEX "PUBLIC"."IDX_VERSIONS_7" ON "PUBLIC"."VERSIONS"("CONTENTID" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."VERSION_LABELS"( + "GLOBALID" BIGINT NOT NULL, + "LABELKEY" CHARACTER VARYING(256) NOT NULL, + "LABELVALUE" CHARACTER VARYING(512) +); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.VERSION_LABELS; +CREATE INDEX "PUBLIC"."IDX_VLABELS_1" ON "PUBLIC"."VERSION_LABELS"("LABELKEY" NULLS FIRST); +CREATE INDEX "PUBLIC"."IDX_VLABELS_2" ON "PUBLIC"."VERSION_LABELS"("LABELVALUE" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."VERSION_COMMENTS"( + "COMMENTID" CHARACTER VARYING(128) NOT NULL, + "GLOBALID" BIGINT NOT NULL, + "OWNER" CHARACTER VARYING(256), + "CREATEDON" TIMESTAMP NOT NULL, + "CVALUE" CHARACTER VARYING(1024) NOT NULL +); +ALTER TABLE "PUBLIC"."VERSION_COMMENTS" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_4" PRIMARY KEY("COMMENTID"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.VERSION_COMMENTS; +CREATE INDEX "PUBLIC"."IDX_VERSION_COMMENTS_1" ON "PUBLIC"."VERSION_COMMENTS"("OWNER" NULLS FIRST); +CREATE MEMORY TABLE "PUBLIC"."ARTIFACT_BRANCHES"( + "GROUPID" CHARACTER VARYING(512) NOT NULL, + "ARTIFACTID" CHARACTER VARYING(512) NOT NULL, + "BRANCHID" CHARACTER VARYING(256) NOT NULL, + "BRANCHORDER" INTEGER NOT NULL, + "VERSION" CHARACTER VARYING(256) NOT NULL +); +ALTER TABLE "PUBLIC"."ARTIFACT_BRANCHES" ADD CONSTRAINT "PUBLIC"."CONSTRAINT_1E" PRIMARY KEY("GROUPID", "ARTIFACTID", "BRANCHID", "BRANCHORDER"); +-- 0 +/- SELECT COUNT(*) FROM PUBLIC.ARTIFACT_BRANCHES; +CREATE INDEX "PUBLIC"."IDX_ARTIFACT_BRANCHES_1" ON "PUBLIC"."ARTIFACT_BRANCHES"("GROUPID" NULLS FIRST, "ARTIFACTID" NULLS FIRST, "BRANCHID" NULLS FIRST, "BRANCHORDER" NULLS FIRST); +ALTER TABLE "PUBLIC"."CONTENT" ADD CONSTRAINT "PUBLIC"."UQ_CONTENT_1" UNIQUE("CONTENTHASH"); +ALTER TABLE "PUBLIC"."VERSIONS" ADD CONSTRAINT "PUBLIC"."UQ_VERSIONS_1" UNIQUE("GROUPID", "ARTIFACTID", "VERSION"); +ALTER TABLE "PUBLIC"."VERSIONS" ADD CONSTRAINT "PUBLIC"."UQ_VERSIONS_2" UNIQUE("GLOBALID", "VERSIONORDER"); +ALTER TABLE "PUBLIC"."CONTENT_REFERENCES" ADD CONSTRAINT "PUBLIC"."FK_CONTENT_REFERENCES_1" FOREIGN KEY("CONTENTID") REFERENCES "PUBLIC"."CONTENT"("CONTENTID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."ARTIFACT_BRANCHES" ADD CONSTRAINT "PUBLIC"."FK_ARTIFACT_BRANCHES_1" FOREIGN KEY("GROUPID", "ARTIFACTID") REFERENCES "PUBLIC"."ARTIFACTS"("GROUPID", "ARTIFACTID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."VERSION_COMMENTS" ADD CONSTRAINT "PUBLIC"."FK_VERSION_COMMENTS_1" FOREIGN 
KEY("GLOBALID") REFERENCES "PUBLIC"."VERSIONS"("GLOBALID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."ARTIFACT_BRANCHES" ADD CONSTRAINT "PUBLIC"."FK_ARTIFACT_BRANCHES_2" FOREIGN KEY("GROUPID", "ARTIFACTID", "VERSION") REFERENCES "PUBLIC"."VERSIONS"("GROUPID", "ARTIFACTID", "VERSION") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."GROUP_LABELS" ADD CONSTRAINT "PUBLIC"."FK_GLABELS_1" FOREIGN KEY("GROUPID") REFERENCES "PUBLIC"."GROUPS"("GROUPID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."GROUP_RULES" ADD CONSTRAINT "PUBLIC"."FK_GRULES_1" FOREIGN KEY("GROUPID") REFERENCES "PUBLIC"."GROUPS"("GROUPID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."ARTIFACT_LABELS" ADD CONSTRAINT "PUBLIC"."FK_ALABELS_1" FOREIGN KEY("GROUPID", "ARTIFACTID") REFERENCES "PUBLIC"."ARTIFACTS"("GROUPID", "ARTIFACTID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."VERSIONS" ADD CONSTRAINT "PUBLIC"."FK_VERSIONS_1" FOREIGN KEY("GROUPID", "ARTIFACTID") REFERENCES "PUBLIC"."ARTIFACTS"("GROUPID", "ARTIFACTID") ON DELETE CASCADE NOCHECK; +ALTER TABLE "PUBLIC"."VERSIONS" ADD CONSTRAINT "PUBLIC"."FK_VERSIONS_2" FOREIGN KEY("CONTENTID") REFERENCES "PUBLIC"."CONTENT"("CONTENTID") NOCHECK; +ALTER TABLE "PUBLIC"."VERSION_LABELS" ADD CONSTRAINT "PUBLIC"."FK_VLABELS_1" FOREIGN KEY("GLOBALID") REFERENCES "PUBLIC"."VERSIONS"("GLOBALID") ON DELETE CASCADE NOCHECK; diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000000..8c1ddc30e7 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,71 @@ +The examples on this repository have been moved to the [main Apicurio Registry Repository](https://github.com/Apicurio/apicurio-registry/examples). + + + +# Apicurio Registry Example Applications +This repository contains a set of example applications (mostly Kafka applications) that use the +Apicurio Registry as part of their workflow. The registry is typically used to store schemas +used by Kafka serializer and deserializer classes. These serdes classes will fetch the schema +from the registry for use during producing or consuming operations (to serializer, deserializer, +or validate the Kafka message payload). + +Each example in this repository attempts to demonstrate some specific use-case or configuration. +There are numerous options available when integrating with the registry, and therefore the set +of examples found here may not cover every configuration permutation. + +# List of Examples +A list of examples is included below, with descriptions and explanations of each covered use-case. + +## Simple Avro Example +This example application demonstrates the basics of using the registry in a very simple Kafka +publish/subscribe application using Apache Avro as the schema technology used to serialize +and deserialize message payloads. + +## Simple JSON Schema Example +This example application demonstrates the basics of using the registry in a very simple Kafka +publish/subscribe application using JSON Schema to validate message payloads when both producing +and consuming them. JSON Schema is not a serialization technology, but rather is only used for +validation. Therefore it can be enabled or disabled in the serializer and deserializer. + +## Confluent Serdes Integration +This example shows how Apicurio Registry serdes classes can be used along with Confluent serdes +classes in a mixed application environment. 
In other words, some applications can be using +Confluent classes while other applications can be using Apicurio Registry classes - and they +can all work together seamlessly with just a little bit of extra configuration. This example +is essentially the same as the Simple Avro Example, but using a Confluent serializer with an +Apicurio Registry deserializer. + +## Avro Bean Example +This example demonstrates how to use Avro as the schema and serialization technology while +using a Java Bean as the Kafka message payload. This is essentially the same as the Simple +Avro Example, but using a java bean instead of a `GenericRecord` as the message payload. + +## Custom ID Strategy Example +This example demonstrates how to use a custom Global ID strategy. The Global ID strategy is +used by a producer (serializer) application to lookup (or create) the Schema it is using for +serialization. Apicurio Registry comes with some useful implementations of the Global ID +strategy out of the box, but it is possible to create your own. This example is essentially +the same as the Simple Avro Example, except instead of using one of the default Apicurio +Registry Global ID strategies, it uses a custom one. + +## Simple Avro Maven Example +This example application demonstrates how to use the Apicurio Registry maven plugin to +pre-register an Avro schema so that it does not need to be embedded within the producer +application. Note that this example will fail unless the maven plugin is executed before +the Java application. See the javadoc in the example for details. + +## Rest Client example +This example application demonstrates how to use the Apicurio Registry rest client to create, +delete, or fetch schemas. This example contains two basic java application, one showing how to +improve the logs by logging all the request and response headers and a basic example on how to +use the client. + +## Mix Avro Schemas Example +This example application showcases an scenario where Apache Avro messages are published to the same +Kafka topic using different Avro schemas. This example uses the Apicurio Registry serdes classes to serialize +and deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. + +## Cloud Events PoC +This is an example application that implements a REST API that consumes and produces CloudEvents. +This example application showcases an experimental library from apicurio-registry project. This library is used to validate incoming and outgoing CloudEvents messages in the REST API. +The validation is performed against json schemas that are stored in Apicurio Registry. For a more detailed explanation go [here](../apicurio-registry/examples/cloudevents/README.md). 
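+Most of the Kafka examples above share the same basic serdes wiring. The following is a minimal
+sketch of the producer-side configuration (the class name `ProducerConfigSketch` is purely
+illustrative; the broker address, registry URL, and auto-register flag mirror the defaults used
+throughout these examples):
+
+```java
+import java.util.Properties;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringSerializer;
+
+import io.apicurio.registry.serde.SerdeConfig;
+import io.apicurio.registry.serde.avro.AvroKafkaSerializer;
+
+public class ProducerConfigSketch {
+
+    public static Producer<Object, Object> createProducer() {
+        Properties props = new Properties();
+        // Standard Kafka connection settings (the examples assume a local broker).
+        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        // Use the Apicurio Registry provided Kafka serializer for message values.
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName());
+        // Point the serde layer at the registry, and register the schema automatically on first use.
+        props.put(SerdeConfig.REGISTRY_URL, "http://localhost:8080/apis/registry/v2");
+        props.put(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE);
+        return new KafkaProducer<>(props);
+    }
+}
+```
+
+The consumer side mirrors this configuration with `AvroKafkaDeserializer`; it needs no extra
+schema settings because the globalId of the schema is sent as part of each message payload.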
diff --git a/examples/avro-bean/pom.xml b/examples/avro-bean/pom.xml new file mode 100644 index 0000000000..fa559f8429 --- /dev/null +++ b/examples/avro-bean/pom.xml @@ -0,0 +1,43 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-avro-bean + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + + diff --git a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java new file mode 100644 index 0000000000..3c2ed1dcd8 --- /dev/null +++ b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java @@ -0,0 +1,211 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.avro.bean; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.serde.avro.ReflectAvroDatumProvider; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Avro schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a {@link GreetingBean}</li>
+ * </ol>
+ * + * Pre-requisites: + * + * + * + * @author eric.wittmann@gmail.com + * @author carles.arnal@redhat.com + */ +public class AvroBeanExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = AvroBeanExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + AvroBeanExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + GreetingBean greeting = new GreetingBean(); + greeting.setMessage("Hello (" + producedMessages++ + ")!"); + greeting.setTime(System.currentTimeMillis()); + + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, greeting); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GreetingBean greeting = record.value(); + System.out.println("Consumed a message: " + greeting.getMessage() + " @ " + new Date(greeting.getTime())); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + // Use Java reflection as the Avro Datum Provider - this also generates an Avro schema from the java bean + props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + + //Just if security values are present, then we configure them. 
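+        // (configureSecurityIfPresent(), defined at the bottom of this class, reads optional
+        // OAuth settings from environment variables; when they are absent, the example simply
+        // connects to an unauthenticated local cluster.)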
+ configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Use Java reflection as the Avro Datum Provider + props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + public static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java new file mode 100644 index 0000000000..b8c64dd589 --- /dev/null +++ b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java @@ -0,0 +1,61 @@ +/* + * Copyright 2020 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.avro.bean; + +/** + * @author eric.wittmann@gmail.com + */ +public class GreetingBean { + + private String message; + private long time; + + /** + * Constructor. + */ + public GreetingBean() { + } + + /** + * @return the message + */ + public String getMessage() { + return message; + } + + /** + * @param message the message to set + */ + public void setMessage(String message) { + this.message = message; + } + + /** + * @return the time + */ + public long getTime() { + return time; + } + + /** + * @param time the time to set + */ + public void setTime(long time) { + this.time = time; + } + +} diff --git a/examples/avro-maven-with-references-auto/pom.xml b/examples/avro-maven-with-references-auto/pom.xml new file mode 100644 index 0000000000..00cf6b92fa --- /dev/null +++ b/examples/avro-maven-with-references-auto/pom.xml @@ -0,0 +1,51 @@ + + + + apicurio-registry-examples + io.apicurio + 2.5.12-SNAPSHOT + + 4.0.0 + + apicurio-registry-examples-avro-maven-with-references-auto + jar + + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + register-artifact + + register + + process-test-resources + + http://localhost:8080/apis/registry/v2 + + + avro-maven-with-references-auto + TradeRaw + 2.0 + AVRO + + ${project.basedir}/src/main/resources/schemas/TradeRaw.avsc + + RETURN_OR_UPDATE + true + true + + + + + + + + + + diff --git a/examples/avro-maven-with-references-auto/src/main/resources/schemas/Exchange.avsc b/examples/avro-maven-with-references-auto/src/main/resources/schemas/Exchange.avsc new file mode 100644 index 0000000000..35e61497f0 --- /dev/null +++ b/examples/avro-maven-with-references-auto/src/main/resources/schemas/Exchange.avsc @@ -0,0 +1,6 @@ +{ + "namespace": "com.kubetrade.schema.common", + "type": "enum", + "name": "Exchange", + "symbols" : ["GEMINI"] +} \ No newline at end of file diff --git a/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeKey.avsc b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeKey.avsc new file mode 100644 index 0000000000..681d74821f --- /dev/null +++ b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeKey.avsc @@ -0,0 +1,15 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeKey", + "fields": [ + { + "name": "exchange", + "type": "com.kubetrade.schema.common.Exchange" + }, + { + "name": "key", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeRaw.avsc b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeRaw.avsc new file mode 100644 index 0000000000..62aad90d20 --- /dev/null +++ b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeRaw.avsc @@ -0,0 +1,23 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeRaw", + "fields": [ + { + "name": "tradeKey", + "type": "com.kubetrade.schema.trade.TradeKey" + }, + { + "name": "value", + "type": "com.kubetrade.schema.trade.TradeValue" + }, + { 
+ "name": "symbol", + "type": "string" + }, + { + "name": "payload", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeValue.avsc b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeValue.avsc new file mode 100644 index 0000000000..529b359507 --- /dev/null +++ b/examples/avro-maven-with-references-auto/src/main/resources/schemas/TradeValue.avsc @@ -0,0 +1,15 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeValue", + "fields": [ + { + "name": "exchange", + "type": "com.kubetrade.schema.common.Exchange" + }, + { + "name": "value", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/examples/avro-maven-with-references/pom.xml b/examples/avro-maven-with-references/pom.xml new file mode 100644 index 0000000000..ff8b3ea282 --- /dev/null +++ b/examples/avro-maven-with-references/pom.xml @@ -0,0 +1,64 @@ + + + + apicurio-registry-examples + io.apicurio + 2.5.12-SNAPSHOT + + 4.0.0 + + apicurio-registry-examples-avro-maven-with-references + jar + + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + register-artifact + + register + + process-test-resources + + http://localhost:8080/apis/registry/v2 + + + avro-maven-with-references + TradeKey + 2.0 + AVRO + + ${project.basedir}/src/main/resources/schemas/TradeKey.avsc + + RETURN_OR_UPDATE + true + + + com.kubetrade.schema.common.Exchange + test-group + Exchange + 2.0 + AVRO + + ${project.basedir}/src/main/resources/schemas/Exchange.avsc + + RETURN_OR_UPDATE + true + + + + + + + + + + + + diff --git a/examples/avro-maven-with-references/src/main/resources/schemas/Exchange.avsc b/examples/avro-maven-with-references/src/main/resources/schemas/Exchange.avsc new file mode 100644 index 0000000000..35e61497f0 --- /dev/null +++ b/examples/avro-maven-with-references/src/main/resources/schemas/Exchange.avsc @@ -0,0 +1,6 @@ +{ + "namespace": "com.kubetrade.schema.common", + "type": "enum", + "name": "Exchange", + "symbols" : ["GEMINI"] +} \ No newline at end of file diff --git a/examples/avro-maven-with-references/src/main/resources/schemas/TradeKey.avsc b/examples/avro-maven-with-references/src/main/resources/schemas/TradeKey.avsc new file mode 100644 index 0000000000..681d74821f --- /dev/null +++ b/examples/avro-maven-with-references/src/main/resources/schemas/TradeKey.avsc @@ -0,0 +1,15 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeKey", + "fields": [ + { + "name": "exchange", + "type": "com.kubetrade.schema.common.Exchange" + }, + { + "name": "key", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/README.md b/examples/camel-quarkus-kafka/README.md new file mode 100644 index 0000000000..dcbe0f1844 --- /dev/null +++ b/examples/camel-quarkus-kafka/README.md @@ -0,0 +1,26 @@ +# Camel Quarkus Kafka Example involving the Service Registry Managed Service + +1. Create Kafka Managed Service instance on cloud.redhat.com + +2. Create associated Service Account, save client Id and Client Secret + +3. Create Service Registry Managed instance on cloud.redhat.com + +4. Populate correctly the producer application.properties file with the missing parameters + +5. Populate correctly the consumer application.properties file with the missing parameters + +6. From the Service Registry Managed Instance UI load the user.avsc as schema named 'test-value' with no group + +7. 
From the producer folder, run:
+
+        mvn clean compile package
+        java -jar target/quarkus-app/quarkus-run.jar
+
+8. From the consumer folder, run:
+
+        mvn clean compile package
+        java -Dquarkus.http.port=8081 -jar target/quarkus-app/quarkus-run.jar
+
+Notes:
+- The `User` class was generated from the `user.avsc` schema using the Avro tools.
diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/README.adoc b/examples/camel-quarkus-kafka/kafka-registry-consumer/README.adoc
new file mode 100644
index 0000000000..9476221c0a
--- /dev/null
+++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/README.adoc
@@ -0,0 +1,228 @@
+= Kafka example: A Camel Quarkus example
+:cq-example-description: An example that shows how to produce and consume messages in a Kafka topic, using the Strimzi Operator
+
+{cq-description}
+
+TIP: Check the https://camel.apache.org/camel-quarkus/latest/first-steps.html[Camel Quarkus User guide] for prerequisites
+and other general information.
+
+
+== Prerequisites
+
+The example application requires a Kafka instance.
+
+You do not need to provide the Kafka instance yourself
+as long as you play with the example code in dev mode (a.k.a. `mvn quarkus:dev` - read more in the https://quarkus.io/guides/getting-started#development-mode[Quarkus development mode guide])
+or as long as you only run the supplied tests (`mvn test`).
+In those situations, the Quarkus tooling starts a Redpanda image for you via https://quarkus.io/guides/kafka-dev-services[Quarkus Dev Services]
+and also configures the application so that you do not need to touch anything in `application.properties`.
+
+== Start in Development mode
+
+Run the application in development mode.
+
+TIP: If you want to use another running Kafka instance in dev mode, uncomment the corresponding Kafka configuration section in `src/main/resources/application.properties` and change the `%prod` profile to `%dev`.
+
+[source,shell]
+----
+$ mvn clean compile quarkus:dev
+----
+
+The above command compiles the project, starts the application and lets the Quarkus tooling watch for changes in your
+workspace. Any modifications in your project will automatically take effect in the running application.
+
+TIP: Please refer to the Development mode section of the
+https://camel.apache.org/camel-quarkus/latest/first-steps.html#_development_mode[Camel Quarkus User guide] for more details.
+
+You should start to see some log messages appearing on the console.
+
+Every 10 seconds the timer component generates a random message and sends it to the Kafka topic `test`.
+
+[source,shell]
+----
+[FromTimer2Kafka] (Camel (camel-1) thread #2 - KafkaProducer[test]) Message sent correctly sent to the topic! : "Message #1"
+----
+
+Next, a Kafka consumer reads the messages and puts them on a SEDA queue.
+
+[source,shell]
+----
+[FromKafka2Seda] (Camel (camel-1) thread #0 - KafkaConsumer[test]) Received : "Message #1"
+----
+
+Next, pull a message from the queue:
+[source,shell]
+----
+$ curl -X GET http://0.0.0.0:8080/example
+----
+
+
+=== Configure Kafka client, package and run the application
+
+Once you are done developing, you may want to configure your Kafka client, then package and run the application.
+
+TIP: Find more details about the JVM mode and Native mode in the Package and run section of the
+https://camel.apache.org/camel-quarkus/latest/first-steps.html#_package_and_run_the_application[Camel Quarkus User guide]
+
+==== Configure the Kafka client
+Uncomment the corresponding commented section in `src/main/resources/application.properties`.
+
+- The "Kafka instance without Authentication" section if no authentication is required.
+- The "Kafka instance with SASL Plain" section if using SASL Plain.
+- The "Kafka instance with SASL Oauth Bearer" section if using OAuth Bearer.
+
+You need to set the corresponding environment variables:
+- Without Authentication
+[source,shell]
+----
+$ export brokers=
+----
+- SASL Plain
+[source,shell]
+----
+$ export brokers=
+$ export id=
+$ export secret=
+----
+- SASL OAuth Bearer
+[source,shell]
+----
+$ export brokers=
+$ export id=
+$ export secret=
+$ export token=
+----
+
+If you want to deploy on Kubernetes or OpenShift, you need to define those values in a secret named `camel-kafka`. Set the needed values in `kubefiles/secret-example.yml`, then add the secret:
+[source,shell]
+----
+$ kubectl apply -f kubefiles/secret-example.yml
+----
+
+==== JVM mode
+
+[source,shell]
+----
+$ mvn clean package -DskipTests
+$ java -jar target/quarkus-app/quarkus-run.jar
+----
+
+==== Native mode
+
+IMPORTANT: Native mode requires having GraalVM and other tools installed. Please check the Prerequisites section
+of the https://camel.apache.org/camel-quarkus/latest/first-steps.html#_prerequisites[Camel Quarkus User guide].
+
+To prepare a native executable using GraalVM, run the following commands:
+
+[source,shell]
+----
+$ mvn clean package -DskipTests -Pnative
+$ ./target/*-runner
+----
+
+==== Deploying to Kubernetes
+
+You can build a container image for the application like this. Refer to the https://quarkus.io/guides/deploying-to-kubernetes[Quarkus Kubernetes guide] for options around customizing image names, registries etc.
+
+This example uses Jib to create the container image for Kubernetes deployment.
+
+Uncomment the Jib container image and secret settings in the Kubernetes-specific section of `src/main/resources/application.properties`, and set the image group and image registry.
+
+Build the application using the `kubernetes` profile.
+
+[source,shell]
+----
+$ mvn clean package -DskipTests -Dkubernetes
+----
+
+The `kubernetes` profile uses the Quarkus Kubernetes and Jib container image extensions, as declared in the `pom.xml`.
+
+[source,xml]
+----
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-kubernetes</artifactId>
+</dependency>
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-container-image-jib</artifactId>
+</dependency>
+----
+
+If you are using a local development cluster like Kind or k3s, you can host the container image locally. Or, with Minikube, use the Docker daemon from the cluster virtual machine: `eval $(minikube docker-env)`. Otherwise, you'll need to push the image to a registry of your choosing.
+
+TIP: You can build & deploy in a single step by running `mvn clean package -DskipTests -Dkubernetes -Dquarkus.kubernetes.deploy=true`
+
+Check that the pods are running.
+
+Example when using the Strimzi operator, with a Kafka instance named `test`:
+
+[source,shell]
+----
+$ kubectl get pods
+NAME                                           READY   STATUS    RESTARTS   AGE
+camel-quarkus-examples-kafka-dbc56974b-ph29m   1/1     Running   0          2m34s
+test-entity-operator-7cccff5899-dlfx8          3/3     Running   0          48m
+test-kafka-0                                   1/1     Running   0          49m
+test-kafka-1                                   1/1     Running   0          49m
+test-kafka-2                                   1/1     Running   0          49m
+test-zookeeper-0                               1/1     Running   0          50m
+test-zookeeper-1                               1/1     Running   0          50m
+test-zookeeper-2                               1/1     Running   0          50m
+
+----
+
+Tail the application logs.
+
+[source,shell]
+----
+$ kubectl logs -f camel-quarkus-examples-kafka-dbc56974b-ph29m
+----
+
+To clean up:
+
+[source,shell]
+----
+$ kubectl delete all -l app.kubernetes.io/name=camel-quarkus-examples-kafka
+$ kubectl delete secret camel-kafka
+----
+
+[NOTE]
+====
+If you need to configure container resource limits & requests, or enable the Quarkus Kubernetes client to trust self-signed certificates, you can find these configuration options in `src/main/resources/application.properties`. Simply uncomment them and set your desired values.
+====
+
+
+==== Deploying to OpenShift
+
+Uncomment the OpenShift container image and secret settings in the OpenShift-specific section of `src/main/resources/application.properties`.
+
+
+[source,shell]
+----
+$ mvn clean package -DskipTests -Dquarkus.kubernetes.deploy=true -Dopenshift
+----
+
+The `openshift` profile uses the Quarkus OpenShift and OpenShift container image extensions, as declared in the `pom.xml`.
+
+[source,xml]
+----
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-openshift</artifactId>
+</dependency>
+<dependency>
+    <groupId>io.quarkus</groupId>
+    <artifactId>quarkus-container-image-openshift</artifactId>
+</dependency>
+----
+
+You can check the pod status and tail the logs using the commands mentioned above in the Kubernetes section. Use the `oc` binary instead of `kubectl` if preferred.
+
+== Feedback
+
+Please report bugs and propose improvements via the https://github.com/apache/camel-quarkus/issues[GitHub issues] of the Camel Quarkus project.
diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/kubefiles/secret-example.yml b/examples/camel-quarkus-kafka/kafka-registry-consumer/kubefiles/secret-example.yml
new file mode 100644
index 0000000000..72499b29dc
--- /dev/null
+++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/kubefiles/secret-example.yml
@@ -0,0 +1,28 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +apiVersion: v1 +kind: Secret +metadata: + name: camel-kafka + namespace: test +type: Opaque +stringData: + brokers: "" + id: "" + secret: "" + token: "" \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/pom.xml b/examples/camel-quarkus-kafka/kafka-registry-consumer/pom.xml new file mode 100644 index 0000000000..9330793c81 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/pom.xml @@ -0,0 +1,240 @@ + + + + 4.0.0 + + io.apicurio + apicurio-registry-camel-quarkus-kafka-parent + 2.1.2-SNAPSHOT + ../pom.xml + + + apicurio-registry-camel-quarkus-kafka-consumer + + + 2.3.0.Final + ${quarkus.platform.version} + + io.quarkus.platform + quarkus-bom + ${quarkus.platform.group-id} + quarkus-camel-bom + + UTF-8 + UTF-8 + 11 + 11 + ${maven.compiler.target} + ${maven.compiler.source} + + 2.11.0 + 1.3.2 + 3.8.0 + 3.2.0 + 3.1.0 + 2.22.2 + 3.0 + 0.7.2 + + + + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + ${camel-quarkus.platform.group-id} + ${camel-quarkus.platform.artifact-id} + ${camel-quarkus.platform.version} + pom + import + + + io.strimzi + kafka-oauth-client + ${kafka-oauth-client.version} + + + + + + + org.apache.camel.quarkus + camel-quarkus-kafka + + + org.apache.camel.quarkus + camel-quarkus-log + + + org.apache.camel.quarkus + camel-quarkus-timer + + + org.apache.camel.quarkus + camel-quarkus-seda + + + io.quarkus + quarkus-resteasy-jackson + + + io.quarkus + quarkus-kubernetes-config + + + io.quarkus + quarkus-apicurio-registry-avro + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + + + io.strimzi + kafka-oauth-client + + + + + io.quarkus + quarkus-junit5 + test + + + io.rest-assured + rest-assured + test + + + org.awaitility + awaitility + test + + + org.testcontainers + testcontainers + test + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + true + + -Xlint:unchecked + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + false + + org.jboss.logmanager.LogManager + + + + + + ${quarkus.platform.group-id} + quarkus-maven-plugin + ${quarkus.platform.version} + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-surefire-plugin.version} + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + + com.mycila + license-maven-plugin + ${mycila-license.version} + + true +
${maven.multiModuleProjectDirectory}/header.txt
+ + **/*.adoc + **/*.txt + **/LICENSE.txt + **/LICENSE + **/NOTICE.txt + **/NOTICE + **/README + **/pom.xml.versionsBackup + + + SLASHSTAR_STYLE + CAMEL_PROPERTIES_STYLE + CAMEL_PROPERTIES_STYLE + CAMEL_PROPERTIES_STYLE + + + ${maven.multiModuleProjectDirectory}/license-properties-headerdefinition.xml + +
+
+
+
+ + + + ${quarkus.platform.group-id} + quarkus-maven-plugin + + + build + + build + + + + + +
+ +
diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.jvm b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.jvm new file mode 100644 index 0000000000..1e65c9919f --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.jvm @@ -0,0 +1,71 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. +## --------------------------------------------------------------------------- + +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode +# +# Before building the container image run: +# +# ./mvnw package +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/camel-quarkus-examples-kafka-jvm . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/camel-quarkus-examples-kafka-jvm +# +# If you want to include the debug port into your docker image +# you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5005 +# +# Then run the container using : +# +# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/camel-quarkus-examples-kafka-jvm +# +### +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 + +ARG JAVA_PACKAGE=java-11-openjdk-headless +ARG RUN_JAVA_VERSION=1.3.8 +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +# Install java and the run-java script +# Also set up permissions for user `1001` +RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ + && microdnf update \ + && microdnf clean all \ + && mkdir /deployments \ + && chown 1001 /deployments \ + && chmod "g+rwX" /deployments \ + && chown 1001:root /deployments \ + && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \ + && chown 1001 /deployments/run-java.sh \ + && chmod 540 /deployments/run-java.sh \ + && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security + +# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size. 
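+# For example (an illustrative value only, not the one set below):
+#   JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -XshowSettings:vm"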
+ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" +# We make four distinct layers so if there are application changes the library layers can be re-used +COPY --chown=1001 target/quarkus-app/lib/ /deployments/lib/ +COPY --chown=1001 target/quarkus-app/*.jar /deployments/ +COPY --chown=1001 target/quarkus-app/app/ /deployments/app/ +COPY --chown=1001 target/quarkus-app/quarkus/ /deployments/quarkus/ + +EXPOSE 8080 +USER 1001 + +ENTRYPOINT [ "/deployments/run-java.sh" ] \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.native b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.native new file mode 100644 index 0000000000..04038f5647 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/docker/Dockerfile.native @@ -0,0 +1,44 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. +## --------------------------------------------------------------------------- + +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode +# +# Before building the container image run: +# +# ./mvnw package -Pnative +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.native -t quarkus/camel-quarkus-examples-kafka . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/camel-quarkus-examples-kafka +# +### +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 +WORKDIR /work/ +RUN chown 1001 /work \ + && chmod "g+rwX" /work \ + && chown 1001:root /work +COPY --chown=1001:root target/*-runner /work/application + +EXPOSE 8080 +USER 1001 + +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/java/org/acme/kafka/Routes.java b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/java/org/acme/kafka/Routes.java new file mode 100644 index 0000000000..d97a31d5bc --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/java/org/acme/kafka/Routes.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.acme.kafka; + +import javax.enterprise.context.ApplicationScoped; + +import org.apache.camel.builder.RouteBuilder; + +@ApplicationScoped +public class Routes extends RouteBuilder { + + @Override + public void configure() throws Exception { + + // kafka consumer + from("kafka:{{kafka.topic.name}}") + .routeId("FromKafka2Seda") + .log("Received : \"${body}\"") + .to("seda:kafka-messages"); + } +} diff --git a/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/resources/application.properties b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/resources/application.properties new file mode 100644 index 0000000000..ff595ba0ca --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-consumer/src/main/resources/application.properties @@ -0,0 +1,39 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. 
+## ---------------------------------------------------------------------------
+
+# Kafka topic name
+kafka.topic.name=test
+
+# How often the messages should be generated and pushed to the Kafka topic
+timer.period = 10000
+timer.delay = 10000
+
+# Uncomment to set up a Kafka instance with SASL OAuth Bearer
+camel.component.kafka.brokers =
+camel.component.kafka.security-protocol = SASL_SSL
+camel.component.kafka.sasl-mechanism = OAUTHBEARER
+camel.component.kafka.sasl-jaas-config = org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
+        oauth.client.id="" \
+        oauth.client.secret="" \
+        oauth.token.endpoint.uri="" ;
+camel.component.kafka.additional-properties[sasl.login.callback.handler.class] = io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler
+camel.component.kafka.additional-properties[apicurio.registry.url] =
+camel.component.kafka.additional-properties[apicurio.auth.client.id] =
+camel.component.kafka.additional-properties[apicurio.auth.client.secret] =
+camel.component.kafka.additional-properties[apicurio.auth.service.token.endpoint] =
+camel.component.kafka.additional-properties[apicurio.registry.use-specific-avro-reader] = true
+camel.component.kafka.valueDeserializer = io.apicurio.registry.serde.avro.AvroKafkaDeserializer
diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/README.adoc b/examples/camel-quarkus-kafka/kafka-registry-producer/README.adoc
new file mode 100644
index 0000000000..5db5c90fa1
--- /dev/null
+++ b/examples/camel-quarkus-kafka/kafka-registry-producer/README.adoc
@@ -0,0 +1,10 @@
+== Camel Quarkus Kafka with Service Registry
+
+1. After creating the Kafka and Service Registry instances, populate the credentials in `application.properties`.
+2. Run `mvn clean package`.
+3. Run `java -jar target/quarkus-app/quarkus-run.jar`.
+
+If the `test-value` schema has not been registered yet, the producer fails with an error like:
+
+2021-10-11 16:50:21,197 WARN  [org.apa.cam.com.tim.TimerConsumer] (Camel (camel-1) thread #0 - timer://foo) Error processing exchange. Exchange[57AD50F0C45CD89-0000000000000000]. Caused by: [io.apicurio.registry.rest.client.exception.ArtifactNotFoundException - No artifact with ID 'test-value' in group 'null' was found.]: io.apicurio.registry.rest.client.exception.ArtifactNotFoundException: No artifact with ID 'test-value' in group 'null' was found.
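+
+To resolve this, register the schema before starting the producer. As a rough sketch (the `REGISTRY_URL` and `TOKEN` values below are placeholders for your registry endpoint and OAuth bearer token), the artifact can be created with the registry's v2 REST API instead of through the UI:
+
+[source,shell]
+----
+# Placeholder values: adjust REGISTRY_URL and TOKEN for your instance
+$ curl -X POST "$REGISTRY_URL/apis/registry/v2/groups/default/artifacts" \
+    -H "Content-Type: application/json" \
+    -H "X-Registry-ArtifactId: test-value" \
+    -H "Authorization: Bearer $TOKEN" \
+    --data @user.avsc
+----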
+ diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/pom.xml b/examples/camel-quarkus-kafka/kafka-registry-producer/pom.xml new file mode 100644 index 0000000000..0256593123 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/pom.xml @@ -0,0 +1,193 @@ + + + + 4.0.0 + + io.apicurio + apicurio-registry-camel-quarkus-kafka-parent + 2.1.2-SNAPSHOT + ../pom.xml + + + apicurio-registry-camel-quarkus-kafka-producer + + + 2.3.0.Final + ${quarkus.platform.version} + + io.quarkus.platform + quarkus-bom + ${quarkus.platform.group-id} + quarkus-camel-bom + + UTF-8 + UTF-8 + 11 + 11 + ${maven.compiler.target} + ${maven.compiler.source} + + 3.8.0 + 3.2.0 + 3.1.0 + 0.7.2 + + + + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + ${camel-quarkus.platform.group-id} + ${camel-quarkus.platform.artifact-id} + ${camel-quarkus.platform.version} + pom + import + + + io.strimzi + kafka-oauth-client + ${kafka-oauth-client.version} + + + + + + + org.apache.camel.quarkus + camel-quarkus-microprofile-health + + + org.apache.camel.quarkus + camel-quarkus-kafka + + + org.apache.camel.quarkus + camel-quarkus-log + + + org.apache.camel.quarkus + camel-quarkus-timer + + + org.apache.camel.quarkus + camel-quarkus-seda + + + io.quarkus + quarkus-resteasy-jackson + + + io.quarkus + quarkus-kubernetes-config + + + io.quarkus + quarkus-apicurio-registry-avro + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + + + io.strimzi + kafka-oauth-client + + + + + io.quarkus + quarkus-junit5 + test + + + io.rest-assured + rest-assured + test + + + org.awaitility + awaitility + test + + + org.testcontainers + testcontainers + test + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + true + + -Xlint:unchecked + + + + + + ${quarkus.platform.group-id} + quarkus-maven-plugin + ${quarkus.platform.version} + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + + + + + ${quarkus.platform.group-id} + quarkus-maven-plugin + + + build + + build + + + + + + + + diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.jvm b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.jvm new file mode 100644 index 0000000000..1e65c9919f --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.jvm @@ -0,0 +1,71 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. 
+## --------------------------------------------------------------------------- + +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in JVM mode +# +# Before building the container image run: +# +# ./mvnw package +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.jvm -t quarkus/camel-quarkus-examples-kafka-jvm . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/camel-quarkus-examples-kafka-jvm +# +# If you want to include the debug port into your docker image +# you will have to expose the debug port (default 5005) like this : EXPOSE 8080 5005 +# +# Then run the container using : +# +# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/camel-quarkus-examples-kafka-jvm +# +### +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 + +ARG JAVA_PACKAGE=java-11-openjdk-headless +ARG RUN_JAVA_VERSION=1.3.8 +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +# Install java and the run-java script +# Also set up permissions for user `1001` +RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ + && microdnf update \ + && microdnf clean all \ + && mkdir /deployments \ + && chown 1001 /deployments \ + && chmod "g+rwX" /deployments \ + && chown 1001:root /deployments \ + && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \ + && chown 1001 /deployments/run-java.sh \ + && chmod 540 /deployments/run-java.sh \ + && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security + +# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size. +ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" +# We make four distinct layers so if there are application changes the library layers can be re-used +COPY --chown=1001 target/quarkus-app/lib/ /deployments/lib/ +COPY --chown=1001 target/quarkus-app/*.jar /deployments/ +COPY --chown=1001 target/quarkus-app/app/ /deployments/app/ +COPY --chown=1001 target/quarkus-app/quarkus/ /deployments/quarkus/ + +EXPOSE 8080 +USER 1001 + +ENTRYPOINT [ "/deployments/run-java.sh" ] \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.native b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.native new file mode 100644 index 0000000000..04038f5647 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/docker/Dockerfile.native @@ -0,0 +1,44 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. 
+## --------------------------------------------------------------------------- + +#### +# This Dockerfile is used in order to build a container that runs the Quarkus application in native (no JVM) mode +# +# Before building the container image run: +# +# ./mvnw package -Pnative +# +# Then, build the image with: +# +# docker build -f src/main/docker/Dockerfile.native -t quarkus/camel-quarkus-examples-kafka . +# +# Then run the container using: +# +# docker run -i --rm -p 8080:8080 quarkus/camel-quarkus-examples-kafka +# +### +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.3 +WORKDIR /work/ +RUN chown 1001 /work \ + && chmod "g+rwX" /work \ + && chown 1001:root /work +COPY --chown=1001:root target/*-runner /work/application + +EXPOSE 8080 +USER 1001 + +CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] \ No newline at end of file diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/Routes.java b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/Routes.java new file mode 100644 index 0000000000..cdd2aeb3c5 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/Routes.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.acme.kafka; + +import javax.enterprise.context.ApplicationScoped; +import org.apache.camel.builder.RouteBuilder; + +@ApplicationScoped +public class Routes extends RouteBuilder { + + @Override + public void configure() throws Exception { + // produces messages to kafka + + User user = new User(); + user.setName("John"); + user.setAge(36); + + from("timer:foo?period={{timer.period}}&delay={{timer.delay}}").routeId("FromTimer2Kafka") + .setBody(constant(user)).to("kafka:{{kafka.topic.name}}") + .log("Message sent correctly sent to the topic! 
: \"${body}\" "); + } +} diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/User.java b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/User.java new file mode 100644 index 0000000000..7b18fe473b --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/java/org/acme/kafka/User.java @@ -0,0 +1,435 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.acme.kafka; + +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; + +@org.apache.avro.specific.AvroGenerated +public class User extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 1548979804423630989L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"org.acme.kafka\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = new BinaryMessageEncoder(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = new BinaryMessageDecoder(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * + * @param resolver + * a {@link SchemaStore} used to find schemas by fingerprint + * + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this User to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * + * @throws java.io.IOException + * if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a User from a ByteBuffer. + * + * @param b + * a byte buffer holding serialized data for an instance of this class + * + * @return a User instance decoded from the given buffer + * + * @throws java.io.IOException + * if the given bytes could not be deserialized into an instance of this class + */ + public static User fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence name; + private int age; + + /** + * Default constructor. Note that this does not initialize fields to their default values from the schema. If that + * is desired then one should use newBuilder(). + */ + public User() { + } + + /** + * All-args constructor. 
+ * + * @param name + * The new value for name + * @param age + * The new value for age + */ + public User(java.lang.CharSequence name, java.lang.Integer age) { + this.name = name; + this.age = age; + } + + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return name; + case 1: + return age; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + name = (java.lang.CharSequence) value$; + break; + case 1: + age = (java.lang.Integer) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'name' field. + * + * @return The value of the 'name' field. + */ + public java.lang.CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * + * @param value + * the value to set. + */ + public void setName(java.lang.CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'age' field. + * + * @return The value of the 'age' field. + */ + public int getAge() { + return age; + } + + /** + * Sets the value of the 'age' field. + * + * @param value + * the value to set. + */ + public void setAge(int value) { + this.age = value; + } + + /** + * Creates a new User RecordBuilder. + * + * @return A new User RecordBuilder + */ + public static org.acme.kafka.User.Builder newBuilder() { + return new org.acme.kafka.User.Builder(); + } + + /** + * Creates a new User RecordBuilder by copying an existing Builder. + * + * @param other + * The existing builder to copy. + * + * @return A new User RecordBuilder + */ + public static org.acme.kafka.User.Builder newBuilder(org.acme.kafka.User.Builder other) { + if (other == null) { + return new org.acme.kafka.User.Builder(); + } else { + return new org.acme.kafka.User.Builder(other); + } + } + + /** + * Creates a new User RecordBuilder by copying an existing User instance. + * + * @param other + * The existing instance to copy. + * + * @return A new User RecordBuilder + */ + public static org.acme.kafka.User.Builder newBuilder(org.acme.kafka.User other) { + if (other == null) { + return new org.acme.kafka.User.Builder(); + } else { + return new org.acme.kafka.User.Builder(other); + } + } + + /** + * RecordBuilder for User instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence name; + private int age; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other + * The existing Builder to copy. 
+ */ + private Builder(org.acme.kafka.User.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.age)) { + this.age = data().deepCopy(fields()[1].schema(), other.age); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + } + + /** + * Creates a Builder by copying an existing User instance + * + * @param other + * The existing instance to copy. + */ + private Builder(org.acme.kafka.User other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.age)) { + this.age = data().deepCopy(fields()[1].schema(), other.age); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'name' field. + * + * @return The value. + */ + public java.lang.CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * + * @param value + * The value of 'name'. + * + * @return This builder. + */ + public org.acme.kafka.User.Builder setName(java.lang.CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'name' field. + * + * @return This builder. + */ + public org.acme.kafka.User.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'age' field. + * + * @return The value. + */ + public int getAge() { + return age; + } + + /** + * Sets the value of the 'age' field. + * + * @param value + * The value of 'age'. + * + * @return This builder. + */ + public org.acme.kafka.User.Builder setAge(int value) { + validate(fields()[1], value); + this.age = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'age' field has been set. + * + * @return True if the 'age' field has been set, false otherwise. + */ + public boolean hasAge() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'age' field. + * + * @return This builder. + */ + public org.acme.kafka.User.Builder clearAge() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public User build() { + try { + User record = new User(); + record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]); + record.age = fieldSetFlags()[1] ? 
this.age : (java.lang.Integer) defaultValue(fields()[1]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = (org.apache.avro.io.DatumWriter) MODEL$ + .createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = (org.apache.avro.io.DatumReader) MODEL$ + .createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.name); + + out.writeInt(this.age); + + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8) this.name : null); + + this.age = in.readInt(); + + } else { + for (int i = 0; i < 2; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8) this.name : null); + break; + + case 1: + this.age = in.readInt(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/resources/application.properties b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/resources/application.properties new file mode 100644 index 0000000000..dc65f9fb2c --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/src/main/resources/application.properties @@ -0,0 +1,40 @@ +## --------------------------------------------------------------------------- +## Licensed to the Apache Software Foundation (ASF) under one or more +## contributor license agreements. See the NOTICE file distributed with +## this work for additional information regarding copyright ownership. +## The ASF licenses this file to You under the Apache License, Version 2.0 +## (the "License"); you may not use this file except in compliance with +## the License. You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. 
+## --------------------------------------------------------------------------- + +#Kafka topic Name +kafka.topic.name=test + +# How often should the messages be generated and pushed to Kafka Topic +timer.period = 10000 +timer.delay = 10000 + +# uncomment to set Kafka instance with SASL Oauth Bearer +camel.component.kafka.brokers = +camel.component.kafka.security-protocol = SASL_SSL +camel.component.kafka.sasl-mechanism = OAUTHBEARER +camel.component.kafka.sasl-jaas-config = org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \ + oauth.client.id="" \ + oauth.client.secret="" \ + oauth.token.endpoint.uri="" ; +camel.component.kafka.additional-properties[sasl.login.callback.handler.class] = io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler +camel.component.kafka.additional-properties[apicurio.registry.url] = +camel.component.kafka.additional-properties[apicurio.auth.client.id] = +camel.component.kafka.additional-properties[apicurio.auth.client.secret] = +camel.component.kafka.additional-properties[apicurio.auth.service.token.endpoint] = +camel.component.kafka.additional-properties[apicurio.registry.avro-datum-provider] = io.apicurio.registry.serde.avro.ReflectAvroDatumProvider +camel.component.kafka.additional-properties[apicurio.registry.artifact-resolver-strategy] = io.apicurio.registry.serde.avro.strategy.RecordIdStrategy +camel.component.kafka.valueSerializer = io.apicurio.registry.serde.avro.AvroKafkaSerializer diff --git a/examples/camel-quarkus-kafka/kafka-registry-producer/user.avsc b/examples/camel-quarkus-kafka/kafka-registry-producer/user.avsc new file mode 100644 index 0000000000..7ecaa58b62 --- /dev/null +++ b/examples/camel-quarkus-kafka/kafka-registry-producer/user.avsc @@ -0,0 +1,15 @@ +{ + "namespace": "org.acme.kafka", + "type": "record", + "name": "User", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + } + ] +} diff --git a/examples/camel-quarkus-kafka/pom.xml b/examples/camel-quarkus-kafka/pom.xml new file mode 100644 index 0000000000..eeddd948ba --- /dev/null +++ b/examples/camel-quarkus-kafka/pom.xml @@ -0,0 +1,20 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.1.2-SNAPSHOT + ../pom.xml + + + apicurio-registry-camel-quarkus-kafka-parent + pom + + + kafka-registry-producer + kafka-registry-consumer + + + diff --git a/examples/cloudevents/.gitignore b/examples/cloudevents/.gitignore new file mode 100644 index 0000000000..badea1b484 --- /dev/null +++ b/examples/cloudevents/.gitignore @@ -0,0 +1,36 @@ +# Eclipse +.project +.classpath +.settings/ +bin/ + +# IntelliJ +.idea +*.ipr +*.iml +*.iws + +# NetBeans +nb-configuration.xml + +# Visual Studio Code +.vscode +.factorypath + +# OSX +.DS_Store + +# Vim +*.swp +*.swo + +# patch +*.orig +*.rej + +# Maven +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +release.properties \ No newline at end of file diff --git a/examples/cloudevents/README.md b/examples/cloudevents/README.md new file mode 100644 index 0000000000..dde4cbb42b --- /dev/null +++ b/examples/cloudevents/README.md @@ -0,0 +1,122 @@ +# Apicurio Registry CloudEvents example + +This is an example application that implements a REST API that consumes and produces CloudEvents. + +This example application is implemented thanks to an experimental library implemented within the Apicurio Registry project. This library is used to validate incoming and outgoing CloudEvents messages in a REST API. 
+The validation is performed using JSON schemas that are previously stored in Apicurio Registry.
+
+The idea behind this library is to provide a tool for serialization and deserialization of CloudEvents that uses Apicurio Registry to store the schemas used for the serialization, deserialization, or validation of CloudEvents data. This library is built on top of the CloudEvents Java SDK, which, among other things, provides the CloudEvent Java type.
+
+For this PoC we focused only on **CloudEvents and HTTP**. This means that, at least for now, this library primarily lets you use CloudEvents with REST services and REST clients. Also, this library only provides support for JSON schemas; support for other formats such as Avro, Protobuf, etc. could easily be implemented.
+
+### Apicurio Registry CloudEvents Serde and Kafka
+
+We decided not to focus on implementing a set of Kafka Serde classes that work with CloudEvents.
+
+We are open to discussion if you think it would be interesting to do serdes plus validation of CloudEvents using Apicurio Registry **but** over another protocol or transport such as Kafka, AMQP, or MQTT. Feel free to create an issue or to reach out to the Apicurio team.
+
+After implementing the serdes library for REST services we considered implementing the equivalent for Kafka, but we dismissed this effort; you can find some of our reasons below.
+
+Our current Serdes classes could easily be improved to make them more compatible with CloudEvents-based use cases, and this approach would be preferable to implementing a new set of Kafka Serdes classes for CloudEvents.
+
+The [KafkaConsumer API](https://kafka.apache.org/26/javadoc/index.html?org/apache/kafka/clients/consumer/KafkaConsumer.html) binds the consumer to one class that will be the type of the message value it receives after deserialization; this means that all messages a KafkaConsumer receives are going to have the same structure.
+```
+KafkaConsumer<String, MyValueType> consumer;
+```
+However, for CloudEvents use cases we don't see any value in having a Kafka Serde that works with a generic type, such as the Java class for CloudEvents would be.
+```
+KafkaConsumer<String, CloudEvent<?>> consumer;
+```
+If required, this approach would be easy to achieve with some improvements to our existing Serdes classes.
+
+## Apicurio Registry CloudEvents Serde library
+
+The Apicurio Registry CloudEvents library consists of two Maven modules:
+- `apicurio-registry-utils-cloud-events-serde` provides the serialization and deserialization API, with data validation included. This component calls Apicurio Registry to fetch the schemas required to perform the serialization/deserialization/validation.
+- `apicurio-registry-utils-cloud-events-provider` contains a JAX-RS provider implemented on top of the CloudEvents [Java SDK for RESTful services](https://github.com/cloudevents/sdk-java/tree/master/http/restful-ws). This provider allows you to implement REST APIs that consume and produce CloudEvents, like the CloudEvents SDK does, but validating the CloudEvents data and ensuring the data adheres to its respective schema stored in Apicurio Registry.
+
+This library is experimental and has not been released, nor is it available in the main branch of the Apicurio Registry project,
+so if you are interested you can find the source code [here](https://github.com/Apicurio/apicurio-registry/tree/cloud-events/utils/cloud-events).
+Also, to test the code (and to run this demo) you have to build it from source.
+
+```
+git clone -b cloud-events https://github.com/Apicurio/apicurio-registry.git
+
+cd apicurio-registry
+
+mvn install -am -pl 'utils/cloud-events/cloud-events-provider' -Dmaven.javadoc.skip=true
+
+```
+
+## Running the demo
+
+After installing the `apicurio-registry-utils-cloud-events-provider` library in your local Maven repo, you have to build this app:
+```
+mvn package
+```
+
+Once that's done you can start Apicurio Registry; we suggest doing it with a container:
+```
+docker run -p 8080:8080 docker.io/apicurio/apicurio-registry-mem:1.3.2.Final
+```
+
+Then create the artifacts in the registry that are used by the CloudEvents serde to validate the data that the REST API will receive:
+```
+curl --data "@new-order-schema.json" -X POST -i -H "X-Registry-ArtifactId: io.apicurio.examples.new-order" http://localhost:8080/api/artifacts
+```
+```
+curl --data "@processed-order-schema.json" -X POST -i -H "X-Registry-ArtifactId: io.apicurio.examples.processed-order" http://localhost:8080/api/artifacts
+```
+
+Finally, it's time to run this demo app:
+```
+java -jar target/cloudevents-example-*-runner.jar
+```
+
+## Test the app
+
+To test the app we are going to make a few HTTP requests sending CloudEvents to the API.
+
+Previously we created the artifact `io.apicurio.examples.new-order` in the registry with its JSON schema.
+
+With this request we are going to send a CloudEvent of type `new-order` and dataschema `/apicurio/io.apicurio.examples.new-order/1` to the path `/order`. The serdes layer will read that dataschema and fetch the JSON schema from
+Apicurio Registry in order to validate that the JSON data adheres to the schema.
+The server responds with another CloudEvent of type `io.apicurio.examples.processed-order` that has also been validated against its stored schema in Apicurio Registry.
+```
+$ curl -X POST -i -H "Content-Type: application/json" -H "ce-dataschema:/apicurio/io.apicurio.examples.new-order/1" -H "ce-type:new-order" -H "ce-source:test" -H "ce-id:aaaaa" -H "ce-specversion:1.0" --data '{"itemId":"abcde","quantity":5}' http://localhost:8082/order
+HTTP/1.1 200 OK
+transfer-encoding: chunked
+ce-source: apicurio-registry-example-api
+ce-specversion: 1.0
+ce-type: io.apicurio.examples.processed-order
+ce-id: 005762b9-9bea-4f6e-bf78-5ac8f7c99429
+ce-dataschema: apicurio-global-id-2
+Content-Type: application/json
+
+{"orderId":"c763f2b4-2356-4124-a690-b205f9baf338","itemId":"abcde","quantity":5,"processingTimestamp":"2021-01-20T16:26:40.128Z","processedBy":"orders-service","error":null,"approved":true}
+```
+
+This next curl command sends a request to another endpoint in this application. The important part here is the implementation: the `purchase` endpoint shows the usage of the CloudEvents serde library in REST clients, allowing producers of events to validate the CloudEvents they produce.
+
+```
+$ curl -i http://localhost:8082/purchase/abc/5
+HTTP/1.1 200 OK
+ce-source: apicurio-registry-example-api
+transfer-encoding: chunked
+ce-specversion: 1.0
+ce-type: io.apicurio.examples.processed-order
+ce-id: f1eabd84-ad78-4beb-9c6c-04f843abf669
+Content-Length: 187
+ce-dataschema: apicurio-global-id-2
+Content-Type: application/json
+
+{"orderId":"29606862-e74c-47b4-95d0-b59289ea023c","itemId":"abc","quantity":5,"processingTimestamp":"2021-01-20T16:32:06.198Z","processedBy":"orders-service","error":null,"approved":true}
+
+```
+
+This command shows an example of what happens when you try to send a CloudEvent using a non-existent schema.
+``` +$ curl -X POST -i -H "Content-Type: application/json" -H "ce-type:io.apicurio.examples.test" -H "ce-source:test" -H "ce-id:aaaaa" -H "ce-specversion:1.0" --data '{"itemId":"abcde","quantity":5}' http://localhost:8082/order +HTTP/1.1 404 Not Found +Content-Length: 0 +``` diff --git a/examples/cloudevents/new-order-schema.json b/examples/cloudevents/new-order-schema.json new file mode 100644 index 0000000000..4fd53dc48c --- /dev/null +++ b/examples/cloudevents/new-order-schema.json @@ -0,0 +1,16 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "itemId": { + "type": "string" + }, + "quantity": { + "type": "integer" + } + }, + "required": [ + "itemId", + "quantity" + ] +} \ No newline at end of file diff --git a/examples/cloudevents/pom.xml b/examples/cloudevents/pom.xml new file mode 100644 index 0000000000..e890c008c6 --- /dev/null +++ b/examples/cloudevents/pom.xml @@ -0,0 +1,154 @@ + + + 4.0.0 + + + io.apicurio + apicurio-registry-examples + 1.3.3-SNAPSHOT + ../pom.xml + + + cloudevents-example + + + 3.8.1 + true + 1.8 + 1.8 + UTF-8 + UTF-8 + 1.8.1.Final + quarkus-universe-bom + io.quarkus + 1.8.1.Final + 3.0.0-M5 + + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + + + + + io.apicurio + apicurio-registry-utils-cloud-events-provider + ${apicurio-registry.version} + + + + io.quarkus + quarkus-rest-client + + + + io.cloudevents + cloudevents-api + 2.0.0-milestone3 + + + + io.cloudevents + cloudevents-core + 2.0.0-milestone3 + + + + io.quarkus + quarkus-junit5 + test + + + io.rest-assured + rest-assured + test + + + io.quarkus + quarkus-resteasy-jackson + + + + org.testcontainers + junit-jupiter + 1.15.1 + test + + + + + + + io.quarkus + quarkus-maven-plugin + ${quarkus-plugin.version} + + + + generate-code + generate-code-tests + build + + + + + + maven-compiler-plugin + ${compiler-plugin.version} + + + maven-surefire-plugin + ${surefire-plugin.version} + + + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + + + native + + + native + + + + + + maven-failsafe-plugin + ${surefire-plugin.version} + + + + integration-test + verify + + + + ${project.build.directory}/${project.build.finalName}-runner + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + + + + native + + + + diff --git a/examples/cloudevents/processed-order-schema.json b/examples/cloudevents/processed-order-schema.json new file mode 100644 index 0000000000..e638ba560e --- /dev/null +++ b/examples/cloudevents/processed-order-schema.json @@ -0,0 +1,35 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "approved": { + "type": "boolean" + }, + "error": { + "type": ["string", "null"] + }, + "itemId": { + "type": "string" + }, + "orderId": { + "type": "string" + }, + "processedBy": { + "type": "string" + }, + "processingTimestamp": { + "type": "string" + }, + "quantity": { + "type": "integer" + } + }, + "required": [ + "approved", + "itemId", + "orderId", + "processedBy", + "processingTimestamp", + "quantity" + ] + } \ No newline at end of file diff --git a/examples/cloudevents/src/main/java/io/apicurio/registry/examples/NewOrder.java b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/NewOrder.java new file mode 100644 index 0000000000..a95b6d36f5 --- /dev/null +++ b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/NewOrder.java @@ -0,0 +1,47 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.apicurio.registry.examples; + +import io.quarkus.runtime.annotations.RegisterForReflection; + +/** + * @author Fabian Martinez + */ +@RegisterForReflection +public class NewOrder { + + public static final String newOrderEventType = "io.apicurio.examples.new-order"; + + private String itemId; + private Integer quantity; + + public String getItemId() { + return itemId; + } + + public void setItemId(String itemId) { + this.itemId = itemId; + } + + public Integer getQuantity() { + return quantity; + } + + public void setQuantity(Integer quantity) { + this.quantity = quantity; + } + +} \ No newline at end of file diff --git a/examples/cloudevents/src/main/java/io/apicurio/registry/examples/OrdersResource.java b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/OrdersResource.java new file mode 100644 index 0000000000..c9903e683b --- /dev/null +++ b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/OrdersResource.java @@ -0,0 +1,108 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.apicurio.registry.examples; + +import java.io.IOException; +import java.net.URI; +import java.time.Instant; +import java.util.UUID; + +import javax.enterprise.context.ApplicationScoped; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.client.Entity; +import javax.ws.rs.core.Response; + +import org.jboss.logging.Logger; +import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; + +import io.apicurio.registry.utils.cloudevents.CloudEvent; +import io.apicurio.registry.utils.cloudevents.CloudEventsWsProvider; +import io.cloudevents.core.builder.CloudEventBuilder; + +/** + * @author Fabian Martinez + */ +@ApplicationScoped +@Path("/") +public class OrdersResource { + + private static final Logger log = Logger.getLogger(OrdersResource.class); + + @POST + @Path("order") + public CloudEvent newOrder(CloudEvent event) throws JsonParseException, JsonMappingException, IOException { + log.info("Received cloud event of type "+event.type()); + + NewOrder neworder = event.data(); + + ProcessedOrder order = new ProcessedOrder(); + order.setOrderId(UUID.randomUUID().toString()); + log.info("Processing order "+order.getOrderId()); + order.setItemId(neworder.getItemId()); + order.setQuantity(neworder.getQuantity()); + order.setProcessingTimestamp(Instant.now().toString()); + order.setProcessedBy("orders-service"); + order.setApproved(true); + + return CloudEvent.from( + CloudEventBuilder.v1() + .withId(UUID.randomUUID().toString()) + .withSource(URI.create("apicurio-registry-example-api")) + .withType(ProcessedOrder.processedOrderEventType) + .build(), + + "/apicurio/"+ProcessedOrder.processedOrderEventType+"/1", + + order); + } + + @GET + @Path("/purchase/{item-id}/{quantity}") + public Response start(@PathParam("item-id") String itemId, @PathParam("quantity") Integer quantity) { + + NewOrder order = new NewOrder(); + order.setItemId(itemId); + order.setQuantity(quantity); + + CloudEvent ce = CloudEvent.from( + CloudEventBuilder.v1() + .withId(UUID.randomUUID().toString()) + .withSource(URI.create("apicurio-registry-example-client-test")) + .withType(NewOrder.newOrderEventType) + .build(), + + "/apicurio/"+NewOrder.newOrderEventType+"/1", + + order); + + Response res = ResteasyClientBuilder.newClient().target("http://localhost:8082/order") + + //magic happening here + .register(CloudEventsWsProvider.class) + + .request() + .buildPost(Entity.entity(ce, "application/json")) + .invoke(); + return res; + } + +} \ No newline at end of file diff --git a/examples/cloudevents/src/main/java/io/apicurio/registry/examples/ProcessedOrder.java b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/ProcessedOrder.java new file mode 100644 index 0000000000..76242d76f7 --- /dev/null +++ b/examples/cloudevents/src/main/java/io/apicurio/registry/examples/ProcessedOrder.java @@ -0,0 +1,93 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.apicurio.registry.examples; + +import io.quarkus.runtime.annotations.RegisterForReflection; + +/** + * @author Fabian Martinez + */ +@RegisterForReflection +public class ProcessedOrder { + + public static final String processedOrderEventType = "io.apicurio.examples.processed-order"; + + private String orderId; + private String itemId; + private Integer quantity; + + private String processingTimestamp; + private String processedBy; + private String error; + private Boolean approved; + + public String getOrderId() { + return orderId; + } + + public void setOrderId(String orderId) { + this.orderId = orderId; + } + + public String getItemId() { + return itemId; + } + + public void setItemId(String itemId) { + this.itemId = itemId; + } + + public Integer getQuantity() { + return quantity; + } + + public void setQuantity(Integer quantity) { + this.quantity = quantity; + } + + public String getProcessingTimestamp() { + return processingTimestamp; + } + + public void setProcessingTimestamp(String processingTimestamp) { + this.processingTimestamp = processingTimestamp; + } + + public String getProcessedBy() { + return processedBy; + } + + public void setProcessedBy(String processedBy) { + this.processedBy = processedBy; + } + + public String getError() { + return error; + } + + public void setError(String error) { + this.error = error; + } + + public Boolean getApproved() { + return approved; + } + + public void setApproved(Boolean approved) { + this.approved = approved; + } + +} \ No newline at end of file diff --git a/examples/cloudevents/src/main/resources/application.properties b/examples/cloudevents/src/main/resources/application.properties new file mode 100644 index 0000000000..a0731501c7 --- /dev/null +++ b/examples/cloudevents/src/main/resources/application.properties @@ -0,0 +1,8 @@ +# Configuration file +# key = value + +quarkus.log.level=DEBUG + +quarkus.http.port=8082 + +apicurio.registry.url=http://localhost:8080/api \ No newline at end of file diff --git a/examples/confluent-serdes/pom.xml b/examples/confluent-serdes/pom.xml new file mode 100644 index 0000000000..27fd32f501 --- /dev/null +++ b/examples/confluent-serdes/pom.xml @@ -0,0 +1,60 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-confluent-serdes + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + io.confluent + kafka-avro-serializer + ${confluent.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + + + + confluent + Confluent + https://packages.confluent.io/maven/ + + + jitpack.io + https://jitpack.io + + + + diff --git a/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java b/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java new file mode 100644 index 0000000000..7a7abc2ae7 --- /dev/null +++ b/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java @@ -0,0 +1,236 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.confluent.serdes; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.RestService; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.confluent.kafka.serializers.KafkaAvroSerializer; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario where applications use a mix of Confluent and Apicurio Registry serdes classes. This + * example uses the Confluent serializer for the producer and the Apicurio Registry deserializer + * class for the consumer. + * + *
+ * <ol>
+ *   <li>Configuring a Confluent Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Avro schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
+ * + * @author eric.wittmann@gmail.com + */ +public class ConfluentSerdesExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String CCOMPAT_API_URL = "http://localhost:8080/apis/ccompat/v6"; + + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = ConfluentSerdesExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Timestamp\",\"type\":\"long\"}]}"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + ConfluentSerdesExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + Schema schema = new Schema.Parser().parse(SCHEMA); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Timestamp", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Timestamp"))); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
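+     * Note that the value serializer is Confluent's KafkaAvroSerializer, pointed at the
+     * registry's "ccompat" compatibility endpoint, while the key serializer is a plain
+     * StringSerializer.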
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + + configureSecurityIfPresent(props); + + RestService restService = new RestService(CCOMPAT_API_URL); + final Map restServiceProperties = new HashMap<>(); + //If auth is enabled using the env var, we try to configure it + if (Boolean.parseBoolean(System.getenv("CONFIGURE_AUTH"))) { + restServiceProperties.put("basic.auth.credentials.source", "USER_INFO"); + restServiceProperties.put("schema.registry.basic.auth.user.info", String.format("%s:%s", System.getenv(SerdeConfig.AUTH_CLIENT_ID), System.getenv(SerdeConfig.AUTH_CLIENT_SECRET))); + } + + CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restService, 100, restServiceProperties); + + Map properties = new HashMap<>(); + + // Configure Service Registry location (Confluent API) + properties.put("schema.registry.url", CCOMPAT_API_URL); + properties.put("auto.register.schemas", "true"); + // Map the topic name to the artifactId in the registry + properties.put("value.subject.name.strategy", "io.confluent.kafka.serializers.subject.TopicRecordNameStrategy"); + + // Use the Confluent provided Kafka Serializer for Avro + KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(schemaRegistryClient, properties); + StringSerializer keySerializer = new StringSerializer(); + + // Create the Kafka producer + Producer producer = new KafkaProducer(props, keySerializer, valueSerializer); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Enable "Confluent Compatible API" mode in the Apicurio Registry deserializer + props.putIfAbsent(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, Boolean.TRUE); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. 
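+        // (configureSecurityIfPresent() reads SerdeConfig.AUTH_TOKEN_ENDPOINT, AUTH_CLIENT_ID
+        // and AUTH_CLIENT_SECRET from the environment and, when a token endpoint is set,
+        // switches the Kafka client to OAUTHBEARER over SASL_SSL.)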
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + +} diff --git a/examples/custom-resolver/pom.xml b/examples/custom-resolver/pom.xml new file mode 100644 index 0000000000..2bf8ff2d84 --- /dev/null +++ b/examples/custom-resolver/pom.xml @@ -0,0 +1,33 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-custom-resolver + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + diff --git a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/Config.java b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/Config.java new file mode 100644 index 0000000000..06c509de6d --- /dev/null +++ b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/Config.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.custom.resolver; + +/** + * @author eric.wittmann@gmail.com + */ +public class Config { + + public static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + public static final String SERVERS = "localhost:9092"; + public static final String TOPIC_NAME = CustomSchemaResolverExample.class.getSimpleName(); + public static final String SUBJECT_NAME = "Greeting"; + public static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + +} diff --git a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java new file mode 100644 index 0000000000..660b34af0a --- /dev/null +++ b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.custom.resolver; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.avro.Schema; +import org.apache.kafka.common.header.Headers; + +import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.AbstractSchemaResolver; +import io.apicurio.registry.serde.ParsedSchema; +import io.apicurio.registry.serde.SchemaLookupResult; +import io.apicurio.registry.serde.SchemaParser; +import io.apicurio.registry.serde.avro.AvroSchemaUtils; +import io.apicurio.registry.serde.strategy.ArtifactReference; +import io.apicurio.registry.types.ArtifactType; + +/** + * A custom schema resolver that simply uses the Avro schema found in the {@link Config} + * class - and ensures that the schema exists in the registry (so that the deserializer + * is guaranteed to be able to retrieve the exact schema used). 
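+ * <p>
+ * Note: as the inline comment in resolveSchema() points out, a production resolver would
+ * typically also cache the globalId, to avoid a registry round-trip for every message.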
+ * @author eric.wittmann@gmail.com + */ +public class CustomSchemaResolver extends AbstractSchemaResolver { + + protected final Map> schemaLookupCacheByContent = new ConcurrentHashMap<>(); + + /** + * @see io.apicurio.registry.serde.SchemaResolver#configure(java.util.Map, boolean, io.apicurio.registry.serde.SchemaParser) + */ + @Override + public void configure(Map configs, boolean isKey, SchemaParser schemaMapper) { + super.configure(configs, isKey, schemaMapper); + } + + /** + * @see io.apicurio.registry.serde.SchemaResolver#resolveSchema(java.lang.String, org.apache.kafka.common.header.Headers, java.lang.Object, io.apicurio.registry.serde.ParsedSchema) + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Override + public SchemaLookupResult resolveSchema(String topic, Headers headers, D data, ParsedSchema parsedSchema) { + System.out.println("[CustomSchemaResolver] Resolving a schema for topic: " + topic); + String schema = Config.SCHEMA; + + return schemaLookupCacheByContent.computeIfAbsent(schema, (schemaData) -> { + String groupId = "default"; + String artifactId = topic + "-value"; + Schema schemaObj = AvroSchemaUtils.parse(schema); + + ByteArrayInputStream schemaContent = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); + // Ensure the schema exists in the schema registry. + ArtifactMetaData metaData = client.createArtifact(groupId, artifactId, ArtifactType.AVRO, IfExists.RETURN_OR_UPDATE, schemaContent); + // Note, we could be caching the globalId here rather than hit the registry every time. + + SchemaLookupResult result = SchemaLookupResult.builder() + .groupId(groupId) + .artifactId(artifactId) + .version(String.valueOf(metaData.getVersion())) + .globalId(metaData.getGlobalId()) + .schema(schemaObj) + .rawSchema(schema.getBytes(StandardCharsets.UTF_8)) + .build(); + + // Also update the schemaCacheByGlobalId - useful if this resolver is used by both + // the serializer and deserializer in the same Java application. + return schemaCache.getByGlobalId(metaData.getGlobalId(), (id) -> result); + }); + } + + /** + * @see io.apicurio.registry.serde.SchemaResolver#resolveSchemaByArtifactReference(io.apicurio.registry.serde.strategy.ArtifactReference) + */ + @Override + public SchemaLookupResult resolveSchemaByArtifactReference(ArtifactReference reference) { + throw new UnsupportedOperationException("resolveSchemaByArtifactReference() is not supported by this implementation."); + } + + /** + * @see io.apicurio.registry.serde.SchemaResolver#resolveSchemaByGlobalId(long) + */ + @Override + public SchemaLookupResult resolveSchemaByGlobalId(long globalId) { + return super.resolveSchemaByGlobalId(globalId); + } + +} diff --git a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java new file mode 100644 index 0000000000..00b4fbb142 --- /dev/null +++ b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java @@ -0,0 +1,206 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.custom.resolver; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Register the Avro schema in the registry using a custom schema resolver</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092 or the value must be changed accordingly.</li>
+ *   <li>Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.</li>
+ * </ul>
+ * + * @author eric.wittmann@gmail.com + * @author carles.arnal@redhat.com + */ +public class CustomSchemaResolverExample { + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + CustomSchemaResolverExample.class.getSimpleName()); + String topicName = Config.TOPIC_NAME; + String subjectName = Config.SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + Schema schema = new Schema.Parser().parse(Config.SCHEMA); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + Config.TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); + // Use our custom resolver here. + props.putIfAbsent(SerdeConfig.SCHEMA_RESOLVER, CustomSchemaResolver.class.getName()); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. 
+ */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + Config.TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/custom-strategy/pom.xml b/examples/custom-strategy/pom.xml new file mode 100644 index 0000000000..3c4cbc33d1 --- /dev/null +++ b/examples/custom-strategy/pom.xml @@ -0,0 +1,33 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-custom-strategy + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + diff --git a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/Config.java b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/Config.java new file mode 100644 index 0000000000..3747778982 --- /dev/null +++ b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/Config.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.custom.strategy; + +/** + * @author eric.wittmann@gmail.com + */ +public class Config { + + public static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + public static final String SERVERS = "localhost:9092"; + public static final String TOPIC_NAME = CustomStrategyExample.class.getSimpleName(); + public static final String SUBJECT_NAME = "Greeting"; + public static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + +} diff --git a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java new file mode 100644 index 0000000000..52ab1a95ea --- /dev/null +++ b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java @@ -0,0 +1,20 @@ +package io.apicurio.registry.examples.custom.strategy; + +import io.apicurio.registry.serde.strategy.ArtifactReference; +import io.apicurio.registry.serde.strategy.ArtifactResolverStrategy; + +public class CustomArtifactResolverStrategy implements ArtifactResolverStrategy { + + @Override + public ArtifactReference artifactReference(String topic, boolean isKey, Object schema) { + return ArtifactReference.builder() + .artifactId("my-artifact-" + topic + (isKey ? "-key" : "-value")) + .build(); + } + + @Override + public boolean loadSchema() { + return false; + } + +} diff --git a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java new file mode 100644 index 0000000000..b2c02573cf --- /dev/null +++ b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java @@ -0,0 +1,227 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.custom.strategy; + +import java.io.ByteArrayInputStream; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.types.ArtifactType; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Register the Avro schema in the registry using a custom artifact resolver strategy</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092 or the value must be changed accordingly.</li>
+ *   <li>Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.</li>
+ * </ul>
+ * + * @author eric.wittmann@gmail.com + * @author carles.arnal@redhat.com + */ +public class CustomStrategyExample { + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + CustomStrategyExample.class.getSimpleName()); + String topicName = Config.TOPIC_NAME; + String subjectName = Config.SUBJECT_NAME; + + RegistryClient client = createRegistryClient(Config.REGISTRY_URL); + String artifactId = "my-artifact-" + topicName + "-value"; + client.createArtifact(null, artifactId, ArtifactType.AVRO, IfExists.RETURN, new ByteArrayInputStream(Config.SCHEMA.getBytes())); + System.out.println("Created artifact " + artifactId); + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + Schema schema = new Schema.Parser().parse(Config.SCHEMA); + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + Config.TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); + props.putIfAbsent(SerdeConfig.FIND_LATEST_ARTIFACT, true); + // Use our custom artifact strategy here. + props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, CustomArtifactResolverStrategy.class.getName()); + //Just if security values are present, then we configure them. 
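+        // (As in the other examples, the helper below only applies the OAUTHBEARER/SASL_SSL
+        // settings when the SerdeConfig.AUTH_TOKEN_ENDPOINT environment variable is set.)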
+ configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + Config.TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + + public static RegistryClient createRegistryClient(String registryUrl) { + //TODO old version of oidcauth class available because of serdes? 
+ // final String tokenEndpoint = System.getenv("AUTH_TOKEN_ENDPOINT"); + // if (tokenEndpoint != null) { + // final String authClient = System.getenv("AUTH_CLIENT_ID"); + // final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); + // return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), new OidcAuth(tokenEndpoint, authClient, authSecret, Optional.empty())); + // } else { + return RegistryClientFactory.create(registryUrl); + // } + } +} diff --git a/examples/debezium-openshift/README.md b/examples/debezium-openshift/README.md new file mode 100644 index 0000000000..983cadd83a --- /dev/null +++ b/examples/debezium-openshift/README.md @@ -0,0 +1,159 @@ +# Debezium and Apicurio Registry on OpenShift + +This example contains a simple application that uses Debezium with Apicurio Registry, deployed on OpenShift. + +## Prerequisites + +1. Prepare or provision an OpenShift cluster. + +2. Install the following operators: + +- AMQ Streams (tested on `2.5.0-0` / Kafka `3.4`) +- Red Hat Integration - Service Registry Operator (tested on `2.2.2`) + +3. Configure `oc`: + +```shell +oc login #... +export NAMESPACE="example" +oc new-project $NAMESPACE +``` + +4. Prepare an image repository for example app images, and configure: + +```shell +export APP_IMAGE_GROUP="quay.io/myorg" +``` + +which will result in `quay.io/myorg/apicurio-registry-examples-debezium-openshift:latest` image name. + +5. Prepare an image repository for customized Kafka Connect images, and configure: + +```shell +export KAFKA_CONNECT_IMAGE="$APP_IMAGE_GROUP/kafka-connect-example:latest" +``` + +which will result in `quay.io/myorg/kafka-connect-example:latest` image name. + +6. Create a pull secret for the customized Kafka Connect image repository. This example command creates it from +your local docker config file: + +```shell +oc create secret generic example-components-pull-secret \ + --from-file=.dockerconfigjson=$HOME/.docker/config.json \ + --type=kubernetes.io/dockerconfigjson +``` + +## Deploy example components: MySQL, Kafka, and Debezium Kafka connector + +Review the *example-components.yaml* template, then apply it: + +```shell +oc process -f example-components.yaml \ + -p NAMESPACE=$NAMESPACE \ + -p KAFKA_CONNECT_IMAGE=$KAFKA_CONNECT_IMAGE \ + | oc apply -f - +``` + +Wait for all components to deploy (some pods may be failing for a short time). + +After some time, you should be able to see the topics created by Debezium, for example: + +```shell +oc get --no-headers -o custom-columns=":metadata.name" kafkatopic +``` + +``` +connect-cluster-configs +connect-cluster-offsets +connect-cluster-status +consumer-offsets---84e7a678d08f4bd226872e5cdd4eb527fadc1c6a +example +example.inventory.addresses +example.inventory.customers +example.inventory.orders +example.inventory.products +example.inventory.products-on-hand---406eef91b4bed15190ce4cbe31cee9b5db4c0133 +kafkasql-journal +schema-changes.inventory +strimzi-store-topic---effb8e3e057afce1ecf67c3f5d8e4e3ff177fc55 +strimzi-topic-operator-kstreams-topic-store-changelog---b75e702040b99be8a9263134de3507fc0cc4017b +``` + +Apicurio Registry should contain the AVRO schemas registered by Debezium. 
Get and configure Apicurio Registry URL by +running `oc route`: + +```shell +export REGISTRY_URL="http://example-components-registry.example.router-default.apps.mycluster.com" +``` + +Then, you can list the schemas using the following example command: + +```shell +curl -s "$REGISTRY_URL/apis/registry/v2/search/artifacts?limit=50&order=asc&orderby=name" \ + | jq -r ".artifacts[] | .id" \ + | sort +``` + +``` +event.block +example.inventory.addresses-key +example.inventory.addresses-value +example.inventory.addresses.Value +example.inventory.customers-key +example.inventory.customers-value +example.inventory.customers.Value +example.inventory.orders-key +example.inventory.orders-value +example.inventory.orders.Value +example.inventory.products-key +example.inventory.products_on_hand-key +example.inventory.products_on_hand-value +example.inventory.products_on_hand.Value +example.inventory.products-value +example.inventory.products.Value +example-key +example-value +io.debezium.connector.mysql.Source +io.debezium.connector.schema.Change +io.debezium.connector.schema.Column +io.debezium.connector.schema.Table +``` + +From the Apicurio Registry URL, we can extract the `INGRESS_ROUTER_CANONICAL_HOSTNAME` variable that will be used later: + +```shell +export INGRESS_ROUTER_CANONICAL_HOSTNAME="router-default.apps.mycluster.com" +``` + +## Build the example application + +```shell +mvn clean install \ + -Dregistry.url="$REGISTRY_URL" \ + -Dquarkus.container-image.build=true \ + -Dquarkus.container-image.group=$APP_IMAGE_GROUP \ + -Dquarkus.container-image.tag=latest +``` + +Push the application image: + +```shell +docker push $APP_IMAGE_GROUP/apicurio-registry-examples-debezium-openshift:latest +``` + +Apply the application template: + +```shell +oc process -f example-app.yaml \ + -p NAMESPACE=$NAMESPACE \ + -p APP_IMAGE_GROUP=$APP_IMAGE_GROUP \ + -p INGRESS_ROUTER_CANONICAL_HOSTNAME=$INGRESS_ROUTER_CANONICAL_HOSTNAME \ + | oc apply -f - +``` + +## Run the example: + +```shell +curl -v -X POST -d 'start' http://example-app.$NAMESPACE.$INGRESS_ROUTER_CANONICAL_HOSTNAME/api/command +``` diff --git a/examples/debezium-openshift/example-app.yaml b/examples/debezium-openshift/example-app.yaml new file mode 100644 index 0000000000..61f60b534e --- /dev/null +++ b/examples/debezium-openshift/example-app.yaml @@ -0,0 +1,132 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + name: example-app-template +labels: + template: example-app-template +objects: + + + - kind: Deployment + apiVersion: apps/v1 + metadata: + name: example-app + namespace: ${NAMESPACE} + labels: + app: example-app + spec: + replicas: 1 + selector: + matchLabels: + app: example-app + template: + metadata: + labels: + app: example-app + spec: + containers: + - resources: + limits: + cpu: 1000m + memory: 1024Mi + requests: + cpu: 500m + memory: 512Mi + terminationMessagePath: /dev/termination-log + name: apicurio-registry-examples-debezium-openshift + env: + - name: MYSQL_USER + value: ${MYSQL_USER} + - name: MYSQL_PASSWORD + value: ${MYSQL_PASSWORD} + - name: MYSQL_JDBC_URL + value: jdbc:mysql://example-components-mysql.${NAMESPACE}.svc.cluster.local/${MYSQL_DATABASE} + - name: KAFKA_BOOTSTRAP_SERVERS + value: example-components-kafka-kafka-bootstrap.${NAMESPACE}.svc:9092 + - name: REGISTRY_URL + value: http://example-components-registry-service.${NAMESPACE}.svc.cluster.local:8080/apis/registry/v2 + ports: + - containerPort: 8080 + protocol: TCP + imagePullPolicy: Always + terminationMessagePolicy: File + image: 
${APP_IMAGE_GROUP}/apicurio-registry-examples-debezium-openshift:latest + securityContext: + allowPrivilegeEscalation: false + runAsNonRoot: true + capabilities: + drop: + - ALL + seccompProfile: + type: RuntimeDefault + restartPolicy: Always + terminationGracePeriodSeconds: 30 + dnsPolicy: ClusterFirst + securityContext: { } + schedulerName: default-scheduler + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 25% + maxSurge: 25% + revisionHistoryLimit: 10 + progressDeadlineSeconds: 600 + + + - kind: Service + apiVersion: v1 + metadata: + name: example-app-service + namespace: ${NAMESPACE} + labels: + app: example-app + spec: + selector: + app: example-app + ports: + - protocol: TCP + port: 8080 + targetPort: 8080 + + + - kind: Ingress + apiVersion: networking.k8s.io/v1 + metadata: + name: example-app + namespace: ${NAMESPACE} + spec: + rules: + - host: >- + example-app.${NAMESPACE}.${INGRESS_ROUTER_CANONICAL_HOSTNAME} + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: example-app-service + port: + number: 8080 + + +parameters: + - name: NAMESPACE + required: true + - name: MYSQL_DATABASE + value: inventory + required: true + - name: MYSQL_USER + value: mysqluser + required: true + - name: MYSQL_PASSWORD + value: mysqlpassword + required: true + - name: APP_IMAGE_GROUP + required: true + - name: INGRESS_ROUTER_CANONICAL_HOSTNAME + # Find out from a status block of any Ingress or Route resource, e.g.: + # status: + # loadBalancer: + # ingress: + # - hostname: router-default.apps.mycluster.com + required: true diff --git a/examples/debezium-openshift/example-components.yaml b/examples/debezium-openshift/example-components.yaml new file mode 100644 index 0000000000..cdd069a033 --- /dev/null +++ b/examples/debezium-openshift/example-components.yaml @@ -0,0 +1,361 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + name: example-components-template +labels: + template: example-components-template +objects: + + + # MySQL + + + - kind: ConfigMap + apiVersion: v1 + metadata: + name: example-components-mysql-init + namespace: ${NAMESPACE} + immutable: false + data: + init.sql: | + USE inventory; + + # Create and populate our products using a single insert with many rows + + CREATE TABLE products ( + id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + description VARCHAR(512), + weight FLOAT + ); + + ALTER TABLE products AUTO_INCREMENT = 101; + + INSERT INTO products + VALUES (default,"scooter","Small 2-wheel scooter",3.14), + (default,"car battery","12V car battery",8.1), + (default,"12-pack drill bits","12-pack of drill bits with sizes ranging from #40 to #3",0.8), + (default,"hammer","12oz carpenter's hammer",0.75), + (default,"hammer","14oz carpenter's hammer",0.875), + (default,"hammer","16oz carpenter's hammer",1.0), + (default,"rocks","box of assorted rocks",5.3), + (default,"jacket","water resistant black wind breaker",0.1), + (default,"spare tire","24 inch spare tire",22.2); + + # Create and populate the products on hand using multiple inserts + + CREATE TABLE products_on_hand ( + product_id INTEGER NOT NULL PRIMARY KEY, + quantity INTEGER NOT NULL, + FOREIGN KEY (product_id) REFERENCES products(id) + ); + + INSERT INTO products_on_hand VALUES (101,3); + INSERT INTO products_on_hand VALUES (102,8); + INSERT INTO products_on_hand VALUES (103,18); + INSERT INTO products_on_hand VALUES (104,4); + INSERT INTO products_on_hand VALUES (105,5); + INSERT INTO products_on_hand VALUES (106,0); + INSERT INTO 
products_on_hand VALUES (107,44); + INSERT INTO products_on_hand VALUES (108,2); + INSERT INTO products_on_hand VALUES (109,5); + + # Create some customers ... + + CREATE TABLE customers ( + id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, + first_name VARCHAR(255) NOT NULL, + last_name VARCHAR(255) NOT NULL, + email VARCHAR(255) NOT NULL UNIQUE KEY + ) AUTO_INCREMENT=1001; + + INSERT INTO customers + VALUES (default,"Sally","Thomas","sally.thomas@acme.com"), + (default,"George","Bailey","gbailey@foobar.com"), + (default,"Edward","Walker","ed@walker.com"), + (default,"Anne","Kretchmar","annek@noanswer.org"); + + # Create some fake addresses + + CREATE TABLE addresses ( + id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, + customer_id INTEGER NOT NULL, + street VARCHAR(255) NOT NULL, + city VARCHAR(255) NOT NULL, + state VARCHAR(255) NOT NULL, + zip VARCHAR(255) NOT NULL, + type enum('SHIPPING','BILLING','LIVING') NOT NULL, + FOREIGN KEY address_customer (customer_id) REFERENCES customers(id) + ) AUTO_INCREMENT = 10; + + INSERT INTO addresses + + VALUES (default,1001,'3183 Moore Avenue','Euless','Texas','76036','SHIPPING'), + (default,1001,'2389 Hidden Valley Road','Harrisburg','Pennsylvania','17116','BILLING'), + (default,1002,'281 Riverside Drive','Augusta','Georgia','30901','BILLING'), + (default,1003,'3787 Brownton Road','Columbus','Mississippi','39701','SHIPPING'), + (default,1003,'2458 Lost Creek Road','Bethlehem','Pennsylvania','18018','SHIPPING'), + (default,1003,'4800 Simpson Square','Hillsdale','Oklahoma','73743','BILLING'), + (default,1004,'1289 University Hill Road','Canehill','Arkansas','72717','LIVING'); + + # Create some very simple orders + + CREATE TABLE orders ( + order_number INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, + order_date DATE NOT NULL, + purchaser INTEGER NOT NULL, + quantity INTEGER NOT NULL, + product_id INTEGER NOT NULL, + FOREIGN KEY order_customer (purchaser) REFERENCES customers(id), + FOREIGN KEY ordered_product (product_id) REFERENCES products(id) + ) AUTO_INCREMENT = 10001; + + INSERT INTO orders + VALUES (default, '2016-01-16', 1001, 1, 102), + (default, '2016-01-17', 1002, 2, 105), + (default, '2016-02-19', 1002, 2, 106), + (default, '2016-02-21', 1003, 1, 107); + + - kind: Deployment + apiVersion: apps/v1 + metadata: + name: example-components-mysql + namespace: ${NAMESPACE} + labels: + app: example-components-mysql + spec: + replicas: 1 + selector: + matchLabels: + app: example-components-mysql + template: + metadata: + labels: + app: example-components-mysql + spec: + containers: + - resources: + limits: + cpu: 1000m + memory: 1024Mi + requests: + cpu: 500m + memory: 512Mi + terminationMessagePath: /dev/termination-log + name: mysql + env: + - name: MYSQL_ROOT_PASSWORD + value: ${MYSQL_ROOT_PASSWORD} + - name: MYSQL_USER + value: ${MYSQL_USER} + - name: MYSQL_PASSWORD + value: ${MYSQL_PASSWORD} + - name: MYSQL_DATABASE + value: ${MYSQL_DATABASE} + ports: + - containerPort: 3306 + protocol: TCP + imagePullPolicy: IfNotPresent + terminationMessagePolicy: File + image: mysql:latest + securityContext: + allowPrivilegeEscalation: false + runAsNonRoot: true + capabilities: + drop: + - ALL + seccompProfile: + type: RuntimeDefault + livenessProbe: + exec: + command: [ "mysqladmin", "--user=${MYSQL_ROOT_USER}", "--password=${MYSQL_ROOT_PASSWORD}", "ping" ] + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + volumeMounts: + - name: mysql-init-volume + mountPath: /docker-entrypoint-initdb.d + volumes: + - name: mysql-init-volume + configMap: + 
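+              # This ConfigMap is mounted at /docker-entrypoint-initdb.d (see volumeMounts above),
+              # so the MySQL image executes init.sql on first start and seeds the inventory schema.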
name: example-components-mysql-init + restartPolicy: Always + terminationGracePeriodSeconds: 30 + dnsPolicy: ClusterFirst + securityContext: { } + schedulerName: default-scheduler + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 25% + maxSurge: 25% + revisionHistoryLimit: 10 + progressDeadlineSeconds: 600 + + + - kind: Service + apiVersion: v1 + metadata: + name: example-components-mysql + namespace: ${NAMESPACE} + labels: + app: example-components-mysql + spec: + selector: + app: example-components-mysql + ports: + - protocol: TCP + port: 3306 + targetPort: 3306 + + + # Kafka + + + - kind: Kafka + apiVersion: kafka.strimzi.io/v1beta2 + metadata: + name: example-components-kafka + namespace: ${NAMESPACE} + spec: + entityOperator: + topicOperator: { } # Required + userOperator: { } # Required + kafka: + config: + offsets.topic.replication.factor: 1 + transaction.state.log.min.isr: 1 + transaction.state.log.replication.factor: 1 + listeners: + - name: plain + port: 9092 + tls: false + type: internal + - name: tls + port: 9093 + tls: true + type: internal + replicas: 1 + storage: + type: ephemeral + version: 3.4.0 + zookeeper: + replicas: 1 + storage: + type: ephemeral + + + - apiVersion: kafka.strimzi.io/v1beta2 + kind: KafkaConnect + metadata: + annotations: + strimzi.io/use-connector-resources: "true" + name: example-components-kafka-connect + namespace: ${NAMESPACE} + spec: + bootstrapServers: example-components-kafka-kafka-bootstrap.${NAMESPACE}.svc:9093 + build: + output: + image: ${KAFKA_CONNECT_IMAGE} + type: docker + pushSecret: example-components-pull-secret + plugins: + - name: debezium-connector-mysql + artifacts: + - type: zip + url: https://repo1.maven.org/maven2/io/debezium/debezium-connector-mysql/2.3.3.Final/debezium-connector-mysql-2.3.3.Final-plugin.zip + - type: zip + url: https://repo1.maven.org/maven2/io/debezium/debezium-scripting/2.3.3.Final/debezium-scripting-2.3.3.Final.zip + - type: jar + url: https://repo1.maven.org/maven2/org/apache/groovy/groovy/4.0.9/groovy-4.0.9.jar + - type: jar + url: https://repo1.maven.org/maven2/org/apache/groovy/groovy-json/4.0.9/groovy-json-4.0.9.jar + - type: jar + url: https://repo1.maven.org/maven2/org/apache/groovy/groovy-jsr223/4.0.9/groovy-jsr223-4.0.9.jar + - type: zip + url: https://repo1.maven.org/maven2/io/apicurio/apicurio-registry-distro-connect-converter/2.4.4.Final/apicurio-registry-distro-connect-converter-2.4.4.Final.zip + config: + config.storage.replication.factor: 1 + offset.storage.replication.factor: 1 + status.storage.replication.factor: 1 + replicas: 1 + tls: + trustedCertificates: + - certificate: ca.crt + secretName: example-components-kafka-cluster-ca-cert + + + - apiVersion: kafka.strimzi.io/v1beta2 + kind: KafkaConnector + metadata: + labels: + strimzi.io/cluster: example-components-kafka-connect + name: example-components-kafka-connector + namespace: ${NAMESPACE} + spec: + class: io.debezium.connector.mysql.MySqlConnector + config: + + value.converter: io.apicurio.registry.utils.converter.AvroConverter + value.converter.apicurio.registry.auto-register: true + value.converter.apicurio.registry.find-latest: true + value.converter.apicurio.registry.url: http://example-components-registry-service.${NAMESPACE}.svc.cluster.local:8080/apis/registry/v2 + + key.converter: io.apicurio.registry.utils.converter.AvroConverter + key.converter.apicurio.registry.auto-register: true + key.converter.apicurio.registry.find-latest: true + key.converter.apicurio.registry.url: 
http://example-components-registry-service.${NAMESPACE}.svc.cluster.local:8080/apis/registry/v2 + + database.server.id: 1 + database.hostname: example-components-mysql.${NAMESPACE}.svc.cluster.local + database.port: 3306 + database.user: ${MYSQL_ROOT_USER} + database.password: ${MYSQL_ROOT_PASSWORD} + database.dbname: inventory + database.cdcschema: ASNCDC + + schema.name.adjustment.mode: avro + topic.prefix: example + + schema.history.internal.kafka.topic: schema-changes.inventory + schema.history.internal.kafka.bootstrap.servers: example-components-kafka-kafka-bootstrap.${NAMESPACE}.svc:9092 # TODO TLS? + tasksMax: 1 + + + # Apicurio Registry + + + - kind: ApicurioRegistry + apiVersion: registry.apicur.io/v1 + metadata: + name: example-components-registry + namespace: ${NAMESPACE} + spec: + configuration: + persistence: kafkasql + kafkasql: + bootstrapServers: example-components-kafka-kafka-bootstrap.${NAMESPACE}.svc:9092 + logLevel: DEBUG + registryLogLevel: DEBUG + + +parameters: + - name: NAMESPACE + required: true + - name: MYSQL_ROOT_USER + value: root # From MySQL image + required: true + - name: MYSQL_ROOT_PASSWORD + value: debezium + required: true + - name: MYSQL_USER + value: mysqluser + required: true + - name: MYSQL_PASSWORD + value: mysqlpassword + required: true + - name: MYSQL_DATABASE + value: inventory + required: true + - name: KAFKA_CONNECT_IMAGE + required: true diff --git a/examples/debezium-openshift/pom.xml b/examples/debezium-openshift/pom.xml new file mode 100644 index 0000000000..a3fbec8baa --- /dev/null +++ b/examples/debezium-openshift/pom.xml @@ -0,0 +1,316 @@ + + + 4.0.0 + + + io.apicurio + apicurio-registry-examples + 2.5.12-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-debezium-openshift + + + 1.18.28 + + + + + + io.quarkus + quarkus-resteasy-jackson + + + com.github.java-json-tools + jackson-coreutils + + + + + + io.quarkus + quarkus-jdbc-mysql + + + + io.quarkus + quarkus-agroal + + + + io.quarkus + quarkus-smallrye-health + + + + io.quarkus + quarkus-scheduler + + + + io.quarkus + quarkus-container-image-jib + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + + org.projectlombok + lombok + ${lombok.version} + + + + + + + + io.quarkus + quarkus-maven-plugin + true + + + + build + generate-code + generate-code-tests + + + + + + maven-compiler-plugin + + + maven-surefire-plugin + + + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + generate-sources + + download + + + + ${registry.url}/apis/registry/v2 + + + + + default + event.block + ${project.build.directory}/resources/block.avsc + true + + + default + io.debezium.connector.mysql.Source + ${project.build.directory}/resources/Source.avsc + true + + + default + io.debezium.connector.schema.Change + ${project.build.directory}/resources/Change.avsc + true + + + default + io.debezium.connector.schema.Column + ${project.build.directory}/resources/Column.avsc + true + + + default + io.debezium.connector.schema.Table + ${project.build.directory}/resources/Table.avsc + true + + + + default + example-key + ${project.build.directory}/resources/SchemaChangeKey.avsc + true + + + default + example-value + ${project.build.directory}/resources/SchemaChangeValue.avsc + true + + + + default + example.inventory.addresses-key + + ${project.build.directory}/resources/addresses/Key.avsc + true + + + default + 
example.inventory.addresses-value + + ${project.build.directory}/resources/addresses/Envelope.avsc + true + + + default + example.inventory.addresses.Value + + ${project.build.directory}/resources/addresses/Value.avsc + true + + + + default + example.inventory.customers-key + + ${project.build.directory}/resources/customers/Key.avsc + true + + + default + example.inventory.customers-value + + ${project.build.directory}/resources/customers/Envelope.avsc + true + + + default + example.inventory.customers.Value + + ${project.build.directory}/resources/customers/Value.avsc + true + + + + default + example.inventory.orders-key + + ${project.build.directory}/resources/orders/Key.avsc + true + + + default + example.inventory.orders-value + + ${project.build.directory}/resources/orders/Envelope.avsc + true + + + default + example.inventory.orders.Value + + ${project.build.directory}/resources/orders/Value.avsc + true + + + + default + example.inventory.products-key + + ${project.build.directory}/resources/products/Key.avsc + true + + + default + example.inventory.products-value + + ${project.build.directory}/resources/products/Envelope.avsc + true + + + default + example.inventory.products.Value + + ${project.build.directory}/resources/products/Value.avsc + true + + + + default + example.inventory.products_on_hand-key + + ${project.build.directory}/resources/products_on_hand/Key.avsc + true + + + default + example.inventory.products_on_hand-value + + ${project.build.directory}/resources/products_on_hand/Envelope.avsc + true + + + default + example.inventory.products_on_hand.Value + + ${project.build.directory}/resources/products_on_hand/Value.avsc + true + + + + + + + + + + org.apache.avro + avro-maven-plugin + ${version.avro} + + + generate-sources + + schema + + + String + + ${project.basedir}/target/resources/Column.avsc + ${project.basedir}/target/resources/Table.avsc + + ${project.basedir}/target/resources/addresses/Value.avsc + ${project.basedir}/target/resources/customers/Value.avsc + ${project.basedir}/target/resources/orders/Value.avsc + ${project.basedir}/target/resources/products/Value.avsc + ${project.basedir}/target/resources/products_on_hand/Value.avsc + + ${project.basedir}/target/resources/ + ${project.basedir}/target/generated-sources/avro + + + + + + + + + + + diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java new file mode 100644 index 0000000000..d3393a227f --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java @@ -0,0 +1,38 @@ +package io.apicurio.example.debezium; + + +import lombok.Getter; + +import java.util.HashMap; +import java.util.Map; + +/** + * @author Jakub Senko m@jsenko.net + */ +public enum Operation { + + CREATE("c"), + READ("r"), // Used for snapshots, i.e. 
writes the initial (or incremental) state of database tables to each topic + UPDATE("u"), + DELETE("d"), + TRUNCATE("t"); + + @Getter + private String op; + + Operation(String op) { + this.op = op; + } + + private final static Map CONSTANTS = new HashMap<>(); + + static { + for (Operation c : values()) { + CONSTANTS.put(c.op, c); + } + } + + public static Operation from(String value) { + return CONSTANTS.get(value); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java new file mode 100644 index 0000000000..4c515acd0e --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java @@ -0,0 +1,133 @@ +package io.apicurio.example.debezium.kafka; + +import io.apicurio.example.debezium.Operation; +import io.apicurio.example.debezium.model.*; +import io.quarkus.runtime.StartupEvent; +import org.apache.avro.specific.SpecificRecord; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.event.Observes; +import javax.inject.Inject; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.List; + +/** + * @author Jakub Senko m@jsenko.net + */ +@ApplicationScoped +public class ExampleKafkaConsumer { + + private static final Logger log = LoggerFactory.getLogger(ExampleKafkaConsumer.class); + + @Inject + KafkaFactory kafkaFactory; + + + void onStart(@Observes StartupEvent event) { + + Runnable runner = () -> { + try (KafkaConsumer consumer = kafkaFactory.createKafkaConsumer()) { + + var topics = List.of( + "example.inventory.addresses", + "example.inventory.customers", + "example.inventory.orders", + "example.inventory.products", + "example.inventory.products_on_hand" + ); + var existingTopic = consumer.listTopics().keySet(); + if (!existingTopic.containsAll(topics)) { + throw new IllegalStateException("Some topics are not available. 
" + + "Expected: " + topics + ", actual: " + existingTopic); + } + + consumer.subscribe(topics); + + while (true) { + try { + var records = consumer.poll(Duration.of(10, ChronoUnit.SECONDS)); + if (records != null && !records.isEmpty()) { + log.info("Consuming {} records:", records.count()); + records.forEach(record -> { + if (record.key() == null) { + log.debug("Discarded an unknown message"); + return; + } + if (record.value() == null) { + log.debug("Discarded a tombstone message"); + return; + } + + log.info("---"); + log.info("Raw key: {}", record.key()); + log.info("Raw key schema: {}", ((SpecificRecord) record.key()).getSchema()); + log.info("Raw value: {}", record.value()); + log.info("Raw value schema: {}", ((SpecificRecord) record.value()).getSchema()); + + switch (record.topic()) { + case "example.inventory.addresses": { + var key = (example.inventory.addresses.Key) record.key(); + var value = (example.inventory.addresses.Envelope) record.value(); + log.info("Operation {} on Address", Operation.from(value.getOp())); + log.info("ID: {}", key.getId()); + log.info("Before: {}", Address.from(value.getBefore())); + log.info("After: {}", Address.from(value.getAfter())); + break; + } + case "example.inventory.customers": { + var key = (example.inventory.customers.Key) record.key(); + var value = (example.inventory.customers.Envelope) record.value(); + log.info("Operation {} on Customer", Operation.from(value.getOp())); + log.info("ID: {}", key.getId()); + log.info("Before: {}", Customer.from(value.getBefore())); + log.info("After: {}", Customer.from(value.getAfter())); + break; + } + case "example.inventory.orders": { + var key = (example.inventory.orders.Key) record.key(); + var value = (example.inventory.orders.Envelope) record.value(); + log.info("Operation {} on Order", Operation.from(value.getOp())); + log.info("Order number: {}", key.getOrderNumber()); + log.info("Before: {}", Order.from(value.getBefore())); + log.info("After: {}", Order.from(value.getAfter())); + break; + } + case "example.inventory.products": { + var key = (example.inventory.products.Key) record.key(); + var value = (example.inventory.products.Envelope) record.value(); + log.info("Operation {} on Product", Operation.from(value.getOp())); + log.info("ID: {}", key.getId()); + log.info("Before: {}", Product.from(value.getBefore())); + log.info("After: {}", Product.from(value.getAfter())); + break; + } + case "example.inventory.products_on_hand": { + var key = (example.inventory.products_on_hand.Key) record.key(); + var value = (example.inventory.products_on_hand.Envelope) record.value(); + log.info("Operation {} on ProductOnHand", Operation.from(value.getOp())); + log.info("Product ID: {}", key.getProductId()); + log.info("Before: {}", ProductOnHand.from(value.getBefore())); + log.info("After: {}", ProductOnHand.from(value.getAfter())); + break; + } + default: + throw new IllegalStateException("Received a message from unexpected topic: " + record.topic()); + } + }); + } + } catch (Exception ex) { + log.error("Error reading records from Kafka", ex); + } + } + } + }; + var thread = new Thread(runner); + thread.setDaemon(true); + thread.setName("Kafka Consumer Thread"); + thread.start(); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java new file mode 100644 index 0000000000..9e2543db77 --- /dev/null +++ 
b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java @@ -0,0 +1,48 @@ +package io.apicurio.example.debezium.kafka; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerdeConfig; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.enterprise.context.ApplicationScoped; +import java.util.Properties; + +/** + * @author Jakub Senko m@jsenko.net + */ +@ApplicationScoped +public class KafkaFactory { + + private static final Logger log = LoggerFactory.getLogger(KafkaFactory.class); + + @ConfigProperty(name = "kafka.bootstrap.servers") + String bootstrapServers; + + @ConfigProperty(name = "registry.url") + String registryUrl; + + public KafkaConsumer createKafkaConsumer() { + + Properties props = new Properties(); + + props.putIfAbsent(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "consumer-example-debezium-openshift"); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + log.debug("Registry URL: {}", registryUrl); + props.putIfAbsent(SerdeConfig.REGISTRY_URL, registryUrl); + // Deserialize into a specific class instead of GenericRecord + props.putIfAbsent(AvroKafkaSerdeConfig.USE_SPECIFIC_AVRO_READER, true); + + return new KafkaConsumer<>(props); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java new file mode 100644 index 0000000000..96a100d898 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java @@ -0,0 +1,44 @@ +package io.apicurio.example.debezium.model; + +import example.inventory.addresses.Value; +import lombok.*; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Builder +@Getter +@Setter +@EqualsAndHashCode +@ToString +public class Address { + + private Integer id; + + private Integer customerId; + + private String street; + + private String city; + + private String state; + + private String zip; + + private String type; + + public static Address from(Value value) { + if (value == null) { + return null; + } + return Address.builder() + .id(value.getId()) + .customerId(value.getCustomerId()) + .street(value.getStreet()) + .city(value.getCity()) + .state(value.getState()) + .zip(value.getZip()) + .type(value.getType()) + .build(); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java new file mode 100644 index 0000000000..15590bd35d --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java @@ -0,0 +1,35 @@ +package io.apicurio.example.debezium.model; + +import example.inventory.customers.Value; 
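+// example.inventory.customers.Value is the Avro class generated by the avro-maven-plugin from the
+// example.inventory.customers.Value schema that the build downloads from the registry (see pom.xml).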
+import lombok.*; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Builder +@Getter +@Setter +@EqualsAndHashCode +@ToString +public class Customer { + + private Integer id; + + private String firstName; + + private String lastName; + + private String email; + + public static Customer from(Value value) { + if (value == null) { + return null; + } + return Customer.builder() + .id(value.getId()) + .firstName(value.getFirstName()) + .lastName(value.getLastName()) + .email(value.getEmail()) + .build(); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java new file mode 100644 index 0000000000..b0d9b1f166 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java @@ -0,0 +1,41 @@ +package io.apicurio.example.debezium.model; + +import example.inventory.orders.Value; +import lombok.*; + +import java.time.Duration; +import java.time.Instant; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Builder +@Getter +@Setter +@EqualsAndHashCode +@ToString +public class Order { + + private Integer orderNumber; + + private Instant orderDate; + + private Integer purchaser; + + private Integer quantity; + + private Integer productId; + + public static Order from(Value value) { + if (value == null) { + return null; + } + return Order.builder() + .orderNumber(value.getOrderNumber()) + .orderDate(Instant.EPOCH.plus(Duration.ofDays(value.getOrderDate()))) + .purchaser(value.getPurchaser()) + .quantity(value.getQuantity()) + .productId(value.getProductId()) + .build(); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java new file mode 100644 index 0000000000..a48818006b --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java @@ -0,0 +1,35 @@ +package io.apicurio.example.debezium.model; + +import example.inventory.products.Value; +import lombok.*; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Builder +@Getter +@Setter +@EqualsAndHashCode +@ToString +public class Product { + + private Integer id; + + private String name; + + private String description; + + private Float weight; + + public static Product from(Value value) { + if (value == null) { + return null; + } + return Product.builder() + .id(value.getId()) + .name(value.getName()) + .description(value.getDescription()) + .weight(value.getWeight()) + .build(); + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java new file mode 100644 index 0000000000..04429a0828 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java @@ -0,0 +1,29 @@ +package io.apicurio.example.debezium.model; + +import example.inventory.products_on_hand.Value; +import lombok.*; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Builder +@Getter +@Setter +@EqualsAndHashCode +@ToString +public class ProductOnHand { + + private Integer productId; + + private Integer quantity; + + public static ProductOnHand from(Value value) { + if (value == null) { + return null; + } + return ProductOnHand.builder() + .productId(value.getProductId()) + .quantity(value.getQuantity()) + .build(); + } +} 
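Each model class above follows the same pattern: a null-safe `from(Value)` factory that copies the Avro-generated `Value` record into a plain Java object, so Debezium envelopes can be unwrapped without leaking generated types into the rest of the application. A minimal sketch of that unwrapping step (the `handle` method is hypothetical; the `Envelope` accessors are the ones used by `ExampleKafkaConsumer` above):

```java
import example.inventory.products.Envelope;
import io.apicurio.example.debezium.Operation;
import io.apicurio.example.debezium.model.Product;

class EnvelopeSketch {

    // A Debezium change event carries the operation plus the row state before and after it.
    static void handle(Envelope envelope) {
        Operation op = Operation.from(envelope.getOp());      // "c", "r", "u", "d" or "t"
        Product before = Product.from(envelope.getBefore());  // null for creates and snapshot reads
        Product after = Product.from(envelope.getAfter());    // null for deletes
        System.out.printf("%s: %s -> %s%n", op, before, after);
    }
}
```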
diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java new file mode 100644 index 0000000000..2dfeb81494 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java @@ -0,0 +1,37 @@ +package io.apicurio.example.debezium.rest; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; +import javax.ws.rs.POST; +import javax.ws.rs.Path; + +/** + * @author Jakub Senko m@jsenko.net + */ +@Path("/api") +public class Api { + + private static final Logger log = LoggerFactory.getLogger(Api.class); + + @Inject + ExampleRunner runner; + + + @POST + @Path("/command") + public String command(String command) { + log.info("Command received: {}", command); + switch (command) { + case "start": + runner.setEnabled(true); + return "OK"; + case "stop": + runner.setEnabled(false); + return "OK"; + default: + return "Unknown command: " + command; + } + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java new file mode 100644 index 0000000000..1d9ad49f5f --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java @@ -0,0 +1,51 @@ +package io.apicurio.example.debezium.rest; + +import io.apicurio.example.debezium.model.Product; +import io.apicurio.example.debezium.sql.Database; +import io.quarkus.scheduler.Scheduled; +import lombok.Getter; +import lombok.Setter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import java.util.Random; +import java.util.UUID; + +import static io.quarkus.scheduler.Scheduled.ConcurrentExecution.SKIP; + +@ApplicationScoped +public class ExampleRunner { + + private static final Random RANDOM = new Random(); + + private static final Logger log = LoggerFactory.getLogger(ExampleRunner.class); + + + @Getter + @Setter + private boolean isEnabled; + + @Inject + Database database; + + + @Scheduled(every = "5s", concurrentExecution = SKIP) + public void run() { + if (isEnabled) { + var product = Product.builder() + .name("name-" + UUID.randomUUID()) + .description("description-" + UUID.randomUUID()) + .weight(RANDOM.nextFloat() * 100 + 1) + .build(); + log.info("Inserting: {}", product); + product.setId(database.insertProduct(product)); + product.setName("updated-" + product.getName()); + log.info("Updating: {}", product); + database.updateProduct(product); + log.info("Deleting: {}", product); + database.deleteProduct(product); + } + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java new file mode 100644 index 0000000000..ffff218169 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java @@ -0,0 +1,65 @@ +package io.apicurio.example.debezium.sql; + +import io.agroal.api.AgroalDataSource; +import io.apicurio.example.debezium.model.Product; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import java.sql.*; +import java.util.List; + +/** + * @author Jakub Senko m@jsenko.net + */ +@ApplicationScoped +public class Database { + + @Inject + AgroalDataSource 
dataSource; + + + public int insertProduct(Product product) { + return executeUpdate("INSERT INTO products VALUES (default,?,?,?)", List.of( + new SqlParam(0, product.getName(), SqlParamType.STRING), + new SqlParam(1, product.getDescription(), SqlParamType.STRING), + new SqlParam(2, product.getWeight(), SqlParamType.FLOAT) + )); + } + + + public void updateProduct(Product product) { + executeUpdate("UPDATE products SET name = ?, description = ?, weight = ? WHERE id = ?", List.of( + new SqlParam(0, product.getName(), SqlParamType.STRING), + new SqlParam(1, product.getDescription(), SqlParamType.STRING), + new SqlParam(2, product.getWeight(), SqlParamType.FLOAT), + new SqlParam(3, product.getId(), SqlParamType.INTEGER) + )); + } + + + public void deleteProduct(Product product) { + executeUpdate("DELETE FROM products WHERE id = ?", List.of( + new SqlParam(0, product.getId(), SqlParamType.INTEGER) + )); + } + + + private int executeUpdate(String sql, List parameters) { + try (Connection connection = dataSource.getConnection()) { + try (PreparedStatement statement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) { + parameters.forEach(p -> { + p.bindTo(statement); + }); + statement.executeUpdate(); + ResultSet rs = statement.getGeneratedKeys(); + if (rs.next()) { + return rs.getInt(1); + } else { + return -1; + } + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java new file mode 100644 index 0000000000..03d0302012 --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java @@ -0,0 +1,98 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.example.debezium.sql; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Date; + +/** + * @author eric.wittmann@gmail.com + * @author Jakub Senko m@jsenko.net + */ +public class SqlParam { + + private final int position; + + private final Object value; + + private final SqlParamType type; + + + public SqlParam(int position, Object value, SqlParamType type) { + this.position = position; + this.value = value; + this.type = type; + } + + + public void bindTo(PreparedStatement statement) { + int position = this.position + 1; // Convert from sensible position (starts at 0) to JDBC position index (starts at 1) + try { + switch (type) { + case BYTES: + statement.setBytes(position, (byte[]) value); + break; + case DATE: + if (value == null) { + statement.setNull(position, Types.TIMESTAMP); + } else { + Timestamp ts = new Timestamp(((Date) value).getTime()); + statement.setTimestamp(position, ts); + } + break; + case ENUM: + if (value == null) { + statement.setNull(position, Types.VARCHAR); + } else { + statement.setString(position, ((Enum) value).name()); + } + break; + case INTEGER: + if (value == null) { + statement.setNull(position, Types.INTEGER); + } else { + statement.setInt(position, (Integer) value); + } + break; + case LONG: + if (value == null) { + statement.setNull(position, Types.INTEGER); + } else { + statement.setLong(position, (Long) value); + } + break; + case STRING: + statement.setString(position, (String) value); + break; + case FLOAT: + if (value == null) { + statement.setNull(position, Types.FLOAT); + } else { + statement.setFloat(position, (Float) value); + } + break; + default: + throw new RuntimeException("bindTo not supported for SqlParamType: " + type); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParamType.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParamType.java new file mode 100644 index 0000000000..2daa19fc2c --- /dev/null +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParamType.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.example.debezium.sql; + +/** + * @author eric.wittmann@gmail.com + * @author Jakub Senko m@jsenko.net + */ +public enum SqlParamType { + + STRING, INTEGER, LONG, DATE, BYTES, ENUM, FLOAT + +} diff --git a/examples/debezium-openshift/src/main/resources/application.properties b/examples/debezium-openshift/src/main/resources/application.properties new file mode 100644 index 0000000000..d03e5d1bd7 --- /dev/null +++ b/examples/debezium-openshift/src/main/resources/application.properties @@ -0,0 +1,4 @@ +quarkus.datasource.username=${MYSQL_USER} +quarkus.datasource.password=${MYSQL_PASSWORD} + +quarkus.datasource.jdbc.url=${MYSQL_JDBC_URL} diff --git a/examples/event-driven-architecture/README.md b/examples/event-driven-architecture/README.md new file mode 100644 index 0000000000..d693fd2c38 --- /dev/null +++ b/examples/event-driven-architecture/README.md @@ -0,0 +1,100 @@ +# Kafka, ksqlDB, Kafka UI, Apicurio Registry and Debezium together + +This tutorial demonstrates how to use [Debezium](https://debezium.io/) to monitor the PostgreSQL database used by Apicurio Registry. As the +data in the database changes (for example, when new schemas are added), you will see the resulting event streams. + +## Avro serialization + +The [Apicurio Registry](https://github.com/Apicurio/apicurio-registry) open-source project provides several +components that work with Avro: + +- An Avro converter that you can specify in Debezium connector configurations. This converter maps Kafka + Connect schemas to Avro schemas. The converter then uses the Avro schemas to serialize the record keys and + values into Avro’s compact binary form. + +- An API and schema registry that tracks: + + - Avro schemas that are used in Kafka topics + - Where the Avro converter sends the generated Avro schemas + +### Prerequisites + +- Docker is installed and running. + + This tutorial uses Docker and Linux container images to run the required services. You should use the + latest version of Docker. For more information, see + the [Docker Engine installation documentation](https://docs.docker.com/engine/installation/). + +## Starting the services + +1. Clone this repository: + + ```bash + git clone https://github.com/Apicurio/apicurio-registry-examples.git + ``` + +1. Change to the following directory: + + ```bash + cd event-driven-architecture + ``` + +1. Start the environment: + + ```bash + docker-compose up -d + ``` + +The last command will start the following components: + +- Single-node Zookeeper and Kafka cluster +- Single-node Kafka Connect cluster +- Apicurio Registry +- PostgreSQL (ready for CDC) +- ksqlDB instance +- Kafka UI + +## Apicurio converters + +Configuring Avro at the Debezium connector involves specifying the converter and the schema registry as part of +the connector's configuration. The connector configuration file explicitly sets the (de-)serializers for the +connector to use Avro and specifies the location of the Apicurio Registry. + +> The container image used in this environment includes all the required libraries to access the connectors and converters.
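+
+Before creating the connector, you can sanity-check that the stack is up. A quick sketch, assuming the port
+mappings defined in `docker-compose.yaml` (Kafka Connect on 8083, Apicurio Registry on 8080):
+
+```bash
+# Kafka Connect REST API: list the installed connector plugins
+curl -s http://localhost:8083/connector-plugins | jq '.[].class'
+# Apicurio Registry REST API: search for artifacts (empty until schemas are registered)
+curl -s "http://localhost:8080/apis/registry/v2/search/artifacts" | jq '.count'
+```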
+ +The following are the lines required to set the **key** and **value** converters and their respective registry +configuration: + +```json +{ + "value.converter.apicurio.registry.url": "http://schema-registry:8080/apis/registry/v2", + "key.converter.apicurio.registry.url": "http://schema-registry:8080/apis/registry/v2", + "value.converter": "io.apicurio.registry.utils.converter.AvroConverter", + "key.converter.apicurio.registry.auto-register": "true", + "key.converter": "io.apicurio.registry.utils.converter.AvroConverter", + "value.converter.apicurio.registry.as-confluent": "true", + "value.converter.apicurio.registry.use-id": "contentId" +} +``` + +> The compatibility mode allows you to use other providers' tooling to deserialize and reuse the schemas stored in Apicurio Registry. + +### Create the connector + +Let's create the Debezium connector to start capturing changes to the database. + +1. Create the connector using the REST API. You can execute this step either by using the curl command below + or by creating the connector from the Kafka UI. + + ```bash + curl -X POST http://localhost:8083/connectors -H 'content-type:application/json' -d @studio-connector.json + ``` + +### Check the data + +The previous step created and started the connector. From now on, all the data inserted in the Apicurio Registry database will be captured by Debezium +and sent as events into Kafka. + +## Summary + +This example shows how to start a full event-driven architecture; how you use the produced events, for example in ksqlDB to create streams or tables, is up to you. diff --git a/examples/event-driven-architecture/docker-compose.yaml b/examples/event-driven-architecture/docker-compose.yaml new file mode 100644 index 0000000000..0147f3c45c --- /dev/null +++ b/examples/event-driven-architecture/docker-compose.yaml @@ -0,0 +1,131 @@ +--- +version: '2' +volumes: + postgre_apicurio: + +services: + zookeeper: + image: confluentinc/cp-zookeeper:7.2.1 + hostname: zookeeper + container_name: zookeeper + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + + + kafka-connect0: + restart: "unless-stopped" + image: debezium/connect:nightly + depends_on: + - broker + ports: + - "8083:8083" + environment: + GROUP_ID: 2 + BOOTSTRAP_SERVERS: "broker:9092" + CONFIG_STORAGE_TOPIC: dbz_connect_configs + OFFSET_STORAGE_TOPIC: dbz_connect_offsets + ENABLE_APICURIO_CONVERTERS: "true" + KEY_CONVERTER: io.apicurio.registry.utils.converter.AvroConverter + VALUE_CONVERTER: io.apicurio.registry.utils.converter.AvroConverter + CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8080/apis/registry/v2 + CONNECT_KEY_CONVERTER: io.apicurio.registry.utils.converter.AvroConverter + CONNECT_KEY_CONVERTER_APICURIO_REGISTRY_URL: http://schema-registry:8080/apis/registry/v2 + CONNECT_KEY_CONVERTER_APICURIO_REGISTRY_AUTO-REGISTER: "true" + CONNECT_KEY_CONVERTER_APICURIO_REGISTRY_FIND-LATEST: "true" + CONNECT_VALUE_CONVERTER: io.apicurio.registry.utils.converter.AvroConverter + CONNECT_VALUE_CONVERTER_APICURIO_REGISTRY_URL: http://schema-registry:8080/apis/registry/v2 + CONNECT_VALUE_CONVERTER_APICURIO_REGISTRY_AUTO-REGISTER: "true" + CONNECT_VALUE_CONVERTER_APICURIO_REGISTRY_FIND-LATEST: "true" + CONNECT_SCHEMA_NAME_ADJUSTMENT_MODE: avro + + broker: + image: confluentinc/cp-kafka:7.2.1 + hostname: broker + container_name: broker + depends_on: + - zookeeper + ports: + - "29092:29092" + - "9092:9092" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT:
'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:29092 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + + + # Schema Registry + schema-registry: + image: apicurio/apicurio-registry-sql:latest-snapshot + restart: always + ports: + - "8080:8080" + environment: + REGISTRY_DATASOURCE_URL: "jdbc:postgresql://apicurio-db/apicuriodb" + REGISTRY_DATASOURCE_USERNAME: "postgres" + REGISTRY_DATASOURCE_PASSWORD: "postgres" + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: divolte-kafka:2181 + + kafka-ui: + container_name: kafka-ui + image: provectuslabs/kafka-ui:master + ports: + - "8081:8080" + depends_on: + - broker + - schema-registry + - kafka-connect0 + environment: + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: "broker:9092" + KAFKA_CLUSTERS_0_SCHEMAREGISTRY: "http://schema-registry:8080/apis/ccompat/v6" + KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first + KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: "http://kafka-connect0:8083" + KAFKA_CLUSTERS_0_KSQLDBSERVER: "http://ksqldb-server:8088" + + ksqldb-server: + image: confluentinc/ksqldb-server:0.28.2 + hostname: ksqldb-server + container_name: ksqldb-server + depends_on: + - broker + ports: + - "8088:8088" + environment: + KSQL_LISTENERS: http://0.0.0.0:8088 + KSQL_BOOTSTRAP_SERVERS: broker:9092 + KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true" + KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true" + KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8080/apis/ccompat/v6" + + ksqldb-cli: + image: confluentinc/ksqldb-cli:0.28.2 + container_name: ksqldb-cli + depends_on: + - broker + - ksqldb-server + entrypoint: /bin/sh + tty: true + + apicurio-db: + ports: + - '5432:5432' + image: 'postgres:latest' + environment: + POSTGRES_DB: apicuriodb + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + volumes: + - 'postgre_apicurio:/var/lib/postgresql/data' + command: + - "postgres" + - "-c" + - "wal_level=logical" diff --git a/examples/event-driven-architecture/studio-connector.json b/examples/event-driven-architecture/studio-connector.json new file mode 100644 index 0000000000..7a8e09f231 --- /dev/null +++ b/examples/event-driven-architecture/studio-connector.json @@ -0,0 +1,26 @@ +{ + "connector.class": "io.debezium.connector.postgresql.PostgresConnector", + "database.user": "postgres", + "database.dbname": "apicuriodb", + "tasks.max": "1", + "database.history.kafka.bootstrap.servers": "kafka:29092", + "database.history.kafka.topic": "schema-changes.inventory", + "database.server.name": "apicuriodb", + "schema.include.list": "public", + "value.converter.apicurio.registry.auto-register": "true", + "key.converter.apicurio.registry.as-confluent": "true", + "database.port": "5432", + "plugin.name": "pgoutput", + "topic.prefix": "postgre-changes", + "database.hostname": "apicurio-db", + "database.password": "postgres", + "key.converter.apicurio.registry.use-id": "contentId", + "name": "Test", + "value.converter.apicurio.registry.url": "http://schema-registry:8080/apis/registry/v2", + "key.converter.apicurio.registry.url": "http://schema-registry:8080/apis/registry/v2", + "value.converter": "io.apicurio.registry.utils.converter.AvroConverter", + "key.converter.apicurio.registry.auto-register": "true", + "key.converter": 
"io.apicurio.registry.utils.converter.AvroConverter", + "value.converter.apicurio.registry.as-confluent": "true", + "value.converter.apicurio.registry.use-id": "contentId" +} \ No newline at end of file diff --git a/examples/jsonschema-validation/pom.xml b/examples/jsonschema-validation/pom.xml new file mode 100644 index 0000000000..c66767f97a --- /dev/null +++ b/examples/jsonschema-validation/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-jsonschema-validation + jar + + + + io.apicurio + apicurio-registry-schema-validation-jsonschema + ${apicurio-registry-schema-validation.version} + + + + diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java new file mode 100644 index 0000000000..ea1a4d5f05 --- /dev/null +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java @@ -0,0 +1,63 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.validation.json; + +import java.util.Date; + +/** + * @author famartin@redhat.com + */ +public class InvalidMessageBean { + + private String message; + private Date time; + + /** + * Constructor. + */ + public InvalidMessageBean() { + } + + /** + * @return the message + */ + public String getMessage() { + return message; + } + + /** + * @param message the message to set + */ + public void setMessage(String message) { + this.message = message; + } + + /** + * @return the time + */ + public Date getTime() { + return time; + } + + /** + * @param time the time to set + */ + public void setTime(Date time) { + this.time = time; + } + +} diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java new file mode 100644 index 0000000000..3f3d6d122d --- /dev/null +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java @@ -0,0 +1,188 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.validation.json; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import io.apicurio.registry.resolver.SchemaResolverConfig; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.types.ArtifactType; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; +import io.apicurio.rest.client.spi.ApicurioHttpClient; +import io.apicurio.rest.client.spi.ApicurioHttpClientFactory; +import io.apicurio.schema.validation.json.JsonMetadata; +import io.apicurio.schema.validation.json.JsonRecord; +import io.apicurio.schema.validation.json.JsonValidationResult; +import io.apicurio.schema.validation.json.JsonValidator; +import java.util.Collections; + +/** + * This example demonstrates how to use Apicurio Registry Schema Validation library for JSON and JSON Schema. + * + * The following aspects are demonstrated: + * + *
+ * <ul> + *   <li>Register the JSON Schema in the registry</li> + *   <li>Configuring a JsonValidator that will use Apicurio Registry to fetch and cache the schema to use for validation</li> + *   <li>Successfully validate Java objects using static configuration to always use the same schema for validation</li> + *   <li>Successfully validate Java objects using dynamic configuration to dynamically choose the schema to use for validation</li> + * </ul> + * + * Pre-requisites: + * + * <ul> + *   <li>Apicurio Registry must be running on localhost:8080</li> + * </ul>
+ * + * @author eric.wittmann@gmail.com + */ +public class JsonSchemaValidationExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + + public static final String SCHEMA = "{" + + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + + " \"required\": [" + + " \"message\"," + + " \"time\"" + + " ]," + + " \"type\": \"object\"," + + " \"properties\": {" + + " \"message\": {" + + " \"description\": \"\"," + + " \"type\": \"string\"" + + " }," + + " \"time\": {" + + " \"description\": \"\"," + + " \"type\": \"number\"" + + " }" + + " }" + + "}"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + JsonSchemaValidationExample.class.getSimpleName()); + + + // Register the schema with the registry (only if it is not already registered) + String artifactId = JsonSchemaValidationExample.class.getSimpleName(); + RegistryClient client = createRegistryClient(REGISTRY_URL); + client.createArtifact("default", artifactId, ArtifactType.JSON, IfExists.RETURN_OR_UPDATE, new ByteArrayInputStream(SCHEMA.getBytes(StandardCharsets.UTF_8))); + + // Create an artifact reference pointing to the artifact we just created + // and pass it to the JsonValidator + ArtifactReference artifactReference = ArtifactReference.builder() + .groupId("default") + .artifactId(artifactId) + .build(); + + // Create the JsonValidator providing an ArtifactReference + // this ArtifactReference will always be used to look up the schema in the registry when using "validateByArtifactReference" + JsonValidator validator = createJsonValidator(artifactReference); + + // Test successful validation + + MessageBean bean = new MessageBean(); + bean.setMessage("Hello world"); + bean.setTime(System.currentTimeMillis()); + + System.out.println(); + System.out.println("Validating valid message bean"); + JsonValidationResult result = validator.validateByArtifactReference(bean); + System.out.println("Validation result: " + result); + System.out.println(); + + // Test validation error + + InvalidMessageBean invalidBean = new InvalidMessageBean(); + invalidBean.setMessage("Hello from invalid bean"); + invalidBean.setTime(new Date()); + + System.out.println("Validating invalid message bean"); + JsonValidationResult invalidBeanResult = validator.validateByArtifactReference(invalidBean); + System.out.println("Validation result: " + invalidBeanResult); + System.out.println(); + + // Test the validate method providing a record to dynamically resolve the artifact to fetch from the registry + + JsonRecord record = new JsonRecord(bean, new JsonMetadata(artifactReference)); + + System.out.println("Validating message bean using dynamic ArtifactReference resolution"); + JsonValidationResult recordValidationResult = validator.validate(record); + System.out.println("Validation result: " + recordValidationResult); + System.out.println(); + + } + + /** + * Creates the registry client + */ + private static RegistryClient createRegistryClient(String registryUrl) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + + // Only configure security values if they are present.
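+ // Note: the SchemaResolverConfig constants double as environment variable names here;
+ // setting AUTH_TOKEN_ENDPOINT, AUTH_CLIENT_ID and AUTH_CLIENT_SECRET switches the client
+ // to OIDC client-credentials authentication against the configured token endpoint.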
+ if (tokenEndpoint != null) { + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + ApicurioHttpClient httpClient = ApicurioHttpClientFactory.create(tokenEndpoint, new AuthErrorHandler()); + OidcAuth auth = new OidcAuth(httpClient, authClient, authSecret); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), auth); + } else { + return RegistryClientFactory.create(registryUrl); + } + } + + /** + * Creates the JSON validator + */ + private static JsonValidator createJsonValidator(ArtifactReference artifactReference) { + Map<String, Object> props = new HashMap<>(); + + // Configure Service Registry location + props.putIfAbsent(SchemaResolverConfig.REGISTRY_URL, REGISTRY_URL); + + // Only configure security values if they are present. + configureSecurityIfPresent(props); + + // Create the JSON validator + JsonValidator validator = new JsonValidator(props, Optional.ofNullable(artifactReference)); + return validator; + } + + private static void configureSecurityIfPresent(Map<String, Object> props) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SchemaResolverConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SchemaResolverConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + } + } +} diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java new file mode 100644 index 0000000000..34599952ef --- /dev/null +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java @@ -0,0 +1,61 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.validation.json; + +/** + * @author eric.wittmann@gmail.com + */ +public class MessageBean { + + private String message; + private long time; + + /** + * Constructor.
+ */ + public MessageBean() { + } + + /** + * @return the message + */ + public String getMessage() { + return message; + } + + /** + * @param message the message to set + */ + public void setMessage(String message) { + this.message = message; + } + + /** + * @return the time + */ + public long getTime() { + return time; + } + + /** + * @param time the time to set + */ + public void setTime(long time) { + this.time = time; + } + +} diff --git a/examples/mix-avro/pom.xml b/examples/mix-avro/pom.xml new file mode 100644 index 0000000000..1f0d3fa776 --- /dev/null +++ b/examples/mix-avro/pom.xml @@ -0,0 +1,43 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-mix-avro + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + + diff --git a/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java b/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java new file mode 100644 index 0000000000..966b25654c --- /dev/null +++ b/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java @@ -0,0 +1,240 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.mix.avro; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; +import java.util.UUID; + +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.serde.avro.strategy.RecordIdStrategy; + +/** + * This example application showcases a scenario where Apache Avro messages are published to the same + * Kafka topic using different Avro schemas. This example uses the Apicurio Registry serdes classes to serialize + * and deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. + * The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Avro schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ *   <li>Producing and consuming Avro messages using different schemas mapped to different Apicurio Registry Artifacts</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
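+ * <p>
+ * As a quick sketch (using only property names that appear in this example), the producer
+ * settings that make mixed-schema publishing work are:
+ * <pre>{@code
+ * props.put(SerdeConfig.REGISTRY_URL, "http://localhost:8080/apis/registry/v2");
+ * // RecordIdStrategy derives the artifactId from each record's schema full name, so
+ * // Greeting and Farewell records map to different artifacts on the same topic.
+ * props.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, RecordIdStrategy.class.getName());
+ * props.put(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE);
+ * }</pre>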
+ * + * @author Fabian Martinez + * @author Carles Arnal + */ +public class MixAvroExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = MixAvroExample.class.getSimpleName(); + private static final String SCHEMAV1 = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + private static final String SCHEMAV2 = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"},{\"name\":\"Extra\",\"type\":\"string\"}]}"; + private static final String FAREWELLSCHEMAV1 = "{\"type\":\"record\",\"name\":\"Farewell\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + private static final String FAREWELLSCHEMAV2 = "{\"type\":\"record\",\"name\":\"Farewell\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"},{\"name\":\"Extra\",\"type\":\"string\"}]}"; + + public static void main(String[] args) throws Exception { + System.out.println("Starting example " + MixAvroExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + + int producedMessages = 0; + try { + producedMessages += produceMessages(producer, topicName, SCHEMAV1, null); + + producedMessages += produceMessages(producer, topicName, SCHEMAV2, "extra greeting"); + + producedMessages += produceMessages(producer, topicName, FAREWELLSCHEMAV1, null); + + producedMessages += produceMessages(producer, topicName, FAREWELLSCHEMAV2, "extra farewell"); + + + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + + // Consume the 5 messages. + try (consumer) { + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + int messageCount = 0; + System.out.println("Consuming (" + producedMessages + ") messages."); + while (messageCount < producedMessages) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. 
+ System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + value.getSchema().getFullName(); + if (value.hasField("Extra")) { + System.out.println("Consumed " + value.getSchema().getFullName() + ": " + value.get("Message") + " @ " + new Date((long) value.get("Time")) + " @ " + value.get("Extra")); + } else { + System.out.println("Consumed " + value.getSchema().getFullName() + ": " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + } + }); + } + } + + System.out.println("Done (success)."); + } + + private static int produceMessages(Producer producer, String topicName, String schemaContent, String extra) throws InterruptedException { + int producedMessages = 0; + Schema schema = new Schema.Parser().parse(schemaContent); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + if (extra != null) { + record.put("Extra", extra); + } + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, UUID.randomUUID().toString(), record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + return producedMessages; + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, MixAvroExample.class.getSimpleName()); + // Map the topic name to the artifactId in the registry + props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, RecordIdStrategy.class.getName()); + // Get an existing schema or auto-register if not found + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. 
+ */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/pom.xml b/examples/pom.xml new file mode 100644 index 0000000000..9c334e952d --- /dev/null +++ b/examples/pom.xml @@ -0,0 +1,278 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + pom + + https://www.apicur.io/ + Open Source API & Schema Registry + + + Red Hat + https://www.redhat.com + + + + + Apache License Version 2.0 + https://repository.jboss.org/licenses/apache-2.0.txt + repo + + + + + GitHub + https://github.com/apicurio/apicurio-registry-examples/issues + + + + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + + + + simple-avro + simple-json + confluent-serdes + avro-bean + custom-resolver + custom-strategy + simple-avro-maven + rest-client + mix-avro + jsonschema-validation + simple-validation + quarkus-auth + simple-avro-downstream + rest-client-downstream + serdes-with-references + protobuf-validation + tools/kafkasql-topic-import + + + + + UTF-8 + UTF-8 + yyyy-MM-dd HH:mm:ss + 
${maven.build.timestamp} + + 11 + 11 + + + ${project.version} + + + io.quarkus + quarkus-universe-bom + 2.7.5.Final + ${quarkus.platform.version} + + + 2.8.1 + + + 5.5.1 + + + 4.5.8.Final + + + 1.7.25 + + + 3.9.5 + + + 0.1.14.Final + + + 0.0.4.Final + + + ${quarkus.platform.version} + + 3.8.1 + 2.8.2 + 2.22.2 + 3.2.0 + 3.2.1 + 2.22.2 + 3.2.0 + 1.0.0 + 3.1.2 + 3.3.0 + 3.2.0 + 3.1.0 + 1.11.0 + 3.21.6 + + 0.6.1 + 1.7.0 + + + + + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + + + + + + + + + + io.quarkus + quarkus-maven-plugin + ${quarkus-plugin.version} + + + + + org.codehaus.mojo + properties-maven-plugin + ${version.properties.plugin} + + + org.apache.maven.plugins + maven-compiler-plugin + ${version.compiler.plugin} + + + org.apache.maven.plugins + maven-source-plugin + ${version.source.plugin} + + + org.apache.maven.plugins + maven-javadoc-plugin + ${version.javadoc.plugin} + + + org.apache.maven.plugins + maven-failsafe-plugin + ${version.failsafe.plugin} + + + org.apache.maven.plugins + maven-surefire-plugin + ${version.surefire.plugin} + + + org.apache.maven.plugins + maven-deploy-plugin + ${version.deploy.plugin} + + + org.apache.maven.plugins + maven-jar-plugin + ${version.jar.plugin} + + + org.apache.maven.plugins + maven-dependency-plugin + ${version.dependency.plugin} + + + org.apache.maven.plugins + maven-assembly-plugin + ${version.assembly.plugin} + + + org.apache.maven.plugins + maven-resources-plugin + ${version.resources.plugin} + + + org.apache.maven.plugins + maven-clean-plugin + ${version.clean.plugin} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.target} + ${maven.compiler.target} + false + false + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + attach-javadocs + + jar + + + + + + + org.jboss.spec.javax.annotation + jboss-annotations-api_1.2_spec + 1.0.1.Final + + + false + false + + + + + + + + java8 + + [1.8,) + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + -Xdoclint:none + + + + + + + diff --git a/examples/protobuf-bean/pom.xml b/examples/protobuf-bean/pom.xml new file mode 100644 index 0000000000..cbd5a61841 --- /dev/null +++ b/examples/protobuf-bean/pom.xml @@ -0,0 +1,59 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.5.12-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-protobuf-bean + jar + + + 0.6.1 + + + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + + compile + + + + + + + + + + diff --git a/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java b/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java new file mode 100644 index 0000000000..149b55b0cd --- /dev/null +++ b/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java @@ -0,0 +1,227 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.protobuf; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.examples.AddressBookProtos; +import io.apicurio.registry.examples.AddressBookProtos.AddressBook; +import io.apicurio.registry.examples.AddressBookProtos.Person; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaDeserializer; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaSerializer; +import io.apicurio.registry.utils.IoUtil; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Protobuf as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Protobuf schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a custom java bean and received as the same java bean</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
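+ * <p>
+ * A minimal usage sketch (mirroring what this example does below): with the default
+ * TopicIdStrategy the producer registers the schema under the artifactId
+ * {@code <topicName>-value}, so it can later be fetched with:
+ * <pre>{@code
+ * RegistryClient client = RegistryClientFactory.create(REGISTRY_URL);
+ * String schema = IoUtil.toString(client.getArtifactVersion("default", topicName + "-value", "1"));
+ * }</pre>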
+ * + * @author eric.wittmann@gmail.com + */ +public class ProtobufBeanExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = ProtobufBeanExample.class.getSimpleName(); + private static final String SCHEMA_NAME = "AddressBook"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + ProtobufBeanExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String key = SCHEMA_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 2 messages. + try { + System.out.println("Producing (2) messages."); + for (int idx = 0; idx < 2; idx++) { + + AddressBookProtos.AddressBook book = AddressBook.newBuilder() + .addPeople(Person.newBuilder() + .setEmail("aa@bb.com") + .setId(1) + .setName("aa") + .build()) + .addPeople(Person.newBuilder() + .setEmail("bb@bb.com") + .setId(2) + .setName("bb") + .build()) + .build(); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, book); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 2 messages. + try { + int messageCount = 0; + System.out.println("Consuming (2) messages."); + while (messageCount < 2) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + AddressBook value = record.value(); + System.out.println("Consumed a message: People count in AddressBook " + value.getPeopleCount()); + }); + } + } finally { + consumer.close(); + } + + RegistryClient client = RegistryClientFactory.create(REGISTRY_URL); + System.out.println("The artifact created in Apicurio Registry is: "); + //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value + System.out.println(IoUtil.toString(client.getArtifactVersion("default", topicName + "-value", "1"))); + System.out.println(); + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
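+ * The producer auto-registers the Protobuf schema via SerdeConfig.AUTO_REGISTER_ARTIFACT.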
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Protobuf + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + + // Register the artifact if not found in the registry. + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Protobuf + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + // the serializer also puts information about the AddressBook java class in the kafka record headers + // with this the deserializer can automatically return that same java class. + + //Just if security values are present, then we configure them. 
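+ // configureSecurityIfPresent() checks the SerdeConfig.AUTH_TOKEN_ENDPOINT,
+ // AUTH_CLIENT_ID and AUTH_CLIENT_SECRET environment variables; when the token
+ // endpoint is set, it also switches Kafka to SASL_SSL with OAUTHBEARER (see below).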
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + +} diff --git a/examples/protobuf-bean/src/main/proto/person.proto b/examples/protobuf-bean/src/main/proto/person.proto new file mode 100644 index 0000000000..ba1ae1e85c --- /dev/null +++ b/examples/protobuf-bean/src/main/proto/person.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package tutorial; + +import "google/protobuf/timestamp.proto"; + +option java_package = "io.apicurio.registry.examples"; +option java_outer_classname = "AddressBookProtos"; + +message Person { + string name = 1; + int32 id = 2; // Unique ID number for this person. + string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + string number = 1; + PhoneType type = 2; + } + + repeated PhoneNumber phones = 4; + + google.protobuf.Timestamp last_updated = 5; +} + +message AddressBook { + repeated Person people = 1; +} \ No newline at end of file diff --git a/examples/protobuf-find-latest/pom.xml b/examples/protobuf-find-latest/pom.xml new file mode 100644 index 0000000000..0be21fd809 --- /dev/null +++ b/examples/protobuf-find-latest/pom.xml @@ -0,0 +1,59 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.5.12-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-protobuf-find-latest + jar + + + 0.6.1 + + + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + + compile + + + + + + + + + + diff --git a/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java b/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java new file mode 100644 index 0000000000..e2bb5935be --- /dev/null +++ b/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java @@ -0,0 +1,230 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.protobuf; + +import java.io.InputStream; +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.examples.AddressBookProtos; +import io.apicurio.registry.examples.AddressBookProtos.AddressBook; +import io.apicurio.registry.examples.AddressBookProtos.Person; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaDeserializer; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaSerializer; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Protobuf as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Manually registering the Protobuf schema in the registry (registered using the RegistryClient before running the producer/consumer, equivalent to using the Maven plugin or a custom CI/CD process)</li>
+ *   <li>Data sent as a custom java bean and received as the same java bean</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
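+ * <p>
+ * In outline (taken from the code below), the schema is created up front with the
+ * RegistryClient and the serializer is told to find the latest version instead of
+ * registering a new one:
+ * <pre>{@code
+ * RegistryClient client = RegistryClientFactory.create(REGISTRY_URL);
+ * client.createArtifact("default", topicName + "-value", protofile);
+ * props.put(SerdeConfig.FIND_LATEST_ARTIFACT, Boolean.TRUE);
+ * }</pre>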
+ * + * @author eric.wittmann@gmail.com + * @author carles.arnal@redhat.com + */ +public class ProtobufFindLatestExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = ProtobufFindLatestExample.class.getSimpleName(); + private static final String SCHEMA_NAME = "AddressBook"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + ProtobufFindLatestExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String key = SCHEMA_NAME; + + + RegistryClient client = RegistryClientFactory.create(REGISTRY_URL); + System.out.println("Manually creating the artifact in Apicurio Registry"); + //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value + InputStream protofile = Thread.currentThread().getContextClassLoader().getResourceAsStream("person.proto"); + client.createArtifact("default", topicName + "-value", protofile); + System.out.println(); + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 2 messages. + try { + System.out.println("Producing (2) messages."); + for (int idx = 0; idx < 2; idx++) { + + AddressBookProtos.AddressBook book = AddressBook.newBuilder() + .addPeople(Person.newBuilder() + .setEmail("aa@bb.com") + .setId(1) + .setName("aa") + .build()) + .addPeople(Person.newBuilder() + .setEmail("bb@bb.com") + .setId(2) + .setName("bb") + .build()) + .build(); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, book); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (2) messages."); + while (messageCount < 2) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + AddressBook value = record.value(); + System.out.println("Consumed a message: People count in AddressBook " + value.getPeopleCount()); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
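+ * Unlike the auto-register variant, this producer sets SerdeConfig.FIND_LATEST_ARTIFACT,
+ * so the schema must already exist in the registry (it is created in main() above).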
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Protobuf + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + + // Find and use the latest artifact in the registry for the corresponding GroupId and ArtifactId + props.putIfAbsent(SerdeConfig.FIND_LATEST_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Protobuf + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + // the serializer also puts information about the AddressBook java class in the kafka record headers + // with this the deserializer can automatically return that same java class. + + //Just if security values are present, then we configure them. 
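+ // Security is optional: without the AUTH_* environment variables the example
+ // talks to an unsecured local registry and a plaintext Kafka listener.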
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/protobuf-find-latest/src/main/proto/person.proto b/examples/protobuf-find-latest/src/main/proto/person.proto new file mode 100644 index 0000000000..ba1ae1e85c --- /dev/null +++ b/examples/protobuf-find-latest/src/main/proto/person.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package tutorial; + +import "google/protobuf/timestamp.proto"; + +option java_package = "io.apicurio.registry.examples"; +option java_outer_classname = "AddressBookProtos"; + +message Person { + string name = 1; + int32 id = 2; // Unique ID number for this person. + string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + string number = 1; + PhoneType type = 2; + } + + repeated PhoneNumber phones = 4; + + google.protobuf.Timestamp last_updated = 5; +} + +message AddressBook { + repeated Person people = 1; +} \ No newline at end of file diff --git a/examples/protobuf-find-latest/src/main/resources/person.proto b/examples/protobuf-find-latest/src/main/resources/person.proto new file mode 100644 index 0000000000..ba1ae1e85c --- /dev/null +++ b/examples/protobuf-find-latest/src/main/resources/person.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package tutorial; + +import "google/protobuf/timestamp.proto"; + +option java_package = "io.apicurio.registry.examples"; +option java_outer_classname = "AddressBookProtos"; + +message Person { + string name = 1; + int32 id = 2; // Unique ID number for this person. 
+ string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + string number = 1; + PhoneType type = 2; + } + + repeated PhoneNumber phones = 4; + + google.protobuf.Timestamp last_updated = 5; +} + +message AddressBook { + repeated Person people = 1; +} \ No newline at end of file diff --git a/examples/protobuf-validation/pom.xml b/examples/protobuf-validation/pom.xml new file mode 100644 index 0000000000..991fef5a0e --- /dev/null +++ b/examples/protobuf-validation/pom.xml @@ -0,0 +1,64 @@ + + + + apicurio-registry-examples + io.apicurio + 2.6.0-SNAPSHOT + ../pom.xml + + 4.0.0 + + apicurio-registry-examples-protobuf-validation + + + + io.apicurio + apicurio-registry-schema-validation-protobuf + ${apicurio-registry-schema-validation.version} + + + + + + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + initialize + + detect + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + generate-sources + + compile + + + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + + + + + \ No newline at end of file diff --git a/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java b/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java new file mode 100644 index 0000000000..eb16635767 --- /dev/null +++ b/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java @@ -0,0 +1,177 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.validation.protobuf; + +import io.apicurio.registry.resolver.SchemaResolverConfig; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.types.ArtifactType; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; +import io.apicurio.rest.client.spi.ApicurioHttpClient; +import io.apicurio.rest.client.spi.ApicurioHttpClientFactory; +import io.apicurio.schema.validation.protobuf.ProtobufMetadata; +import io.apicurio.schema.validation.protobuf.ProtobufRecord; +import io.apicurio.schema.validation.protobuf.ProtobufValidationResult; +import io.apicurio.schema.validation.protobuf.ProtobufValidator; +import io.apicurio.schema.validation.protobuf.ref.MessageExampleOuterClass.MessageExample; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.*; + +import static io.apicurio.schema.validation.protobuf.ref.MessageExample2OuterClass.*; + +/** + * This example demonstrates how to use Apicurio Registry Schema Validation library for Protobuf + *

+ * The following aspects are demonstrated:
+ *
+ * <ol>
+ *   <li>Registering the Protobuf schema in the registry</li>
+ *   <li>Configuring a ProtobufValidator that will use Apicurio Registry to fetch and cache the schema to use for validation</li>
+ *   <li>Successfully validating Java objects using static configuration to always use the same schema for validation</li>
+ *   <li>Successfully validating Java objects using dynamic configuration to dynamically choose the schema to use for validation</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
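+ * <p>
+ * In outline (mirroring the code below), the two validation styles look like this:
+ * <pre>{@code
+ * ProtobufValidator validator = new ProtobufValidator(props, Optional.ofNullable(artifactReference));
+ * // static: always validate against the configured artifact reference
+ * ProtobufValidationResult r1 = validator.validateByArtifactReference(bean);
+ * // dynamic: the record itself carries the artifact reference to resolve
+ * ProtobufValidationResult r2 = validator.validate(
+ *         new ProtobufRecord(bean, new ProtobufMetadata(artifactReference)));
+ * }</pre>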
+ * + * @author carnalca@redhat.com + */ +public class ProtobufValidationExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + + public static final String SCHEMA = + "syntax = \"proto3\";\n" + + "package io.apicurio.schema.validation.protobuf.ref;\n" + + "\n" + + "message MessageExample {\n" + + "\n" + + " string key = 1;\n" + + " string value = 2;\n" + + "\n" + + "}"; + + public static final void main(String[] args) throws Exception { + System.out.println("Starting example " + ProtobufValidationExample.class.getSimpleName()); + + // Register the schema with the registry (only if it is not already registered) + String artifactId = ProtobufValidationExample.class.getSimpleName(); + RegistryClient client = createRegistryClient(REGISTRY_URL); + client.createArtifact("default", artifactId, ArtifactType.PROTOBUF, IfExists.RETURN_OR_UPDATE, + new ByteArrayInputStream(SCHEMA.getBytes(StandardCharsets.UTF_8))); + + // Create an artifact reference pointing to the artifact we just created + // and pass it to the ProtobufValidator + ArtifactReference artifactReference = ArtifactReference.builder().groupId("default") + .artifactId(artifactId).build(); + + // Create the ProtobufValidator providing an ArtifactReference + // this ArtifactReference will allways be used to lookup the schema in the registry when using "validateByArtifactReference" + ProtobufValidator validator = createProtobufValidator(artifactReference); + + // Test successfull validation + + MessageExample bean = MessageExample.newBuilder() + .setKey(UUID.randomUUID().toString()) + .setValue("Hello world").build(); + + System.out.println(); + System.out.println("Validating valid message bean"); + ProtobufValidationResult result = validator.validateByArtifactReference(bean); + System.out.println("Validation result: " + result); + System.out.println(); + + // Test validation error + + MessageExample2 invalidBean = MessageExample2.newBuilder() + .setKey2(UUID.randomUUID().toString()) + .setValue2(32) + .build(); + + + System.out.println("Validating invalid message bean"); + ProtobufValidationResult invalidBeanResult = validator.validateByArtifactReference(invalidBean); + System.out.println("Validation result: " + invalidBeanResult); + System.out.println(); + + // Test validate method providing a record to dynamically resolve the artifact to fetch from the registry + + ProtobufRecord record = new ProtobufRecord(bean, new ProtobufMetadata(artifactReference)); + + System.out.println("Validating message bean using dynamic ArtifactReference resolution"); + ProtobufValidationResult recordValidationResult = validator.validate(record); + System.out.println("Validation result: " + recordValidationResult); + System.out.println(); + + } + + /** + * Creates the registry client + */ + private static RegistryClient createRegistryClient(String registryUrl) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + + //Just if security values are present, then we configure them. 
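+ // With a token endpoint configured, the client authenticates using an OIDC
+ // client-credentials flow; otherwise an anonymous client is returned.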
+ if (tokenEndpoint != null) { + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + ApicurioHttpClient httpClient = ApicurioHttpClientFactory.create(tokenEndpoint, + new AuthErrorHandler()); + OidcAuth auth = new OidcAuth(httpClient, authClient, authSecret); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), auth); + } else { + return RegistryClientFactory.create(registryUrl); + } + } + + /** + * Creates the protobuf validator + */ + private static ProtobufValidator createProtobufValidator(ArtifactReference artifactReference) { + Map props = new HashMap<>(); + + // Configure Service Registry location + props.putIfAbsent(SchemaResolverConfig.REGISTRY_URL, REGISTRY_URL); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the protobuf validator + return new ProtobufValidator(props, Optional.ofNullable(artifactReference)); + } + + private static void configureSecurityIfPresent(Map props) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SchemaResolverConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SchemaResolverConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + } + } +} diff --git a/examples/protobuf-validation/src/main/proto/message_example.proto b/examples/protobuf-validation/src/main/proto/message_example.proto new file mode 100644 index 0000000000..7eaf7daaaa --- /dev/null +++ b/examples/protobuf-validation/src/main/proto/message_example.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; +package io.apicurio.schema.validation.protobuf.ref; + +message MessageExample { + + string key = 1; + string value = 2; + +} \ No newline at end of file diff --git a/examples/protobuf-validation/src/main/proto/message_example_2.proto b/examples/protobuf-validation/src/main/proto/message_example_2.proto new file mode 100644 index 0000000000..2f9461e403 --- /dev/null +++ b/examples/protobuf-validation/src/main/proto/message_example_2.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; +package io.apicurio.schema.validation.protobuf.ref; + +message MessageExample2 { + + string key2 = 1; + int32 value2 = 2; + +} \ No newline at end of file diff --git a/examples/quarkus-auth/README.md b/examples/quarkus-auth/README.md new file mode 100644 index 0000000000..6a8659d1ea --- /dev/null +++ b/examples/quarkus-auth/README.md @@ -0,0 +1,13 @@ +# Commands + +`mvn generate-sources -Pavro` + +`mvn package` + +Set the envorinment variables for the registry url, auth details, kafka boostrap servers ... 
+ +`java -jar target/quarkus-app/quarkus-run.jar` + +Or + +`mvn quarkus:dev` \ No newline at end of file diff --git a/examples/quarkus-auth/pom.xml b/examples/quarkus-auth/pom.xml new file mode 100644 index 0000000000..b286e0f7da --- /dev/null +++ b/examples/quarkus-auth/pom.xml @@ -0,0 +1,236 @@ + + + 4.0.0 + + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-quarkus-auth + + + 11 + 11 + UTF-8 + UTF-8 + + io.quarkus + quarkus-universe-bom + 2.7.5.Final + 2.7.5.Final + + 1.10.0 + + 2.4.1.Final + + + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + + + + + + org.apache.avro + avro + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + test + + + io.quarkus + quarkus-apicurio-registry-avro + + + + io.quarkus + quarkus-resteasy-jackson + + + com.github.java-json-tools + jackson-coreutils + + + + + io.quarkus + quarkus-smallrye-health + + + com.github.java-json-tools + jackson-coreutils + 2.0 + compile + + + + io.quarkus + quarkus-smallrye-reactive-messaging-kafka + + + + io.strimzi + kafka-oauth-client + 0.7.2 + + + + io.quarkus + quarkus-config-yaml + + + + + + + + io.quarkus + quarkus-maven-plugin + ${quarkus-plugin.version} + true + + + + build + generate-code + generate-code-tests + + + + + + maven-compiler-plugin + + + maven-surefire-plugin + + + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + + + avro + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/src/main/resources/avro/schema/ + ${project.basedir}/src/main/java/ + + + + + + + + + upload + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + generate-sources + + register + + + http://localhost:8181/apis/registry/v2 + AVRO + + ${project.basedir}/src/main/resources/avro/schema/event.avsc + + + + + + + + + + test + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + generate-sources + + test-update + + + http://localhost:8181/apis/registry/v2 + AVRO + + ${project.basedir}/src/main/resources/avro/schema/event.avsc + + + + + + + + + + download + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + generate-sources + + download + + + http://localhost:8181/apis/registry/v2 + + events-value + + ${project.build.directory} + + + + + + + + + diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java new file mode 100644 index 0000000000..e587e5d6f8 --- /dev/null +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java @@ -0,0 +1,20 @@ +package io.apicurio.example; + +import javax.enterprise.context.ApplicationScoped; + +import org.eclipse.microprofile.reactive.messaging.Incoming; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.apicurio.example.schema.avro.Event; + +@ApplicationScoped +public class Consumer { + + Logger log = LoggerFactory.getLogger(this.getClass()); + + @Incoming("events-sink") + public void consume(Event message) { + log.info("Consumer consumed message {} from topic {}", message, "events"); + } +} diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java new file mode 100644 index 0000000000..13b5ac01b4 --- /dev/null +++ 
b/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java @@ -0,0 +1,21 @@ +package io.apicurio.example; + +public class InputEvent { + + private String name; + private String description; + + public String getName() { + return name; + } + public void setName(String name) { + this.name = name; + } + public String getDescription() { + return description; + } + public void setDescription(String description) { + this.description = description; + } + +} diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java new file mode 100644 index 0000000000..a4503109f5 --- /dev/null +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java @@ -0,0 +1,26 @@ +package io.apicurio.example; + +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.eclipse.microprofile.reactive.messaging.Channel; +import org.eclipse.microprofile.reactive.messaging.Emitter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.apicurio.example.schema.avro.Event; + +@ApplicationScoped +public class Producer { + + Logger log = LoggerFactory.getLogger(this.getClass()); + + @Inject + @Channel("events") + Emitter eventsEmitter; + + public void send(Event payload) { + log.info("Producer sending message {} to events channel", payload); + this.eventsEmitter.send(payload); + } +} \ No newline at end of file diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java new file mode 100644 index 0000000000..561c956dec --- /dev/null +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java @@ -0,0 +1,30 @@ +package io.apicurio.example; + +import javax.inject.Inject; +import javax.ws.rs.POST; +import javax.ws.rs.Path; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.apicurio.example.schema.avro.Event; + +@Path("/kafka") +public class Resource { + + Logger log = LoggerFactory.getLogger(this.getClass()); + + @Inject + Producer producer; + + @POST + @Path("/publish") + public void publish(InputEvent event) { + log.info("REST Controller has received entity: {}", event); + Event avroEvent = new Event(); + avroEvent.setName(event.getName()); + avroEvent.setDescription(event.getDescription()); + avroEvent.setSource("quarkus"); + this.producer.send(avroEvent); + } +} \ No newline at end of file diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java new file mode 100644 index 0000000000..5d33bf7778 --- /dev/null +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java @@ -0,0 +1,491 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package io.apicurio.example.schema.avro; + +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +/** Avro Schema for Event */ +@org.apache.avro.specific.AvroGenerated +public class Event extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -3808115584469037383L; + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Event\",\"namespace\":\"io.apicurio.example.schema.avro\",\"doc\":\"Avro Schema for Event\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"description\",\"type\":\"string\"},{\"name\":\"source\",\"type\":[\"null\",\"string\"],\"default\":null}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Event to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Event from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Event instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Event fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private java.lang.CharSequence name; + private java.lang.CharSequence description; + private java.lang.CharSequence source; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Event() {} + + /** + * All-args constructor. + * @param name The new value for name + * @param description The new value for description + * @param source The new value for source + */ + public Event(java.lang.CharSequence name, java.lang.CharSequence description, java.lang.CharSequence source) { + this.name = name; + this.description = description; + this.source = source; + } + + @Override +public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } + @Override +public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + @Override +public java.lang.Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return description; + case 2: return source; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override +@SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: name = (java.lang.CharSequence)value$; break; + case 1: description = (java.lang.CharSequence)value$; break; + case 2: source = (java.lang.CharSequence)value$; break; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public java.lang.CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(java.lang.CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'description' field. + * @return The value of the 'description' field. + */ + public java.lang.CharSequence getDescription() { + return description; + } + + + /** + * Sets the value of the 'description' field. + * @param value the value to set. + */ + public void setDescription(java.lang.CharSequence value) { + this.description = value; + } + + /** + * Gets the value of the 'source' field. + * @return The value of the 'source' field. + */ + public java.lang.CharSequence getSource() { + return source; + } + + + /** + * Sets the value of the 'source' field. + * @param value the value to set. + */ + public void setSource(java.lang.CharSequence value) { + this.source = value; + } + + /** + * Creates a new Event RecordBuilder. + * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder() { + return new io.apicurio.example.schema.avro.Event.Builder(); + } + + /** + * Creates a new Event RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder(io.apicurio.example.schema.avro.Event.Builder other) { + if (other == null) { + return new io.apicurio.example.schema.avro.Event.Builder(); + } else { + return new io.apicurio.example.schema.avro.Event.Builder(other); + } + } + + /** + * Creates a new Event RecordBuilder by copying an existing Event instance. + * @param other The existing instance to copy. + * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder(io.apicurio.example.schema.avro.Event other) { + if (other == null) { + return new io.apicurio.example.schema.avro.Event.Builder(); + } else { + return new io.apicurio.example.schema.avro.Event.Builder(other); + } + } + + /** + * RecordBuilder for Event instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence name; + private java.lang.CharSequence description; + private java.lang.CharSequence source; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(io.apicurio.example.schema.avro.Event.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.source)) { + this.source = data().deepCopy(fields()[2].schema(), other.source); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing Event instance + * @param other The existing instance to copy. + */ + private Builder(io.apicurio.example.schema.avro.Event other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.source)) { + this.source = data().deepCopy(fields()[2].schema(), other.source); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public java.lang.CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder setName(java.lang.CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'description' field. + * @return The value. + */ + public java.lang.CharSequence getDescription() { + return description; + } + + + /** + * Sets the value of the 'description' field. + * @param value The value of 'description'. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder setDescription(java.lang.CharSequence value) { + validate(fields()[1], value); + this.description = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'description' field has been set. + * @return True if the 'description' field has been set, false otherwise. + */ + public boolean hasDescription() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'description' field. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearDescription() { + description = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'source' field. + * @return The value. + */ + public java.lang.CharSequence getSource() { + return source; + } + + + /** + * Sets the value of the 'source' field. + * @param value The value of 'source'. + * @return This builder. 
+ */ + public io.apicurio.example.schema.avro.Event.Builder setSource(java.lang.CharSequence value) { + validate(fields()[2], value); + this.source = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'source' field has been set. + * @return True if the 'source' field has been set, false otherwise. + */ + public boolean hasSource() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'source' field. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearSource() { + source = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Event build() { + try { + Event record = new Event(); + record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]); + record.description = fieldSetFlags()[1] ? this.description : (java.lang.CharSequence) defaultValue(fields()[1]); + record.source = fieldSetFlags()[2] ? this.source : (java.lang.CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeString(this.description); + + if (this.source == null) { + out.writeIndex(0); + out.writeNull(); + } else { + out.writeIndex(1); + out.writeString(this.source); + } + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.description = in.readString(this.description instanceof Utf8 ? (Utf8)this.description : null); + + if (in.readIndex() != 1) { + in.readNull(); + this.source = null; + } else { + this.source = in.readString(this.source instanceof Utf8 ? (Utf8)this.source : null); + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.description = in.readString(this.description instanceof Utf8 ? (Utf8)this.description : null); + break; + + case 2: + if (in.readIndex() != 1) { + in.readNull(); + this.source = null; + } else { + this.source = in.readString(this.source instanceof Utf8 ? 
(Utf8)this.source : null); + } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } +} + + + + + + + + + + diff --git a/examples/quarkus-auth/src/main/resources/application.yaml b/examples/quarkus-auth/src/main/resources/application.yaml new file mode 100644 index 0000000000..19c768b95e --- /dev/null +++ b/examples/quarkus-auth/src/main/resources/application.yaml @@ -0,0 +1,74 @@ +quarkus: + http: + port: 8082 + +apicurio: + registry: + url: ${APICURIO_REGISTRY_URL:} + auth: + client: + id: ${CLIENT_ID:} + secret: ${CLIENT_SECRET:} + +auth: + service: + token-url: ${TOKEN_URL:} + +kafka: + bootstrap: + servers: ${KAFKA_BOOTSTRAP_SERVERS:} + +mp: + messaging: + connector: + smallrye-kafka: + security: + protocol: SASL_SSL + sasl: + mechanism: OAUTHBEARER + jaas: + config: org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required oauth.client.id="${apicurio.auth.client.id}" oauth.client.secret="${apicurio.auth.client.secret}" oauth.token.endpoint.uri="${auth.service.token-url}" ; + login: + callback: + handler: + class: io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler + outgoing: + events: + connector: smallrye-kafka + topic: events + key: + serializer: org.apache.kafka.common.serialization.StringSerializer + value: + serializer: io.apicurio.registry.serde.avro.AvroKafkaSerializer + apicurio: + registry: + url: ${apicurio.registry.url} + artifact-resolver-strategy: io.apicurio.registry.serde.avro.strategy.RecordIdStrategy + auto-register: true + # find-latest: true + auth: + service: + token: + endpoint: ${auth.service.token-url} + client: + id: ${apicurio.auth.client.id} + secret: ${apicurio.auth.client.secret} + incoming: + events-sink: + connector: smallrye-kafka + topic: events + key: + deserializer: org.apache.kafka.common.serialization.StringDeserializer + value: + deserializer: io.apicurio.registry.serde.avro.AvroKafkaDeserializer + apicurio: + registry: + url: ${apicurio.registry.url} + use-specific-avro-reader: true + auth: + service: + token: + endpoint: ${auth.service.token-url} + client: + id: ${apicurio.auth.client.id} + secret: ${apicurio.auth.client.secret} \ No newline at end of file diff --git a/examples/quarkus-auth/src/main/resources/avro/schema/event.avsc b/examples/quarkus-auth/src/main/resources/avro/schema/event.avsc new file mode 100644 index 0000000000..95ad5cd143 --- /dev/null +++ b/examples/quarkus-auth/src/main/resources/avro/schema/event.avsc @@ -0,0 +1,18 @@ +{ + "name": "Event", + "namespace": "io.apicurio.example.schema.avro", + "type": "record", + "doc": "Avro Schema for Event", + "fields" : [ { + "name" : "name", + "type" : "string" + }, { + "name" : "description", + "type" : "string" + }, { + "name" : "source", + "type" : ["null", "string"], + "default" : null + } + ] +} \ No newline at end of file diff --git a/examples/rest-client-downstream/README.md b/examples/rest-client-downstream/README.md new file mode 100644 index 0000000000..7676a9a82f --- /dev/null +++ b/examples/rest-client-downstream/README.md @@ -0,0 +1,11 @@ +# Apicurio REST client example application using your RHOSR instance + +1. Create a RHOSR Managed Service instance on cloud.redhat.com and save your instance API URL. + +2. Create an associated Service Account and save its Client ID and Client Secret. + +3. Ensure your service account has at least manager permissions on your RHOSR instance. + +4. Set the environment variables AUTH_CLIENT_ID, AUTH_CLIENT_SECRET, AUTH_TOKEN_ENDPOINT and REGISTRY_URL. + +5.
Execute the Java main class SimpleRegistryDemo in this module. It creates, fetches, and deletes a schema in your instance, verifying that the service works. diff --git a/examples/rest-client-downstream/pom.xml b/examples/rest-client-downstream/pom.xml new file mode 100644 index 0000000000..a1c94bf92d --- /dev/null +++ b/examples/rest-client-downstream/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-rest-client-downstream + jar + + + + 2.2.1.Final + 11 + 11 + + + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + + + \ No newline at end of file diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java new file mode 100644 index 0000000000..c6420d39db --- /dev/null +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java @@ -0,0 +1,24 @@ +package io.apicurio.registry.examples; + +public class Constants { + + public static final String SCHEMA = "{" + + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + + " \"required\": [" + + " \"message\"," + + " \"time\"" + + " ]," + + " \"type\": \"object\"," + + " \"properties\": {" + + " \"message\": {" + + " \"description\": \"\"," + + " \"type\": \"string\"" + + " }," + + " \"time\": {" + + " \"description\": \"\"," + + " \"type\": \"number\"" + + " }" + + " }" + + "}"; +} diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java new file mode 100644 index 0000000000..c2656f5a9a --- /dev/null +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java @@ -0,0 +1,64 @@ +package io.apicurio.registry.examples; + +import java.util.Collections; +import java.util.UUID; + +import io.apicurio.registry.examples.util.RegistryDemoUtil; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.rest.client.JdkHttpClientProvider; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; +import io.apicurio.rest.client.spi.ApicurioHttpClient; + + +/** + * Simple demo app that shows how to use the client. + *
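+ * A hypothetical invocation, with placeholder values, and assuming the exec-maven-plugin is
+ * available in this module:
+ *   export REGISTRY_URL=https://your-instance.example.com/apis/registry/v2
+ *   export AUTH_CLIENT_ID=... AUTH_CLIENT_SECRET=... AUTH_TOKEN_ENDPOINT=...
+ *   mvn compile exec:java -Dexec.mainClass=io.apicurio.registry.examples.SimpleRegistryDemo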

+ * 1) Register a new schema in the Registry. + * 2) Fetch the newly created schema. + * 3) Delete the schema. + * + * @author Carles Arnal + */ +public class SimpleRegistryDemo { + + private static final RegistryClient client; + + static { + // Create a Service Registry client + String registryUrl = System.getenv("REGISTRY_URL"); + client = createProperClient(registryUrl); + } + + public static void main(String[] args) throws Exception { + // Register the JSON Schema schema in the Apicurio registry. + final String artifactId = UUID.randomUUID().toString(); + + RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); + + //Wait for the artifact to be available. + Thread.sleep(1000); + + RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); + + RegistryDemoUtil.deleteSchema(client, artifactId); + + //Required due to a bug in the version of registry libraries used. Once the new version is released, we'll be able to remove this. + System.exit(0); + } + + public static RegistryClient createProperClient(String registryUrl) { + RegistryClientFactory.setProvider(new JdkHttpClientProvider()); + + final String tokenEndpoint = System.getenv("AUTH_TOKEN_ENDPOINT"); + if (tokenEndpoint != null) { + final String authClient = System.getenv("AUTH_CLIENT_ID"); + final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); + ApicurioHttpClient httpClient = new JdkHttpClientProvider().create(tokenEndpoint, Collections.emptyMap(), null, new AuthErrorHandler()); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), new OidcAuth(httpClient, authClient, authSecret)); + } else { + return RegistryClientFactory.create(registryUrl); + } + } +} diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java new file mode 100644 index 0000000000..797969c4bd --- /dev/null +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java @@ -0,0 +1,76 @@ +package io.apicurio.registry.examples.util; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.types.ArtifactType; + +public class RegistryDemoUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(RegistryDemoUtil.class); + + /** + * Create the artifact in the registry (or update it if it already exists). 
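+ * (Since this example passes {@code IfExists.RETURN} below, an artifact that already exists is
+ * returned unchanged rather than updated; no new version is created.)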
+ * + * @param artifactId + * @param schema + */ + public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, String schema) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Creating artifact in the registry for JSON Schema with ID: {}", artifactId); + try { + final ByteArrayInputStream content = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); + final ArtifactMetaData metaData = service.createArtifact("default", artifactId, ArtifactType.JSON, IfExists.RETURN, content); + assert metaData != null; + LOGGER.info("=====> Successfully created JSON Schema artifact in Service Registry: {}", metaData); + LOGGER.info("---------------------------------------------------------"); + } catch (Exception t) { + throw t; + } + } + + /** + * Get the artifact from the registry. + * + * @param artifactId + */ + public static ArtifactMetaData getSchemaFromRegistry(RegistryClient service, String artifactId) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Fetching artifact from the registry for JSON Schema with ID: {}", artifactId); + try { + final ArtifactMetaData metaData = service.getArtifactMetaData("default", artifactId); + assert metaData != null; + LOGGER.info("=====> Successfully fetched JSON Schema artifact in Service Registry: {}", metaData); + LOGGER.info("---------------------------------------------------------"); + return metaData; + } catch (Exception t) { + throw t; + } + } + + /** + * Delete the artifact from the registry. + * + * @param artifactId + */ + public static void deleteSchema(RegistryClient service, String artifactId) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Deleting artifact from the registry for JSON Schema with ID: {}", artifactId); + try { + service.deleteArtifact("default", artifactId); + LOGGER.info("=====> Successfully deleted JSON Schema artifact in Service Registry."); + LOGGER.info("---------------------------------------------------------"); + } catch (Exception t) { + throw t; + } + } +} diff --git a/examples/rest-client/pom.xml b/examples/rest-client/pom.xml new file mode 100644 index 0000000000..56b6d01808 --- /dev/null +++ b/examples/rest-client/pom.xml @@ -0,0 +1,36 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-rest-client + jar + + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + + + + io.apicurio + apicurio-common-rest-client-vertx + ${apicurio-common-rest-client.version} + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + + + \ No newline at end of file diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java new file mode 100644 index 0000000000..c6420d39db --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java @@ -0,0 +1,24 @@ +package io.apicurio.registry.examples; + +public class Constants { + + public static final String SCHEMA = "{" + + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + + " \"required\": [" + + " \"message\"," + + " \"time\"" + + " ]," + + " \"type\": \"object\"," + + " \"properties\": {" + + " \"message\": {" + + " \"description\": \"\"," + + " \"type\": \"string\"" + + " }," + + " 
\"time\": {" + + " \"description\": \"\"," + + " \"type\": \"number\"" + + " }" + + " }" + + "}"; +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/HeadersCustomizationExample.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/HeadersCustomizationExample.java new file mode 100644 index 0000000000..1a2eb4dacf --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/HeadersCustomizationExample.java @@ -0,0 +1,45 @@ +package io.apicurio.registry.examples; + +import io.apicurio.registry.examples.util.RegistryDemoUtil; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; + +import java.util.Map; +import java.util.UUID; + +/** + * Simple demo app that shows how to use the client and set next request headers. + *

+ * 1) Register a new schema in the Registry. + * 2) Fetch the newly created schema. + * 3) Delete the schema. + * + * @author Carles Arnal + */ +public class HeadersCustomizationExample { + + private static final RegistryClient client; + + static { + // Create a Service Registry client + String registryUrl = "http://localhost:8080/apis/registry/v2"; + client = RegistryClientFactory.create(registryUrl); + } + + public static void main(String[] args) throws Exception { + + // Register the JSON Schema schema in the Apicurio registry. + final String artifactId = UUID.randomUUID().toString(); + + RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); + + //Wait for the artifact to be available. + Thread.sleep(1000); + + client.setNextRequestHeaders(Map.of("newHeader", "newHeaderValue")); + + RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); + + RegistryDemoUtil.deleteSchema(client, artifactId); + } +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java new file mode 100644 index 0000000000..5dc5c56b83 --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java @@ -0,0 +1,53 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.Collections; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.utils.IoUtil; + +/** + * @author eric.wittmann@gmail.com + */ +public class RegistryLoader { + + public static void main(String[] args) throws Exception { + String registryUrl = "http://localhost:8080/apis/registry/v2"; + RegistryClient client = RegistryClientFactory.create(registryUrl, Collections.emptyMap()); + + File templateFile = new File("C:\\Temp\\registry.json"); + String template; + try (InputStream templateIS = new FileInputStream(templateFile)) { + template = IoUtil.toString(templateIS); + } + + for (int idx = 1; idx <= 1000; idx++) { + System.out.println("Creating artifact #" + idx); + String content = template.replaceFirst("Apicurio Registry API", "Apicurio Registry API :: Copy #" + idx); + InputStream contentIS = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); + client.createArtifact(null, null, contentIS); + } + } + +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java new file mode 100644 index 0000000000..66a7266d4b --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java @@ -0,0 +1,58 @@ +package io.apicurio.registry.examples; + +import io.apicurio.registry.examples.util.RegistryDemoUtil; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.rest.client.JdkHttpClient; +import io.apicurio.rest.client.auth.Auth; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; + +import java.util.Collections; +import java.util.UUID; + +/** + * Simple demo app that shows how to use the client. + *

+ * 1) Register a new schema in the Registry. + * 2) Fetch the newly created schema. + * 3) Delete the schema. + * + * @author Carles Arnal + */ +public class SimpleRegistryDemo { + + private static final RegistryClient client; + + static { + // Create a Service Registry client + String registryUrl = "http://localhost:8080/apis/registry/v2"; + client = createProperClient(registryUrl); + } + + public static void main(String[] args) throws Exception { + // Register the JSON Schema schema in the Apicurio registry. + final String artifactId = UUID.randomUUID().toString(); + + RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); + + //Wait for the artifact to be available. + Thread.sleep(1000); + + RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); + + RegistryDemoUtil.deleteSchema(client, artifactId); + } + + public static RegistryClient createProperClient(String registryUrl) { + final String tokenEndpoint = System.getenv("AUTH_TOKEN_ENDPOINT"); + if (tokenEndpoint != null) { + final String authClient = System.getenv("AUTH_CLIENT_ID"); + final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); + Auth auth = new OidcAuth(new JdkHttpClient(tokenEndpoint, Collections.emptyMap(), null, new AuthErrorHandler()), authClient, authSecret); + return RegistryClientFactory.create(new JdkHttpClient(registryUrl, Collections.emptyMap(), auth, new AuthErrorHandler())); + } else { + return RegistryClientFactory.create(registryUrl); + } + } +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java new file mode 100644 index 0000000000..b7a727094c --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java @@ -0,0 +1,60 @@ +package io.apicurio.registry.examples; + +import io.apicurio.registry.examples.util.RegistryDemoUtil; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.rest.client.JdkHttpClient; +import io.apicurio.rest.client.auth.Auth; +import io.apicurio.rest.client.auth.BasicAuth; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; + +import java.util.Collections; +import java.util.Optional; +import java.util.UUID; + +/** + * Simple demo app that shows how to use the client. + *

+ * 1) Register a new schema in the Registry. + * 2) Fetch the newly created schema. + * 3) Delete the schema. + * + * @author Carles Arnal + */ +public class SimpleRegistryDemoBasicAuth { + + private static final RegistryClient client; + + static { + // Create a Service Registry client + String registryUrl = "http://localhost:8080/apis/registry/v2"; + client = createProperClient(registryUrl); + } + + public static void main(String[] args) throws Exception { + // Register the JSON Schema schema in the Apicurio registry. + final String artifactId = UUID.randomUUID().toString(); + + RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); + + //Wait for the artifact to be available. + Thread.sleep(1000); + + RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); + + RegistryDemoUtil.deleteSchema(client, artifactId); + } + + public static RegistryClient createProperClient(String registryUrl) { + final String tokenEndpoint = System.getenv("AUTH_TOKEN_ENDPOINT"); + if (tokenEndpoint != null) { + final String authClient = System.getenv("AUTH_CLIENT_ID"); + final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); + Auth auth = new BasicAuth(authClient, authSecret); + return RegistryClientFactory.create(new JdkHttpClient(registryUrl, Collections.emptyMap(), auth, new AuthErrorHandler())); + } else { + return RegistryClientFactory.create(registryUrl); + } + } +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleVertxClientExample.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleVertxClientExample.java new file mode 100644 index 0000000000..ee16c09370 --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleVertxClientExample.java @@ -0,0 +1,45 @@ +package io.apicurio.registry.examples; + + +import io.apicurio.registry.examples.util.RegistryDemoUtil; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.rest.client.VertxHttpClientProvider; +import io.vertx.core.Vertx; + +import java.util.UUID; + +/** + * Simple demo app that shows how to use the Vertx client. + *

+ * 1) Register a new schema in the Registry. + * 2) Fetch the newly created schema. + * 3) Delete the schema. + * + * @author Carles Arnal + */ +public class SimpleVertxClientExample { + + private static RegistryClient client; + + static { + // Create a Service Registry client + String registryUrl = "http://localhost:8080/apis/registry/v2"; + RegistryClientFactory.setProvider(new VertxHttpClientProvider(Vertx.vertx())); + client = RegistryClientFactory.create(registryUrl); + } + + public static void main(String[] args) throws Exception { + // Register the JSON Schema schema in the Apicurio registry. + final String artifactId = UUID.randomUUID().toString(); + + RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); + + //Wait for the artifact to be available. + Thread.sleep(1000); + + RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); + + RegistryDemoUtil.deleteSchema(client, artifactId); + } +} diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java new file mode 100644 index 0000000000..797969c4bd --- /dev/null +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java @@ -0,0 +1,76 @@ +package io.apicurio.registry.examples.util; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.types.ArtifactType; + +public class RegistryDemoUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(RegistryDemoUtil.class); + + /** + * Create the artifact in the registry (or update it if it already exists). + * + * @param artifactId + * @param schema + */ + public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, String schema) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Creating artifact in the registry for JSON Schema with ID: {}", artifactId); + try { + final ByteArrayInputStream content = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); + final ArtifactMetaData metaData = service.createArtifact("default", artifactId, ArtifactType.JSON, IfExists.RETURN, content); + assert metaData != null; + LOGGER.info("=====> Successfully created JSON Schema artifact in Service Registry: {}", metaData); + LOGGER.info("---------------------------------------------------------"); + } catch (Exception t) { + throw t; + } + } + + /** + * Get the artifact from the registry. + * + * @param artifactId + */ + public static ArtifactMetaData getSchemaFromRegistry(RegistryClient service, String artifactId) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Fetching artifact from the registry for JSON Schema with ID: {}", artifactId); + try { + final ArtifactMetaData metaData = service.getArtifactMetaData("default", artifactId); + assert metaData != null; + LOGGER.info("=====> Successfully fetched JSON Schema artifact in Service Registry: {}", metaData); + LOGGER.info("---------------------------------------------------------"); + return metaData; + } catch (Exception t) { + throw t; + } + } + + /** + * Delete the artifact from the registry. 
+ * + * @param artifactId + */ + public static void deleteSchema(RegistryClient service, String artifactId) { + + LOGGER.info("---------------------------------------------------------"); + LOGGER.info("=====> Deleting artifact from the registry for JSON Schema with ID: {}", artifactId); + try { + service.deleteArtifact("default", artifactId); + LOGGER.info("=====> Successfully deleted JSON Schema artifact in Service Registry."); + LOGGER.info("---------------------------------------------------------"); + } catch (Exception t) { + throw t; + } + } +} diff --git a/examples/serdes-with-references/pom.xml b/examples/serdes-with-references/pom.xml new file mode 100644 index 0000000000..b661633bdc --- /dev/null +++ b/examples/serdes-with-references/pom.xml @@ -0,0 +1,166 @@ + + + + apicurio-registry-examples + io.apicurio + 2.6.0-SNAPSHOT + ../pom.xml + + 4.0.0 + + apicurio-registry-examples-references + + + 3.19.4 + 0.6.1 + 2.7.4 + + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + ${apicurio-registry.version} + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${apicurio-registry.version} + + + io.apicurio + apicurio-registry-protobuf-schema-utilities + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.10.0 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + com.google.api.grpc + proto-google-common-protos + ${protobuf.googleapi.types.version} + + + + + + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + initialize + + detect + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + generate-sources + + compile + + + ./src/main/resources/schema + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + + org.apache.avro + avro-maven-plugin + ${version.avro} + + + generate-sources + + schema + + + String + + ${project.basedir}/src/main/resources/Exchange.avsc + ${project.basedir}/src/main/resources/TradeKey.avsc + ${project.basedir}/src/main/resources/TradeRaw.avsc + + ${project.basedir}/src/main/resources/ + ${project.basedir}/target/generated-sources/avro + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-dist + prepare-package + + copy-resources + + + ${project.build.outputDirectory} + + + ${project.basedir}/target/generated-sources/protobuf/ + false + + + + + + + + + + + + \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java new file mode 100644 index 0000000000..e6cf444580 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java @@ -0,0 +1,179 @@ +package io.apicurio.registry.examples.references; + +import com.kubetrade.schema.common.Exchange; +import com.kubetrade.schema.trade.TradeKey; +import com.kubetrade.schema.trade.TradeRaw; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import 
io.apicurio.registry.serde.avro.AvroKafkaSerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.serde.avro.ReflectAvroDatumProvider; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +public class AvroSerdeReferencesExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = AvroSerdeReferencesExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Trade"; + + public static void main(String[] args) throws Exception { + System.out.println("Starting example " + AvroSerdeReferencesExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + TradeRaw tradeRaw = new TradeRaw(); + TradeKey tradeKey = new TradeKey(); + tradeKey.setKey(String.valueOf(producedMessages)); + tradeKey.setExchange(Exchange.GEMINI); + tradeRaw.setTradeKey(tradeKey); + tradeRaw.setPayload("Hello (" + producedMessages++ + ")!"); + tradeRaw.setSymbol("testSymbol"); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, tradeRaw); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + TradeRaw tradeRaw = record.value(); + System.out.println("Consumed a message: " + tradeRaw.getPayload() + " @ " + tradeRaw.getTradeKey().getKey()); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer.
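+ * (Note: this producer configures {@code AvroKafkaSerdeConfig.AVRO_ENCODING_JSON}, so values are
+ * written with Avro's JSON encoding instead of the default binary encoding, which makes messages
+ * easier to inspect on the topic at the cost of larger payloads.)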
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Register the artifact if not found in the registry. + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name()); + props.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); + + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); + props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. 
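+        // (A minimal sketch of the binary envelope described above, assuming IDs travel in the
+        //  payload rather than in Kafka headers: the serialized value starts with a magic byte
+        //  followed by an 8-byte globalId, e.g.
+        //      ByteBuffer buf = ByteBuffer.wrap(valueBytes);
+        //      byte magic = buf.get();        // 0x0
+        //      long globalId = buf.getLong(); // id used to fetch the schema from the registry
+        //  Application code never does this by hand; the deserializer handles it internally.)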
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + public static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesDereferencedExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesDereferencedExample.java new file mode 100644 index 0000000000..0335dbad68 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesDereferencedExample.java @@ -0,0 +1,306 @@ +package io.apicurio.registry.examples.references; + +import io.apicurio.registry.examples.references.model.Citizen; +import io.apicurio.registry.examples.references.model.CitizenIdentifier; +import io.apicurio.registry.examples.references.model.City; +import io.apicurio.registry.examples.references.model.CityQualification; +import io.apicurio.registry.examples.references.model.IdentifierQualification; +import io.apicurio.registry.examples.references.model.Qualification; +import io.apicurio.registry.resolver.SchemaResolverConfig; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; +import io.apicurio.registry.rest.v2.beans.ArtifactReference; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaDeserializer; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaSerializer; +import io.apicurio.registry.serde.strategy.SimpleTopicIdStrategy; +import io.apicurio.registry.types.ArtifactType; +import io.apicurio.registry.types.ContentTypes; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; +import io.apicurio.rest.client.spi.ApicurioHttpClient; +import io.apicurio.rest.client.spi.ApicurioHttpClientFactory; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import 
org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.io.InputStream; +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Properties; +import java.util.UUID; + +public class JsonSerdeReferencesDereferencedExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = JsonSerdeReferencesDereferencedExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Dereference"; + + public static void main(String[] args) throws Exception { + + System.out.println("Starting example " + JsonSerdeReferencesDereferencedExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + + RegistryClient client = createRegistryClient(REGISTRY_URL); + + InputStream citizenSchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/citizen.json"); + InputStream citySchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/types/city/city.json"); + InputStream citizenIdentifier = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/types/identifier/citizenIdentifier.json"); + InputStream qualificationSchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/qualification.json"); + InputStream addressSchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/sample.address.json"); + InputStream identifierQualificationSchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/types/identifier/qualification.json"); + InputStream cityQualificationSchema = JsonSerdeReferencesDereferencedExample.class.getClassLoader().getResourceAsStream("serde/json/types/city/qualification.json"); + + //Creates the city qualification schema. + final ArtifactMetaData amdCityQualification = client.createArtifact("default", "cityQualification", ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, cityQualificationSchema); + + final ArtifactReference cityQualificationReference = new ArtifactReference(); + cityQualificationReference.setVersion("1"); + cityQualificationReference.setGroupId(amdCityQualification.getGroupId()); + cityQualificationReference.setArtifactId(amdCityQualification.getId()); + cityQualificationReference.setName("qualification.json"); + + //Creates the identifier qualification schema. + final ArtifactMetaData amdIdentifierQualification = client.createArtifact("default", "identifierQualification", ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, identifierQualificationSchema); + + final ArtifactReference identifierQualificationReference = new ArtifactReference(); + identifierQualificationReference.setVersion("1"); + identifierQualificationReference.setGroupId(amdIdentifierQualification.getGroupId()); + identifierQualificationReference.setArtifactId(amdIdentifierQualification.getId()); + identifierQualificationReference.setName("qualification.json"); + + //Creates the city schema, with a reference to its qualification.
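+        // (Each ArtifactReference name in this example, e.g. "qualification.json" above or
+        //  "types/city/city.json" below, matches the value the referencing JSON schema uses in its
+        //  "$ref"; that name is the key the resolver uses to map a "$ref" back to the registered
+        //  artifact.)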
+ final ArtifactMetaData amdCity = client.createArtifact("default", "city", null, ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, false, null, null, ContentTypes.APPLICATION_CREATE_EXTENDED, null, null, citySchema, List.of(cityQualificationReference)); + + final ArtifactReference cityReference = new ArtifactReference(); + cityReference.setVersion("1"); + cityReference.setGroupId(amdCity.getGroupId()); + cityReference.setArtifactId(amdCity.getId()); + cityReference.setName("types/city/city.json"); + + //Creates the citizen identifier schema + final ArtifactMetaData amdCitizenIdentifier = client.createArtifact("default", "citizenIdentifier", null, ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, false, null, null, ContentTypes.APPLICATION_CREATE_EXTENDED, null, null, citizenIdentifier, + List.of(identifierQualificationReference)); + + final ArtifactReference citizenIdentifierReference = new ArtifactReference(); + citizenIdentifierReference.setVersion("1"); + citizenIdentifierReference.setGroupId(amdCitizenIdentifier.getGroupId()); + citizenIdentifierReference.setArtifactId(amdCitizenIdentifier.getId()); + citizenIdentifierReference.setName("types/identifier/citizenIdentifier.json"); + + //Creates the main qualification schema, used for the citizen + final ArtifactMetaData amdQualification = client.createArtifact("default", "qualification", ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, qualificationSchema); + + final ArtifactReference citizenQualificationReference = new ArtifactReference(); + citizenQualificationReference.setVersion("1"); + citizenQualificationReference.setGroupId(amdQualification.getGroupId()); + citizenQualificationReference.setArtifactId(amdQualification.getId()); + citizenQualificationReference.setName("qualification.json"); + + //Creates the address schema, used for the citizen + final ArtifactMetaData amdAddress = client.createArtifact("default", "address", ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, addressSchema); + + final ArtifactReference addressReference = new ArtifactReference(); + addressReference.setVersion("1"); + addressReference.setGroupId(amdAddress.getGroupId()); + addressReference.setArtifactId(amdAddress.getId()); + addressReference.setName("sample.address.json"); + + // Register the schema with the registry (only if it is not already registered) + String artifactId = TOPIC_NAME; + + //Creates the citizen schema, with references to qualification, city, identifier and address + final ArtifactMetaData amdCitizen = client.createArtifact("default", artifactId, null, ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, false, null, null, ContentTypes.APPLICATION_CREATE_EXTENDED, null, null, citizenSchema, + List.of(citizenQualificationReference, cityReference, citizenIdentifierReference, addressReference)); + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. 
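+        // (The Citizen built in this loop populates every referenced type: a City with its
+        //  CityQualification, a CitizenIdentifier with its IdentifierQualification, and a list of
+        //  Qualification entries, so all of the registered references are exercised end to end.)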
+ int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Create the message to send + City city = new City("New York", 10001); + city.setQualification(new CityQualification("city_qualification", 11)); + + CitizenIdentifier identifier = new CitizenIdentifier(123456789); + identifier.setIdentifierQualification(new IdentifierQualification("test_subject", 20)); + Citizen citizen = new Citizen("Carles", "Arnal", 23, city, identifier, + List.of(new Qualification(UUID.randomUUID().toString(), 6), new Qualification(UUID.randomUUID().toString(), 7), + new Qualification(UUID.randomUUID().toString(), 8))); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, SUBJECT_NAME, + citizen); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } + finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } + else + records.forEach(record -> { + Citizen msg = record.value(); + System.out.println("Consumed a message: " + msg + " @ " + msg.getCity()); + }); + } + } + finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + JsonSchemaKafkaSerializer.class.getName()); + props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class.getName()); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + props.putIfAbsent(SchemaResolverConfig.SERIALIZER_DEREFERENCE_SCHEMA, "true"); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. 
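+     * (This consumer sets {@code SchemaResolverConfig.DESERIALIZER_DEREFERENCE_SCHEMA} and enables
+     * validation, so the schema is resolved with its references inlined before incoming messages
+     * are validated.)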
+ */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + JsonSchemaKafkaDeserializer.class.getName()); + props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class.getName()); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + props.putIfAbsent(SchemaResolverConfig.DESERIALIZER_DEREFERENCE_SCHEMA, "true"); + props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, true); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + public static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + + /** + * Creates the registry client + */ + private static RegistryClient createRegistryClient(String registryUrl) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + + //Just if security values are present, then we configure them. 
+ if (tokenEndpoint != null) { + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + ApicurioHttpClient httpClient = ApicurioHttpClientFactory.create(tokenEndpoint, + new AuthErrorHandler()); + OidcAuth auth = new OidcAuth(httpClient, authClient, authSecret); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), auth); + } + else { + return RegistryClientFactory.create(registryUrl); + } + } +} diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java new file mode 100644 index 0000000000..a4997557ca --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java @@ -0,0 +1,239 @@ +package io.apicurio.registry.examples.references; + +import io.apicurio.registry.examples.references.model.Citizen; +import io.apicurio.registry.examples.references.model.City; +import io.apicurio.registry.resolver.SchemaResolverConfig; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; +import io.apicurio.registry.rest.v2.beans.ArtifactReference; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaDeserializer; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaSerializer; +import io.apicurio.registry.serde.strategy.SimpleTopicIdStrategy; +import io.apicurio.registry.types.ArtifactType; +import io.apicurio.registry.types.ContentTypes; +import io.apicurio.rest.client.auth.OidcAuth; +import io.apicurio.rest.client.auth.exception.AuthErrorHandler; +import io.apicurio.rest.client.spi.ApicurioHttpClient; +import io.apicurio.rest.client.spi.ApicurioHttpClientFactory; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.io.InputStream; +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; +import java.util.UUID; + +public class JsonSerdeReferencesExample { + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = JsonSerdeReferencesExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + + public static void main(String[] args) throws Exception { + + System.out.println("Starting example " + JsonSerdeReferencesExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + RegistryClient client = createRegistryClient(REGISTRY_URL); + InputStream citySchema = JsonSerdeReferencesExample.class.getClassLoader() + 
.getResourceAsStream("city.json"); + InputStream citizenSchema = JsonSerdeReferencesExample.class.getClassLoader() + .getResourceAsStream("citizen.json"); + + final ArtifactMetaData amdCity = client.createArtifact("default", "city", ArtifactType.JSON, + IfExists.RETURN_OR_UPDATE, citySchema); + + final ArtifactReference reference = new ArtifactReference(); + reference.setVersion(amdCity.getVersion()); + reference.setGroupId(amdCity.getGroupId()); + reference.setArtifactId(amdCity.getId()); + reference.setName("city.json"); + + // Register the schema with the registry (only if it is not already registered) + String artifactId = TOPIC_NAME; + // use the topic name as the artifactId because we're going to map topic name to artifactId later on. + + @SuppressWarnings("unused") + final ArtifactMetaData amdCitizen = client.createArtifact("default", artifactId, null, + ArtifactType.JSON, IfExists.RETURN, false, null, null, + ContentTypes.APPLICATION_CREATE_EXTENDED, null, + null, citizenSchema, Collections.singletonList(reference)); + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Create the message to send + Citizen citizen = new Citizen(); + City city = new City(); + city.setZipCode(45676); + city.setName(UUID.randomUUID().toString()); + citizen.setCity(city); + citizen.setAge(producedMessages + 20); + citizen.setFirstName(UUID.randomUUID().toString()); + citizen.setLastName(UUID.randomUUID().toString()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + citizen); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else + records.forEach(record -> { + Citizen msg = record.value(); + System.out.println("Consumed a message: " + msg + " @ " + msg.getCity()); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
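+     * <p>
+     * Values are serialized with the JSON Schema serializer, and {@link SimpleTopicIdStrategy}
+     * plus the {@code default} group map the topic name to the Citizen artifact registered in
+     * {@code main()}.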
+     */
+    private static Producer createKafkaProducer() {
+        Properties props = new Properties();
+
+        // Configure kafka settings
+        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
+        props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME);
+        props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
+        props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        // Use the Apicurio Registry provided Kafka Serializer for JSON Schema
+        props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+                JsonSchemaKafkaSerializer.class.getName());
+        props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class.getName());
+        props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default");
+
+        // Configure Service Registry location
+        props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL);
+
+        //Just if security values are present, then we configure them.
+        configureSecurityIfPresent(props);
+
+        // Create the Kafka producer
+        Producer producer = new KafkaProducer<>(props);
+        return producer;
+    }
+
+    /**
+     * Creates the Kafka consumer.
+     */
+    private static KafkaConsumer createKafkaConsumer() {
+        Properties props = new Properties();
+
+        // Configure Kafka
+        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
+        props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME);
+        props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
+        props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
+        props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+        props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        // Use the Apicurio Registry provided Kafka Deserializer for JSON Schema
+        props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+                JsonSchemaKafkaDeserializer.class.getName());
+        props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class.getName());
+        props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default");
+        props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, true);
+
+        // Configure Service Registry location
+        props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL);
+        // No other configuration needed for the deserializer, because the globalId of the schema
+        // the deserializer should use is sent as part of the payload. So the deserializer simply
+        // extracts that globalId and uses it to look up the Schema from the registry.
+
+        //Just if security values are present, then we configure them.
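+        // Note that configureSecurityIfPresent() wires up both layers at once: SASL/OAUTHBEARER
+        // for the Kafka connection and the same client credentials for the serde's registry calls.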
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + public static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + + /** + * Creates the registry client + */ + private static RegistryClient createRegistryClient(String registryUrl) { + final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); + + //Just if security values are present, then we configure them. + if (tokenEndpoint != null) { + final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); + ApicurioHttpClient httpClient = ApicurioHttpClientFactory.create(tokenEndpoint, + new AuthErrorHandler()); + OidcAuth auth = new OidcAuth(httpClient, authClient, authSecret); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), auth); + } else { + return RegistryClientFactory.create(registryUrl); + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java new file mode 100644 index 0000000000..39714e2534 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java @@ -0,0 +1,186 @@ +package io.apicurio.registry.examples.references; + +import com.google.protobuf.Timestamp; +import io.api.sample.Mode; +import io.api.sample.TableInfo; +import io.api.sample.TableNotification; +import io.api.sample.TableNotificationType; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaDeserializer; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaSerializer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import 
org.apache.kafka.common.serialization.StringSerializer; + +import java.time.Duration; +import java.time.Instant; +import java.util.Collections; +import java.util.Properties; +import java.util.UUID; + +public class ProtobufSerdeReferencesExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = ProtobufSerdeReferencesExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "TableNotification"; + + public static void main(String[] args) throws Exception { + System.out.println("Starting example " + ProtobufSerdeReferencesExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + + TableInfo tableInfo = TableInfo.newBuilder().setMode(Mode.RAW).setMin(producedMessages + 1) + .setMax(producedMessages + 10).setId(UUID.randomUUID().toString()) + .setSubscriptionId(UUID.randomUUID().toString()).build(); + + TableNotification tableNotification = TableNotification.newBuilder() + .setTableNotificationType(TableNotificationType.NEW).setTableInfo(tableInfo) + .setTimestamp( + Timestamp.newBuilder().setSeconds(Instant.now().getEpochSecond()).build()) + .setSessionId(UUID.randomUUID().toString()).build(); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + tableNotification); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else + records.forEach(record -> { + TableNotification tableNotification = record.value(); + System.out.println("Consumed a message: " + tableNotification.getTableInfo() + " @ " + + tableNotification.getTimestamp()); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
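+     * <p>
+     * Nothing is registered manually for this example: {@code AUTO_REGISTER_ARTIFACT} below
+     * lets the serializer register the Protobuf schema, together with the {@code .proto}
+     * files it imports, the first time a message is produced.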
+     */
+    private static Producer createKafkaProducer() {
+        Properties props = new Properties();
+
+        // Configure kafka settings
+        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
+        props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME);
+        props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
+        props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        // Use the Apicurio Registry provided Kafka Serializer for Protobuf
+        props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+                ProtobufKafkaSerializer.class.getName());
+
+        // Configure Service Registry location
+        props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL);
+        // Register the artifact if not found in the registry.
+        props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE);
+        props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name());
+
+        //Just if security values are present, then we configure them.
+        configureSecurityIfPresent(props);
+
+        // Create the Kafka producer
+        Producer producer = new KafkaProducer<>(props);
+        return producer;
+    }
+
+    /**
+     * Creates the Kafka consumer.
+     */
+    private static KafkaConsumer createKafkaConsumer() {
+        Properties props = new Properties();
+
+        // Configure Kafka
+        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
+        props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME);
+        props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
+        props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
+        props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+        props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        // Use the Apicurio Registry provided Kafka Deserializer for Protobuf
+        props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+                ProtobufKafkaDeserializer.class.getName());
+
+        // Configure Service Registry location
+        props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL);
+        // No other configuration needed for the deserializer, because the globalId of the schema
+        // the deserializer should use is sent as part of the payload. So the deserializer simply
+        // extracts that globalId and uses it to look up the Schema from the registry.
+
+        //Just if security values are present, then we configure them.
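+        // Security is only applied when AUTH_TOKEN_ENDPOINT (plus client id/secret) is present
+        // in the environment; against a local, unsecured registry nothing extra is configured.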
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + public static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java new file mode 100644 index 0000000000..98579da301 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java @@ -0,0 +1,113 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package io.apicurio.registry.examples.references.model;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.List;
+
+public class Citizen {
+
+    @JsonProperty("firstName")
+    private String firstName;
+
+    @JsonProperty("lastName")
+    private String lastName;
+
+    @JsonProperty("age")
+    private int age;
+
+    @JsonProperty("city")
+    City city;
+
+    @JsonProperty("identifier")
+    CitizenIdentifier identifier;
+
+    @JsonProperty("qualifications")
+    List<Qualification> qualifications;
+
+    public Citizen() {
+    }
+
+    public Citizen(String firstName, String lastName, int age, City city, CitizenIdentifier identifier, List<Qualification> qualifications) {
+        this.firstName = firstName;
+        this.lastName = lastName;
+        this.age = age;
+        this.city = city;
+        this.identifier = identifier;
+        this.qualifications = qualifications;
+    }
+
+    public String getFirstName() {
+        return firstName;
+    }
+
+    public void setFirstName(String firstName) {
+        this.firstName = firstName;
+    }
+
+    public String getLastName() {
+        return lastName;
+    }
+
+    public void setLastName(String lastName) {
+        this.lastName = lastName;
+    }
+
+    public int getAge() {
+        return age;
+    }
+
+    public void setAge(int age) {
+        this.age = age;
+    }
+
+    public City getCity() {
+        return city;
+    }
+
+    public void setCity(City city) {
+        this.city = city;
+    }
+
+    public CitizenIdentifier getIdentifier() {
+        return identifier;
+    }
+
+    public void setIdentifier(CitizenIdentifier identifier) {
+        this.identifier = identifier;
+    }
+
+    public List<Qualification> getQualifications() {
+        return qualifications;
+    }
+
+    public void setQualifications(List<Qualification> qualifications) {
+        this.qualifications = qualifications;
+    }
+
+    @Override
+    public String toString() {
+        return "Citizen{" +
+                "firstName='" + firstName + '\'' +
+                ", lastName='" + lastName + '\'' +
+                ", age=" + age +
+                ", city=" + city +
+                ", identifier=" + identifier +
+                ", qualifications=" + qualifications +
+                '}';
+    }
+}
diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CitizenIdentifier.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CitizenIdentifier.java
new file mode 100644
index 0000000000..70677fa494
--- /dev/null
+++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CitizenIdentifier.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2022 Red Hat
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package io.apicurio.registry.examples.references.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class CitizenIdentifier { + + @JsonProperty("identifier") + private Integer identifier; + + @JsonProperty("qualification") + private IdentifierQualification identifierQualification; + + public CitizenIdentifier() { + } + + public CitizenIdentifier(Integer identifier) { + this.identifier = identifier; + } + + public CitizenIdentifier(Integer identifier, IdentifierQualification identifierQualification) { + this.identifier = identifier; + this.identifierQualification = identifierQualification; + } + + public Integer getIdentifier() { + return identifier; + } + + public void setIdentifier(Integer identifier) { + this.identifier = identifier; + } + + public IdentifierQualification getIdentifierQualification() { + return identifierQualification; + } + + public void setIdentifierQualification(IdentifierQualification identifierQualification) { + this.identifierQualification = identifierQualification; + } + + @Override + public String toString() { + return "CitizenIdentifier{" + + "identifier=" + identifier + + ", identifierQualification=" + identifierQualification + + '}'; + } +} diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/City.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/City.java new file mode 100644 index 0000000000..64acd1e899 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/City.java @@ -0,0 +1,78 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.references.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class City { + + @JsonProperty("name") + private String name; + + @JsonProperty("zipCode") + private Integer zipCode; + + @JsonProperty("qualification") + private CityQualification qualification; + + public City() { + } + + public City(String name, Integer zipCode) { + this.name = name; + this.zipCode = zipCode; + } + + public City(String name, Integer zipCode, CityQualification cityQualification) { + this.name = name; + this.zipCode = zipCode; + this.qualification = cityQualification; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getZipCode() { + return zipCode; + } + + public void setZipCode(Integer zipCode) { + this.zipCode = zipCode; + } + + public CityQualification getQualification() { + return qualification; + } + + public void setQualification(CityQualification qualification) { + this.qualification = qualification; + } + + @Override + public String toString() { + return "City{" + + "name='" + name + '\'' + + ", zipCode=" + zipCode + + ", qualification=" + qualification + + '}'; + } +} diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CityQualification.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CityQualification.java new file mode 100644 index 0000000000..b79407fa41 --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/CityQualification.java @@ -0,0 +1,44 @@ +package io.apicurio.registry.examples.references.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class CityQualification { + + @JsonProperty("subject_name") + private String subjectName; + + @JsonProperty("qualification") + private int qualification; + + public CityQualification() { + } + + public CityQualification(String subjectName, int qualification) { + this.subjectName = subjectName; + this.qualification = qualification; + } + + public String getSubjectName() { + return subjectName; + } + + public void setSubjectName(String subjectName) { + this.subjectName = subjectName; + } + + public int getQualification() { + return qualification; + } + + public void setQualification(int qualification) { + this.qualification = qualification; + } + + @Override + public String toString() { + return "CityQualification{" + + "subjectName='" + subjectName + '\'' + + ", qualification=" + qualification + + '}'; + } +} diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/IdentifierQualification.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/IdentifierQualification.java new file mode 100644 index 0000000000..78de94190a --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/IdentifierQualification.java @@ -0,0 +1,44 @@ +package io.apicurio.registry.examples.references.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class IdentifierQualification { + + @JsonProperty("subject_name") + private String subjectName; + + @JsonProperty("qualification") + private int qualification; + + public IdentifierQualification() { + } + + public IdentifierQualification(String subjectName, int qualification) { + this.subjectName = subjectName; + this.qualification = 
qualification; + } + + public String getSubjectName() { + return subjectName; + } + + public void setSubjectName(String subjectName) { + this.subjectName = subjectName; + } + + public int getQualification() { + return qualification; + } + + public void setQualification(int qualification) { + this.qualification = qualification; + } + + @Override + public String toString() { + return "IdentifierQualification{" + + "subjectName='" + subjectName + '\'' + + ", qualification=" + qualification + + '}'; + } +} diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Qualification.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Qualification.java new file mode 100644 index 0000000000..b080d603ad --- /dev/null +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Qualification.java @@ -0,0 +1,60 @@ +/* + * Copyright 2022 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.references.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class Qualification { + + @JsonProperty("subject_name") + private String subjectName; + + @JsonProperty("qualification") + private int qualification; + + public Qualification() { + } + + public Qualification(String subjectName, int qualification) { + this.subjectName = subjectName; + this.qualification = qualification; + } + + public String getSubjectName() { + return subjectName; + } + + public void setSubjectName(String subjectName) { + this.subjectName = subjectName; + } + + public int getQualification() { + return qualification; + } + + public void setQualification(int qualification) { + this.qualification = qualification; + } + + @Override + public String toString() { + return "Qualification{" + + "subjectName='" + subjectName + '\'' + + ", qualification=" + qualification + + '}'; + } +} diff --git a/examples/serdes-with-references/src/main/resources/Exchange.avsc b/examples/serdes-with-references/src/main/resources/Exchange.avsc new file mode 100644 index 0000000000..35e61497f0 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/Exchange.avsc @@ -0,0 +1,6 @@ +{ + "namespace": "com.kubetrade.schema.common", + "type": "enum", + "name": "Exchange", + "symbols" : ["GEMINI"] +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/TradeKey.avsc b/examples/serdes-with-references/src/main/resources/TradeKey.avsc new file mode 100644 index 0000000000..681d74821f --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/TradeKey.avsc @@ -0,0 +1,15 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeKey", + "fields": [ + { + "name": "exchange", + "type": "com.kubetrade.schema.common.Exchange" + }, + { + "name": "key", + "type": "string" + } + ] +} \ No newline at end of file diff --git 
a/examples/serdes-with-references/src/main/resources/TradeRaw.avsc b/examples/serdes-with-references/src/main/resources/TradeRaw.avsc new file mode 100644 index 0000000000..41d59ae794 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/TradeRaw.avsc @@ -0,0 +1,19 @@ +{ + "namespace": "com.kubetrade.schema.trade", + "type": "record", + "name": "TradeRaw", + "fields": [ + { + "name": "tradeKey", + "type": "com.kubetrade.schema.trade.TradeKey" + }, + { + "name": "symbol", + "type": "string" + }, + { + "name": "payload", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/citizen.json b/examples/serdes-with-references/src/main/resources/citizen.json new file mode 100644 index 0000000000..6e4e2000ca --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/citizen.json @@ -0,0 +1,24 @@ +{ + "$id": "https://example.com/citizen.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Citizen", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The citizen's first name." + }, + "lastName": { + "type": "string", + "description": "The citizen's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + }, + "city": { + "$ref": "city.json" + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/city.json b/examples/serdes-with-references/src/main/resources/city.json new file mode 100644 index 0000000000..2bb1311a27 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/city.json @@ -0,0 +1,17 @@ +{ + "$id": "https://example.com/city.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "City", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The city's name." + }, + "zipCode": { + "type": "integer", + "description": "The zip code.", + "minimum": 0 + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/schema/sample/mode.proto b/examples/serdes-with-references/src/main/resources/schema/sample/mode.proto new file mode 100644 index 0000000000..e47e8760c9 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/schema/sample/mode.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; +package sample; +option java_package = "io.api.sample"; +option java_multiple_files = true; + +enum Mode { + + MODE_UNKNOWN = 0; //default unset value + + RAW = 1; //no assumption is made on the nature of the data, leading to less optimization in data delivery + + MERGE = 2; //an item represents a row in a table. Real-time updates to that item are used to update the contents of the cells (fields) for that row + + DISTINCT = 3; //an item represents a list of events. Real-time updates tothat item are used to add lines to that list (where each line is made up of fields) + + COMMAND = 4; //an item represents a full table. 
Real-time updates tothat item are used to change the contents of that table, by adding rows, removing rows, andupdating cells + +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/schema/sample/table_info.proto b/examples/serdes-with-references/src/main/resources/schema/sample/table_info.proto new file mode 100644 index 0000000000..f6ee3fbcdf --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/schema/sample/table_info.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; +package sample; +option java_package = "io.api.sample"; +option java_multiple_files = true; + +import "sample/mode.proto"; + +message TableInfo { + + int32 winIndex = 1; + + Mode mode = 2; + + int32 min = 3; + + int32 max = 4; + + string id = 5; + + string dataAdapter = 6; + + string schema = 7; + + string selector = 8; + + string subscription_id = 9; +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/schema/sample/table_notification.proto b/examples/serdes-with-references/src/main/resources/schema/sample/table_notification.proto new file mode 100644 index 0000000000..f38f1b02ad --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/schema/sample/table_notification.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package sample; +option java_package = "io.api.sample"; +option java_multiple_files = true; + +import "google/protobuf/timestamp.proto"; +import "sample/table_info.proto"; +import "sample/table_notification_type.proto"; + +message TableNotification { + + google.protobuf.Timestamp timestamp = 1; + + string user = 2; + + string session_id = 3; + + TableNotificationType table_notification_type = 4; + + TableInfo table_info = 5; + +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/schema/sample/table_notification_type.proto b/examples/serdes-with-references/src/main/resources/schema/sample/table_notification_type.proto new file mode 100644 index 0000000000..fa08d82a0c --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/schema/sample/table_notification_type.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +package sample; +option java_package = "io.api.sample"; +option java_multiple_files = true; + +enum TableNotificationType { + + TABLE_NOTIFICATION_TYPE_UNKNOWN = 0; + + NEW = 1; + + CLOSE = 2; + +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/citizen.json b/examples/serdes-with-references/src/main/resources/serde/json/citizen.json new file mode 100644 index 0000000000..daa67f08e4 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/citizen.json @@ -0,0 +1,36 @@ +{ + "$id": "https://example.com/citizen.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Citizen", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The citizen's first name." + }, + "lastName": { + "type": "string", + "description": "The citizen's last name." 
+ }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + }, + "city": { + "$ref": "types/city/city.json" + }, + "identifier": { + "$ref": "types/identifier/citizenIdentifier.json" + }, + "qualifications": { + "type": "array", + "items": { + "$ref": "qualification.json" + } + } + }, + "required": [ + "city" + ] +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/qualification.json b/examples/serdes-with-references/src/main/resources/serde/json/qualification.json new file mode 100644 index 0000000000..bbe690e903 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/qualification.json @@ -0,0 +1,17 @@ +{ + "$id": "https://example.com/qualification.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Qualification", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The subject's name" + }, + "qualification": { + "type": "integer", + "description": "The qualification.", + "minimum": 0 + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/sample.address.json b/examples/serdes-with-references/src/main/resources/serde/json/sample.address.json new file mode 100644 index 0000000000..413cac5c1d --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/sample.address.json @@ -0,0 +1,100 @@ +{ + "$schema": "http://json-schema.org/draft/2019-09/schema", + "$id": "https://example.com/sample.address.json", + "additionalProperties": false, + "type": "object", + "required": [ + "effectiveDate" + ], + "properties": { + "geoDemographicGroupCd": { + "description": "Geo-demographic group based on zipcode/postcode. This data is usually available from marketing systems like Mosaic. Acorn. etc.", + "type": "string", + "maxLength": 50 + }, + "zipPostcode": { + "description": "Zip/postal code", + "type": "string", + "maxLength": 50 + }, + "city": { + "description": "Identifies the city", + "type": "string", + "maxLength": 100 + }, + "stateProvinceCd": { + "description": "Unique code for the state or province. Can be an organization's internal spelling or a standard abbreviation. as long as they are consistent.", + "type": "string", + "maxLength": 50 + }, + "countryCd": { + "description": "Unique identifier for the country of the address. ", + "type": "string", + "maxLength": 50 + }, + "isStructured": { + "description": "Whether the addresses of individual components are identified (eg: Street. Apt.#. etc.) - set to 1. If not (only Address_Line_1 to 5) then set to 0.", + "type": "boolean" + }, + "poBox": { + "description": "Post office box number.", + "type": "string", + "maxLength": 25 + }, + "apartmentSuiteNumber": { + "description": "If the street address is an apartment building or business complex - the individual apartment number", + "type": "string", + "maxLength": 50 + }, + "street": { + "description": "Street name", + "type": "string", + "maxLength": 255 + }, + "isPrimary": { + "description": "Whether this address is the primary address for the corresponding Entity_Id of type Entity_Type_Cd (e.g. Party) for the Tenant (Tenant_Cd) and for the specified address type (Address_Relation_Type_Cd). Only one address per Entity. Tenant. and address type can be the primary. 
1= True 0 = False", + "type": "boolean" + }, + "addressLine1": { + "description": "When the source address is unformatted and not in specific fields such as street. city. zip. etc. This is the first line of the address. Note that all party addresses must either be unformatted or formatted.", + "type": "string", + "maxLength": 255 + }, + "buildingNumber": { + "description": "House or building number of the street address (residential home. apartment building. commercial).", + "type": "string", + "maxLength": 50 + }, + "addressLine2": { + "description": "When the source address is unformatted and not in specific fields such as street. city. zip. etc. This is the second line of the address.", + "type": "string", + "maxLength": 255 + }, + "addressLine3": { + "description": "When the source address is unformatted and not in specific fields such as street. city. zip. etc. This is the third line of the address.", + "type": "string", + "maxLength": 255 + }, + "addressLine4": { + "description": "When the source address is unformatted and not in specific fields such as street. city. zip. etc. This is the fourth line of the address.", + "type": "string", + "maxLength": 255 + }, + "addressLine5": { + "description": "When the source address is unformatted and not in specific fields such as street. city. zip. etc. This is the fifth line of the address.", + "type": "string", + "maxLength": 255 + }, + "effectiveDate": { + "description": "Indicates when the entity started using this address.", + "type": "string", + "format": "date-time" + }, + "expirationDate": { + "description": "Indicates when the entity stopped using this address.", + "type": "string", + "format": "date-time" + } + } +} + diff --git a/examples/serdes-with-references/src/main/resources/serde/json/types/city/city.json b/examples/serdes-with-references/src/main/resources/serde/json/types/city/city.json new file mode 100644 index 0000000000..66a1105c0a --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/types/city/city.json @@ -0,0 +1,20 @@ +{ + "$id": "https://example.com/types/city/city.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "City", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The city's name." 
+ }, + "zipCode": { + "type": "integer", + "description": "The zip code.", + "minimum": 0 + }, + "qualification": { + "$ref": "qualification.json" + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/types/city/qualification.json b/examples/serdes-with-references/src/main/resources/serde/json/types/city/qualification.json new file mode 100644 index 0000000000..4f19d81a31 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/types/city/qualification.json @@ -0,0 +1,17 @@ +{ + "$id": "https://example.com/types/city/qualification.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Qualification", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The subject's name" + }, + "qualification": { + "type": "integer", + "description": "The city qualification", + "minimum": 10 + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/citizenIdentifier.json b/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/citizenIdentifier.json new file mode 100644 index 0000000000..0c4677f84a --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/citizenIdentifier.json @@ -0,0 +1,16 @@ +{ + "$id": "https://example.com/types/identifier/citizenIdentifier.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Identifier", + "type": "object", + "properties": { + "identifier": { + "type": "integer", + "description": "The citizen identifier.", + "minimum": 0 + }, + "qualification": { + "$ref": "qualification.json" + } + } +} \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/qualification.json b/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/qualification.json new file mode 100644 index 0000000000..931557b9d1 --- /dev/null +++ b/examples/serdes-with-references/src/main/resources/serde/json/types/identifier/qualification.json @@ -0,0 +1,17 @@ +{ + "$id": "https://example.com/types/identifier/qualification.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Qualification", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The subject's name" + }, + "qualification": { + "type": "integer", + "description": "The identifier qualification", + "minimum": 20 + } + } +} \ No newline at end of file diff --git a/examples/simple-avro-downstream/README.md b/examples/simple-avro-downstream/README.md new file mode 100644 index 0000000000..32e4254c98 --- /dev/null +++ b/examples/simple-avro-downstream/README.md @@ -0,0 +1,17 @@ +# Apicurio Rest Client example application using your RHOSR instance. + +1. Create RHOSR Managed Service instance on cloud.redhat.com and store your instance api url. + +2. Create associated Service Account, save client Id and Client Secret. + +3. Ensure your service account has at least, manager permissions on your RHOSR instance. + +4. Create or use an existing instance of Openshift Streams for Apache Kafka. Get the bootstraps servers for that instance. + +5. Create a topic with the name SimpleAvroExample on that Openshift Streams for Apache Kafka instance. + +6. Ensure that the previously created service account has permissions on that Kafka instance topic for producing and consuming from that topic. + +7. 
Set the environment variables SERVERS, AUTH_CLIENT_ID, AUTH_CLIENT_SECRET, AUTH_TOKEN_URL and REGISTRY_URL. + +8. Execute the java main SimpleAvroExample on this module, it will produce and consume 5 messages, creating and enforcing a schema during the way, proving the functioning of the service with a realistic application. diff --git a/examples/simple-avro-downstream/pom.xml b/examples/simple-avro-downstream/pom.xml new file mode 100644 index 0000000000..607f7c55f7 --- /dev/null +++ b/examples/simple-avro-downstream/pom.xml @@ -0,0 +1,40 @@ + + + + apicurio-registry-examples + io.apicurio + 2.6.0-SNAPSHOT + + 4.0.0 + + apicurio-registry-examples-simple-avro-downstream + jar + + + + 2.2.1.Final + 11 + 11 + + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.9.0 + + + + \ No newline at end of file diff --git a/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java b/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java new file mode 100644 index 0000000000..a5ea20fdff --- /dev/null +++ b/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java @@ -0,0 +1,210 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package avro; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type. The following aspects are demonstrated: + * + *

    + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Avro schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
+ * + * @author eric.wittmann@gmail.com + */ +public class SimpleAvroExample { + + private static final String TOPIC_NAME = SimpleAvroExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + + public static void main(String[] args) throws Exception { + + var registryUrl = System.getenv("REGISTRY_URL"); + var bootstrapServers = System.getenv("SERVERS"); + + System.out.println("Starting example " + SimpleAvroExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(registryUrl, bootstrapServers); + // Produce 5 messages. + int producedMessages = 0; + try { + Schema schema = new Schema.Parser().parse(SCHEMA); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, SUBJECT_NAME, + record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(registryUrl, bootstrapServers); + + // Subscribe to the topic + // Consume the 5 messages. + try (consumer) { + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date( + (long) value.get("Time"))); + }); + } + } + + System.out.println("Done (success)."); + + //Required due to a bug in the version of registry libraries used. Once the new version is released, we'll be able to remove this. + System.exit(0); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer(String registryURL, String bootstrapServers) { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, registryURL); + // Register the artifact if not found in the registry. 
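+        // Auto-registration means the Greeting schema (the SCHEMA constant above) is created
+        // in the registry on the first send, so no manual registration step is needed.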
+ props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurity(props); + + // Create the Kafka producer + return new KafkaProducer<>(props); + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer(String registryURL, + String bootstrapServers) { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, registryURL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurity(props); + + // Create the Kafka Consumer + return new KafkaConsumer<>(props); + } + + private static void configureSecurity(Properties props) { + final String tokenEndpoint = System.getenv("AUTH_TOKEN_ENDPOINT"); + final String authClient = System.getenv("AUTH_CLIENT_ID"); + final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } +} \ No newline at end of file diff --git a/examples/simple-avro-maven/pom.xml b/examples/simple-avro-maven/pom.xml new file mode 100644 index 0000000000..c3be5963b6 --- /dev/null +++ b/examples/simple-avro-maven/pom.xml @@ -0,0 +1,72 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-simple-avro-maven + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + javax.ws.rs + javax.ws.rs-api + 2.1.1 + compile + + + + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio-registry.version} + + + register-artifact + + register + + + http://localhost:8080/apis/registry/v2 + + + default + 
SimpleAvroMavenExample-value + AVRO + ${project.basedir}/src/main/resources/schemas/greeting.avsc + + + + + + + + + diff --git a/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java b/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java new file mode 100644 index 0000000000..499508d137 --- /dev/null +++ b/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java @@ -0,0 +1,251 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.avro.maven; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.client.exception.NotFoundException; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type and the Schema pre-registered via a Maven plugin. + * The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Pre-register the Avro schema in the registry via the Maven plugin</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ *   <li>Schema is registered by executing "mvn io.apicurio:apicurio-registry-maven-plugin:register@register-artifact"</li>
+ * </ul>
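+ *
+ * At startup the example looks up the pre-registered schema with
+ * {@code client.getLatestArtifact("default", artifactId)} rather than embedding the
+ * schema in code.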
+ * + * Note that this application will fail if the above maven command is not run first, since + * the schema will not be present in the registry. + * + * @author eric.wittmann@gmail.com + */ +public class SimpleAvroMavenExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = SimpleAvroMavenExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + SimpleAvroMavenExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + String artifactId = topicName + "-value"; + + // Get the schema from the registry so we can use it to create GenericData.Records later + RegistryClient client = RegistryClientFactory.create(REGISTRY_URL); + String schemaData = null; + try (InputStream latestArtifact = client.getLatestArtifact("default", artifactId)) { + schemaData = toString(latestArtifact); + } catch (NotFoundException e) { + System.err.println("Schema not registered in registry. Before running this example, please do:"); + System.err.println(" mvn io.apicurio:apicurio-registry-maven-plugin:register@register-artifact"); + System.exit(1); + + } + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + Schema schema = new Schema.Parser().parse(schemaData); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Get an existing schema - auto-register the schema if not found. + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name()); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + /** + * Reads the entire contents of the input stream as a string. 
+ * @param data + */ + private static String toString(InputStream data) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + byte[] buff = new byte[64]; + int count; + while ((count = data.read(buff)) != -1) { + baos.write(buff, 0, count); + } + return new String(baos.toByteArray(), StandardCharsets.UTF_8); + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } + +} diff --git a/examples/simple-avro-maven/src/main/resources/schemas/greeting.avsc b/examples/simple-avro-maven/src/main/resources/schemas/greeting.avsc new file mode 100644 index 0000000000..6bc54418e9 --- /dev/null +++ b/examples/simple-avro-maven/src/main/resources/schemas/greeting.avsc @@ -0,0 +1,11 @@ +{ + "type": "record", + "name": "Greeting", + "fields": [{ + "name": "Message", + "type": "string" + }, { + "name": "Time", + "type": "long" + }] +} \ No newline at end of file diff --git a/examples/simple-avro/pom.xml b/examples/simple-avro/pom.xml new file mode 100644 index 0000000000..2b63ed6b79 --- /dev/null +++ b/examples/simple-avro/pom.xml @@ -0,0 +1,33 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-simple-avro + jar + + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + diff --git a/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java b/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java new file mode 100644 index 0000000000..7844fe9165 --- /dev/null +++ b/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java @@ -0,0 +1,210 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.avro; + +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Avro as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Avro schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a simple GenericRecord, no java beans needed</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
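+ *
+ * The Avro schema used (inlined below as the {@code SCHEMA} constant) is:
+ * <pre>
+ * {
+ *   "type": "record",
+ *   "name": "Greeting",
+ *   "fields": [
+ *     { "name": "Message", "type": "string" },
+ *     { "name": "Time", "type": "long" }
+ *   ]
+ * }
+ * </pre>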
+ * + * @author eric.wittmann@gmail.com + */ +public class SimpleAvroExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = SimpleAvroExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; + + + public static final void main(String[] args) throws Exception { + System.out.println("Starting example " + SimpleAvroExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + Schema schema = new Schema.Parser().parse(SCHEMA); + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Use the schema to create a record + GenericRecord record = new GenericData.Record(schema); + Date now = new Date(); + String message = "Hello (" + producedMessages++ + ")!"; + record.put("Message", message); + record.put("Time", now.getTime()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. + */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Avro + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Register the artifact if not found in the registry. + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. 
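+        // (configureSecurityIfPresent() below is a no-op unless the environment variable named
+        // by SerdeConfig.AUTH_TOKEN_ENDPOINT is set, so the example still runs unchanged against
+        // an unsecured local registry and broker.)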
+ configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Avro + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/simple-json/pom.xml b/examples/simple-json/pom.xml new file mode 100644 index 0000000000..56b9693bd0 --- /dev/null +++ b/examples/simple-json/pom.xml @@ -0,0 +1,38 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-simple-json + jar + + + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + ${apicurio-registry.version} + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + diff --git a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java new file mode 100644 index 0000000000..e5dea1f388 --- /dev/null +++ 
b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java @@ -0,0 +1,61 @@ +/* + * Copyright 2020 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.json; + +/** + * @author eric.wittmann@gmail.com + */ +public class MessageBean { + + private String message; + private long time; + + /** + * Constructor. + */ + public MessageBean() { + } + + /** + * @return the message + */ + public String getMessage() { + return message; + } + + /** + * @param message the message to set + */ + public void setMessage(String message) { + this.message = message; + } + + /** + * @return the time + */ + public long getTime() { + return time; + } + + /** + * @param time the time to set + */ + public void setTime(long time) { + this.time = time; + } + +} diff --git a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java new file mode 100644 index 0000000000..f86cb38dc3 --- /dev/null +++ b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.json; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.rest.v2.beans.IfExists; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaDeserializer; +import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaSerializer; +import io.apicurio.registry.types.ArtifactType; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with JSON as the serialization type (and JSON Schema for validation). Because JSON + * Schema is only used for validation (not actual serialization), it can be enabled and disabled + * without affecting the functionality of the serializers and deserializers. However, if + * validation is disabled, then incorrect data could be consumed incorrectly. + * + * The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Register the JSON Schema in the registry</li>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Data sent as a MessageBean</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
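+ *
+ * Validation is switched on with {@code SerdeConfig.VALIDATION_ENABLED} on both the
+ * serializer and the deserializer (see the producer and consumer configuration below).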
+ * + * @author eric.wittmann@gmail.com + */ +public class SimpleJsonSchemaExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = SimpleJsonSchemaExample.class.getSimpleName(); + private static final String SUBJECT_NAME = "Greeting"; + public static final String SCHEMA = "{" + + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + + " \"required\": [" + + " \"message\"," + + " \"time\"" + + " ]," + + " \"type\": \"object\"," + + " \"properties\": {" + + " \"message\": {" + + " \"description\": \"\"," + + " \"type\": \"string\"" + + " }," + + " \"time\": {" + + " \"description\": \"\"," + + " \"type\": \"number\"" + + " }" + + " }" + + "}"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + SimpleJsonSchemaExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String subjectName = SUBJECT_NAME; + + // Register the schema with the registry (only if it is not already registered) + String artifactId = TOPIC_NAME + "-value"; // use the topic name as the artifactId because we're going to map topic name to artifactId later on. + RegistryClient client = RegistryClientFactory.create(REGISTRY_URL); + client.createArtifact("default", artifactId, ArtifactType.JSON, IfExists.RETURN_OR_UPDATE, new ByteArrayInputStream(SCHEMA.getBytes(StandardCharsets.UTF_8))); + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 5 messages. + int producedMessages = 0; + try { + System.out.println("Producing (5) messages."); + for (int idx = 0; idx < 5; idx++) { + // Create the message to send + MessageBean message = new MessageBean(); + message.setMessage("Hello (" + producedMessages++ + ")!"); + message.setTime(System.currentTimeMillis()); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, message); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 5 messages. + try { + int messageCount = 0; + System.out.println("Consuming (5) messages."); + while (messageCount < 5) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + MessageBean msg = record.value(); + System.out.println("Consumed a message: " + msg.getMessage() + " @ " + new Date(msg.getTime())); + }); + } + } finally { + consumer.close(); + } + + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for JSON Schema + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSchemaKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.FALSE); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for JSON Schema + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSchemaKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + // Enable validation in the deserializer to ensure that the data we receive is valid. + props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, Boolean.TRUE); + + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. 
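+        // (The OAuth settings mirror the producer side: client credentials are read from the
+        // environment and passed both to the registry serde and to the Kafka SASL/OAUTHBEARER login.)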
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/simple-protobuf/pom.xml b/examples/simple-protobuf/pom.xml new file mode 100644 index 0000000000..a33e92a6f3 --- /dev/null +++ b/examples/simple-protobuf/pom.xml @@ -0,0 +1,59 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.5.12-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-simple-protobuf + jar + + + 0.6.1 + + + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.8.1 + + + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + + compile + + + + + + + + + + diff --git a/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java b/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java new file mode 100644 index 0000000000..8936115a09 --- /dev/null +++ b/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java @@ -0,0 +1,229 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.protobuf; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; + +import com.google.protobuf.DynamicMessage; + +import io.apicurio.registry.examples.AddressBookProtos; +import io.apicurio.registry.examples.AddressBookProtos.AddressBook; +import io.apicurio.registry.examples.AddressBookProtos.Person; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaDeserializer; +import io.apicurio.registry.serde.protobuf.ProtobufKafkaSerializer; +import io.apicurio.registry.utils.IoUtil; + +/** + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe + * scenario with Protobuf as the serialization type. The following aspects are demonstrated: + * + *
+ * <ol>
+ *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
+ *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
+ *   <li>Auto-register the Protobuf schema in the registry (registered by the producer)</li>
+ *   <li>Data sent as a custom java bean and received as a generic DynamicMessage</li>
+ * </ol>
+ *
+ * Pre-requisites:
+ *
+ * <ul>
+ *   <li>Kafka must be running on localhost:9092</li>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ * </ul>
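+ *
+ * The {@code AddressBookProtos} classes used below are generated from
+ * {@code src/main/proto/person.proto} by the protobuf-maven-plugin configured in this
+ * module's pom.xml.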
+ * + * @author eric.wittmann@gmail.com + */ +public class SimpleProtobufExample { + + private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; + private static final String SERVERS = "localhost:9092"; + private static final String TOPIC_NAME = SimpleProtobufExample.class.getSimpleName(); + private static final String SCHEMA_NAME = "AddressBook"; + + + public static final void main(String [] args) throws Exception { + System.out.println("Starting example " + SimpleProtobufExample.class.getSimpleName()); + String topicName = TOPIC_NAME; + String key = SCHEMA_NAME; + + // Create the producer. + Producer producer = createKafkaProducer(); + // Produce 2 messages. + try { + System.out.println("Producing (2) messages."); + for (int idx = 0; idx < 2; idx++) { + + AddressBookProtos.AddressBook book = AddressBook.newBuilder() + .addPeople(Person.newBuilder() + .setEmail("aa@bb.com") + .setId(1) + .setName("aa") + .build()) + .addPeople(Person.newBuilder() + .setEmail("bb@bb.com") + .setId(2) + .setName("bb") + .build()) + .build(); + + // Send/produce the message on the Kafka Producer + ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, book); + producer.send(producedRecord); + + Thread.sleep(100); + } + System.out.println("Messages successfully produced."); + } finally { + System.out.println("Closing the producer."); + producer.flush(); + producer.close(); + } + + // Create the consumer + System.out.println("Creating the consumer."); + KafkaConsumer consumer = createKafkaConsumer(); + + // Subscribe to the topic + System.out.println("Subscribing to topic " + topicName); + consumer.subscribe(Collections.singletonList(topicName)); + + // Consume the 2 messages. + try { + int messageCount = 0; + System.out.println("Consuming (2) messages."); + while (messageCount < 2) { + final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); + messageCount += records.count(); + if (records.count() == 0) { + // Do nothing - no messages waiting. + System.out.println("No messages waiting..."); + } else records.forEach(record -> { + DynamicMessage value = record.value(); + System.out.println("Consumed a message: " + value.toString()); + }); + } + } finally { + consumer.close(); + } + + RegistryClient client = RegistryClientFactory.create(REGISTRY_URL); + System.out.println("The artifact created in Apicurio Registry is: "); + //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value + System.out.println(IoUtil.toString(client.getArtifactVersion("default", topicName + "-value", "1"))); + System.out.println(); + System.out.println("Done (success)."); + } + + /** + * Creates the Kafka producer. 
+ */ + private static Producer createKafkaProducer() { + Properties props = new Properties(); + + // Configure kafka settings + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-" + TOPIC_NAME); + props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); + props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + // Use the Apicurio Registry provided Kafka Serializer for Protobuf + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); + + // Register the artifact if not found in the registry. + props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); + + //Just if security values are present, then we configure them. + configureSecurityIfPresent(props); + + // Create the Kafka producer + Producer producer = new KafkaProducer<>(props); + return producer; + } + + /** + * Creates the Kafka consumer. + */ + private static KafkaConsumer createKafkaConsumer() { + Properties props = new Properties(); + + // Configure Kafka + props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS); + props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "Consumer-" + TOPIC_NAME); + props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); + props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); + props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + // Use the Apicurio Registry provided Kafka Deserializer for Protobuf + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + + // Configure Service Registry location + props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); + + //this configuration property forces the deserializer to return the generic DynamicMessage + props.putIfAbsent(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, DynamicMessage.class.getName()); + + // No other configuration needed for the deserializer, because the globalId of the schema + // the deserializer should use is sent as part of the payload. So the deserializer simply + // extracts that globalId and uses it to look up the Schema from the registry. + + //Just if security values are present, then we configure them. 
+ configureSecurityIfPresent(props); + + // Create the Kafka Consumer + KafkaConsumer consumer = new KafkaConsumer<>(props); + return consumer; + } + + private static void configureSecurityIfPresent(Properties props) { + final String tokenEndpoint = System.getenv(SerdeConfig.AUTH_TOKEN_ENDPOINT); + if (tokenEndpoint != null) { + + final String authClient = System.getenv(SerdeConfig.AUTH_CLIENT_ID); + final String authSecret = System.getenv(SerdeConfig.AUTH_CLIENT_SECRET); + + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_SECRET, authSecret); + props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); + props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); + props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent("security.protocol", "SASL_SSL"); + + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" "+ + " oauth.client.secret=\"%s\" "+ + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + } + } +} diff --git a/examples/simple-protobuf/src/main/proto/person.proto b/examples/simple-protobuf/src/main/proto/person.proto new file mode 100644 index 0000000000..ba1ae1e85c --- /dev/null +++ b/examples/simple-protobuf/src/main/proto/person.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package tutorial; + +import "google/protobuf/timestamp.proto"; + +option java_package = "io.apicurio.registry.examples"; +option java_outer_classname = "AddressBookProtos"; + +message Person { + string name = 1; + int32 id = 2; // Unique ID number for this person. + string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + string number = 1; + PhoneType type = 2; + } + + repeated PhoneNumber phones = 4; + + google.protobuf.Timestamp last_updated = 5; +} + +message AddressBook { + repeated Person people = 1; +} \ No newline at end of file diff --git a/examples/simple-validation/pom.xml b/examples/simple-validation/pom.xml new file mode 100644 index 0000000000..b3a933c9d8 --- /dev/null +++ b/examples/simple-validation/pom.xml @@ -0,0 +1,35 @@ + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 2.6.0-SNAPSHOT + ../pom.xml + + + apicurio-registry-examples-simple-validation + jar + + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + + + + org.everit.json + org.everit.json.schema + 1.3.0 + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + + + \ No newline at end of file diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java new file mode 100644 index 0000000000..79862caf0b --- /dev/null +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java @@ -0,0 +1,69 @@ +/* + * Copyright 2020 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.json; + +/** + * @author eric.wittmann@gmail.com + */ +public class MessageBean { + + private String message; + private long time; + + /** + * Constructor. + */ + public MessageBean() { + } + + /** + * @return the message + */ + public String getMessage() { + return message; + } + + /** + * @param message the message to set + */ + public void setMessage(String message) { + this.message = message; + } + + /** + * @return the time + */ + public long getTime() { + return time; + } + + /** + * @param time the time to set + */ + public void setTime(long time) { + this.time = time; + } + + /** + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "MessageBean [message=" + message + ", time=" + time + "]"; + } + +} diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java new file mode 100644 index 0000000000..89b0497e71 --- /dev/null +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java @@ -0,0 +1,76 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.json; + +import java.io.InputStream; +import java.util.Collections; + +import org.json.JSONObject; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; + +import io.apicurio.rest.client.JdkHttpClientProvider; +import io.apicurio.rest.client.error.ApicurioRestClientException; +import io.apicurio.rest.client.error.RestClientErrorHandler; +import io.apicurio.rest.client.request.Operation; +import io.apicurio.rest.client.request.Request; +import io.apicurio.rest.client.request.Request.RequestBuilder; +import io.apicurio.rest.client.spi.ApicurioHttpClient; + +/** + * @author eric.wittmann@gmail.com + */ +@SuppressWarnings("unchecked") +public class MessagePublisher { + private static final ApicurioHttpClient httpClient; + static { + httpClient = new JdkHttpClientProvider().create("http://localhost:12345", Collections.EMPTY_MAP, null, new RestClientErrorHandler() { + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException parseInputSerializingError(JsonProcessingException ex) { + return new ApicurioRestClientException(ex.getMessage()) {}; + } + + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException parseError(Exception ex) { + return new ApicurioRestClientException(ex.getMessage()) {}; + } + + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException handleErrorResponse(InputStream body, int statusCode) { + return new ApicurioRestClientException("Error with code: "+ statusCode) {}; + } + }); + } + + /** + * @param message + */ + @SuppressWarnings({ "rawtypes" }) + public void publishMessage(MessageBean message) { + JSONObject messageObj = new JSONObject(message); + String data = messageObj.toString(); + Request request = new RequestBuilder().operation(Operation.POST).data(data).responseType(new TypeReference() {}).build(); + httpClient.sendRequest(request); + + System.out.println("Produced message: " + message); + } + +} diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java new file mode 100644 index 0000000000..f81c766599 --- /dev/null +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java @@ -0,0 +1,68 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.json; + +import java.io.IOException; +import java.io.InputStream; + +import org.everit.json.schema.Schema; +import org.everit.json.schema.ValidationException; +import org.everit.json.schema.loader.SchemaLoader; +import org.json.JSONObject; +import org.json.JSONTokener; + +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.rest.client.RegistryClientFactory; + +/** + * @author eric.wittmann@gmail.com + */ +public class MessageValidator { + + private final String group; + private final String artifactId; + private final RegistryClient client; + + /** + * Constructor. + * @param registryUrl + * @param group + * @param artifactId + */ + public MessageValidator(String registryUrl, String group, String artifactId) { + this.group = group; + this.artifactId = artifactId; + + this.client = RegistryClientFactory.create(registryUrl); + } + + /** + * @param message + */ + public void validate(MessageBean message) throws IOException, ValidationException { + JSONObject jsonSchema; + try (InputStream schemaIS = client.getLatestArtifact(group, artifactId)) { + jsonSchema = new JSONObject(new JSONTokener(schemaIS)); + } + + JSONObject jsonSubject = new JSONObject(message); + + Schema schema = SchemaLoader.load(jsonSchema); + schema.validate(jsonSubject); + } + +} diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java new file mode 100644 index 0000000000..b07a876877 --- /dev/null +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java @@ -0,0 +1,111 @@ +/* + * Copyright 2021 Red Hat + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.apicurio.registry.examples.simple.json; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpServer; + +import io.apicurio.registry.utils.IoUtil; + +/** + * @author eric.wittmann@gmail.com + */ +public class SimpleBroker { + + private static final int port = 12345; + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private List messages = Collections.synchronizedList(new LinkedList()); + private int getCursor = 0; + + public void start() { + try { + HttpServer server = HttpServer.create(new InetSocketAddress(port), 0); + + server.createContext("/", httpExchange -> { + if (httpExchange.getRequestMethod().equalsIgnoreCase("GET")) { + handleGet(httpExchange); + } else if (httpExchange.getRequestMethod().equalsIgnoreCase("POST")) { + handlePost(httpExchange); + } else { + handleDefault(httpExchange); + } + }); + + server.start(); + } catch (Throwable tr) { + tr.printStackTrace(); + } + } + + /** + * @param httpExchange + */ + private void handleGet(HttpExchange httpExchange) throws IOException { + if (getCursor < this.messages.size()) { + byte [] response = this.messages.get(getCursor++).getBytes(StandardCharsets.UTF_8); + + httpExchange.getResponseHeaders().add("Content-Type", "application/json; charset=UTF-8"); + httpExchange.sendResponseHeaders(200, response.length); + + OutputStream out = httpExchange.getResponseBody(); + out.write(response); + out.close(); + } else { + httpExchange.sendResponseHeaders(404, 0); + httpExchange.close(); + } + } + + /** + * @param httpExchange + */ + private void handlePost(HttpExchange httpExchange) throws IOException { + try (InputStream bodyIS = httpExchange.getRequestBody()) { + String message = IoUtil.toString(bodyIS); + this.messages.add(message); + System.out.println("Received message!"); + + httpExchange.sendResponseHeaders(201, 0); + httpExchange.close(); + } + } + + /** + * @param httpExchange + */ + private void handleDefault(HttpExchange httpExchange) throws IOException { + byte response[] = "Operation not supported".getBytes("UTF-8"); + + httpExchange.getResponseHeaders().add("Content-Type", "text/plain; charset=UTF-8"); + httpExchange.sendResponseHeaders(500, response.length); + + OutputStream out = httpExchange.getResponseBody(); + out.write(response); + out.close(); + } + +} diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java new file mode 100644 index 0000000000..22ab6334d4 --- /dev/null +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.apicurio.registry.examples.simple.json; + +import java.security.SecureRandom; +import java.util.Optional; + +import org.everit.json.schema.ValidationException; + +/** + * This example demonstrates how to integrate with Apicurio Registry when performing client-side validation of + * JSON messages. This example imagines a generic scenario where JSON messages are sent/published to a custom + * messaging system for later consumption. It assumes that the JSON Schema used for validation must already be + * registered. The following aspects are demonstrated: + *
+ * <ol>
+ *   <li>Fetch the JSON Schema from the registry</li>
+ *   <li>Generate and validate JSON messages</li>
+ *   <li>Send validated messages to a messaging system</li>
+ * </ol>
+ * <p>
+ * Prerequisites:
+ * <ul>
+ *   <li>Apicurio Registry must be running on localhost:8080</li>
+ *   <li>The JSON schema must be registered at coordinates Examples/MessageType</li>
+ * </ul>
+ *
+ * @author eric.wittmann@gmail.com
+ */
+public class SimpleValidationExample {
+
+    private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2";
+    private static final String GROUP = "Examples";
+    private static final String ARTIFACT_ID = "MessageType";
+    private static final SecureRandom rand = new SecureRandom();
+
+    public static void main(String[] args) throws Exception {
+        System.out.println("Starting example " + SimpleValidationExample.class.getSimpleName());
+
+        // Start a mock broker
+        SimpleBroker broker = new SimpleBroker();
+        broker.start();
+
+        // Some configuration
+        String registryUrl = Optional.ofNullable(System.getenv("REGISTRY_URL")).orElse(REGISTRY_URL);
+        String group = Optional.ofNullable(System.getenv("GROUP")).orElse(GROUP);
+        String artifactId = Optional.ofNullable(System.getenv("ARTIFACT_ID")).orElse(ARTIFACT_ID);
+
+        // Create a message validator and message publisher
+        MessageValidator validator = new MessageValidator(registryUrl, group, artifactId);
+        MessagePublisher publisher = new MessagePublisher();
+
+        // Produce messages in a loop (runs until the process is killed).
+        boolean done = false;
+        while (!done) {
+            // Create a message we want to produce/send
+            MessageBean message = new MessageBean();
+            message.setMessage("Hello! A random integer is: " + rand.nextInt());
+            message.setTime(System.currentTimeMillis());
+
+            try {
+                // Validate the message before sending it
+                validator.validate(message);
+
+                // Send the message
+                publisher.publishMessage(message);
+            } catch (ValidationException e) {
+                e.printStackTrace();
+            }
+
+            Thread.sleep(5000);
+        }
+
+        System.out.println("Done (success).");
+    }
+
+}
diff --git a/examples/simple-validation/src/main/schemas/message-invalid.json b/examples/simple-validation/src/main/schemas/message-invalid.json
new file mode 100644
index 0000000000..92c0c9bae1
--- /dev/null
+++ b/examples/simple-validation/src/main/schemas/message-invalid.json
@@ -0,0 +1,20 @@
+{
+    "$id": "https://example.com/message.schema.json",
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "required": [
+        "message",
+        "time"
+    ],
+    "type": "object",
+    "properties": {
+        "message": {
+            "description": "The message content.",
+            "type": "string"
+        },
+        "time": {
+            "description": "The time the message was sent.",
+            "type": "string",
+            "format": "date-time"
+        }
+    }
+}
\ No newline at end of file
diff --git a/examples/simple-validation/src/main/schemas/message.json b/examples/simple-validation/src/main/schemas/message.json
new file mode 100644
index 0000000000..e4eef18629
--- /dev/null
+++ b/examples/simple-validation/src/main/schemas/message.json
@@ -0,0 +1,19 @@
+{
+    "$id": "https://example.com/message.schema.json",
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "required": [
+        "message",
+        "time"
+    ],
+    "type": "object",
+    "properties": {
+        "message": {
+            "description": "The message content.",
+            "type": "string"
+        },
+        "time": {
+            "description": "The time the message was sent (epoch millis).",
+            "type": "number"
+        }
+    }
+}
\ No newline at end of file
diff --git a/examples/simple-validation/src/main/scripts/.gitignore b/examples/simple-validation/src/main/scripts/.gitignore
new file mode 100644
index 0000000000..8b191f07fb
--- /dev/null
+++ b/examples/simple-validation/src/main/scripts/.gitignore
@@ -0,0 +1,3 @@
+_schema.json
+_message.json
+
diff --git a/examples/simple-validation/src/main/scripts/consumer.sh b/examples/simple-validation/src/main/scripts/consumer.sh
new file mode 100644
index 0000000000..6ad934de9c
--- /dev/null
+++ b/examples/simple-validation/src/main/scripts/consumer.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+DONE=false
+
+curl -s http://localhost:8080/apis/registry/v2/groups/Examples/artifacts/MessageType > _schema.json
+echo "Schema fetched from registry."
+cat _schema.json
+
+echo ""
+echo "---"
+echo "Subscribing to broker."
+echo "---"
+
+
+while [ "x$DONE" == "xfalse" ]
+do
+
+    # Receive a message
+    curl http://localhost:12345 -s > _message.json
+
+    MESSAGE_SIZE=$(wc -c "_message.json" | awk '{print $1}')
+
+    if [ "x$MESSAGE_SIZE" == "x0" ]
+    then
+        sleep 0.2
+        continue
+    else
+        #json validate --schema-file=_schema.json --document-file=_message.json
+        echo "Message received."
+        cat _message.json | jq
+    fi
+
+    sleep 0.2
+
+done
diff --git a/examples/tools/kafka-all/Dockerfile b/examples/tools/kafka-all/Dockerfile
new file mode 100644
index 0000000000..279fc55e4b
--- /dev/null
+++ b/examples/tools/kafka-all/Dockerfile
@@ -0,0 +1,24 @@
+# docker build -t="apicurio/kafka-all" --rm .
+# docker run -it -p 9092:9092 -p 9091:9091 -p 2181:2181 apicurio/kafka-all
+# docker run -it -p 8080:8080 apicurio/apicurio-registry-mem:1.3.1.Final
+FROM centos:8
+
+RUN yum update -y && \
+    yum install -y java-1.8.0-openjdk-devel && \
+    curl https://archive.apache.org/dist/kafka/2.5.0/kafka_2.12-2.5.0.tgz -o /tmp/kafka.tgz && \
+    tar xfz /tmp/kafka.tgz -C /usr/local && \
+    mv /usr/local/kafka_2.12-2.5.0 /usr/local/kafka
+
+RUN echo "#!/bin/sh" >> /usr/local/kafka/start_kafka.sh && \
+    echo "cd /usr/local/kafka" >> /usr/local/kafka/start_kafka.sh && \
+    echo "./bin/zookeeper-server-start.sh config/zookeeper.properties &" >> /usr/local/kafka/start_kafka.sh && \
+    echo "sleep 5" >> /usr/local/kafka/start_kafka.sh && \
+    echo "./bin/kafka-server-start.sh config/server.properties" >> /usr/local/kafka/start_kafka.sh && \
+    chmod 755 /usr/local/kafka/start_kafka.sh
+
+EXPOSE 9092
+EXPOSE 9091
+EXPOSE 2181
+EXPOSE 2888
+
+CMD /usr/local/kafka/start_kafka.sh
diff --git a/examples/tools/kafka-compose/kafka-compose.yaml b/examples/tools/kafka-compose/kafka-compose.yaml
new file mode 100644
index 0000000000..853bb7040d
--- /dev/null
+++ b/examples/tools/kafka-compose/kafka-compose.yaml
@@ -0,0 +1,32 @@
+---
+version: '3'
+
+services:
+
+  zookeeper:
+    image: strimzi/kafka:0.20.1-kafka-2.6.0
+    command: [
+      "bin/zookeeper-server-start.sh", "config/zookeeper.properties"
+    ]
+    ports:
+      - "2181:2181"
+    environment:
+      LOG_DIR: /tmp/logs
+
+  kafka:
+    image: strimzi/kafka:0.20.1-kafka-2.6.0
+    command: [
+      "bin/kafka-server-start.sh", "config/server.properties",
+      "--override", "listeners=PLAINTEXT://0.0.0.0:9092",
+      "--override", "advertised.listeners=PLAINTEXT://localhost:9092",
+      "--override", "zookeeper.connect=zookeeper:2181"
+    ]
+    depends_on:
+      - zookeeper
+    ports:
+      - "9092:9092"
+    environment:
+      LOG_DIR: "/tmp/logs"
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092
+      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
\ No newline at end of file
diff --git a/examples/tools/kafka-compose/run-kafka.sh b/examples/tools/kafka-compose/run-kafka.sh
new file mode 100755
index 0000000000..5561f5cacf
--- /dev/null
+++ b/examples/tools/kafka-compose/run-kafka.sh
@@ -0,0 +1 @@
+docker-compose -f tools/kafka-compose/kafka-compose.yaml up
\ No newline at end of file
diff --git a/examples/tools/kafkasql-topic-import/pom.xml b/examples/tools/kafkasql-topic-import/pom.xml
new file mode 100644
index 0000000000..58d52b7476
--- /dev/null
+++ b/examples/tools/kafkasql-topic-import/pom.xml
@@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>io.apicurio</groupId>
+    <artifactId>apicurio-registry-examples</artifactId>
+    <version>2.6.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>apicurio-registry-tools-kafkasql-topic-import</artifactId>
+  <packaging>jar</packaging>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.projectlombok</groupId>
+      <artifactId>lombok</artifactId>
+      <version>1.18.28</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+      <version>2.15.2</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <version>2.15.2</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka-clients</artifactId>
+      <version>3.2.3</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>32.1.3-jre</version>
+    </dependency>
+
+    <dependency>
+      <groupId>info.picocli</groupId>
+      <artifactId>picocli</artifactId>
+      <version>4.7.5</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>2.0.9</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>2.0.9</version>
+    </dependency>
+
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>io.apicurio.registry.tools.kafkasqltopicimport.Main</mainClass>
+            </manifest>
+          </archive>
+          <descriptorRefs>
+            <descriptorRef>jar-with-dependencies</descriptorRef>
+          </descriptorRefs>
+        </configuration>
+        <executions>
+          <execution>
+            <id>make-assembly</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java
new file mode 100644
index 0000000000..f3958ead25
--- /dev/null
+++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2020 Red Hat
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.apicurio.registry.tools.kafkasqltopicimport;
+
+import lombok.*;
+
+import java.util.List;
+
+
+/**
+ * One line of a topic dump file: a JSON envelope describing a single Kafka record.
+ *
+ * @author Jakub Senko m@jsenko.net
+ */
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@Getter
+@Setter
+@EqualsAndHashCode
+@ToString
+public class Envelope {
+
+    private String topic;
+
+    private Integer partition;
+
+    private Long offset;
+
+    private String tstype;
+
+    private Long ts;
+
+    private Long broker;
+
+    // Record headers, flattened into an alternating key/value list.
+    private List<String> headers;
+
+    private String key;
+
+    private String payload;
+}
diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java
new file mode 100644
index 0000000000..c3f26f36de
--- /dev/null
+++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2023 JBoss Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.apicurio.registry.tools.kafkasqltopicimport;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Streams;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.slf4j.simple.SimpleLogger;
+import picocli.CommandLine.Command;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+import java.util.List;
+import java.util.Properties;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static picocli.CommandLine.Option;
+
+/**
+ * Imports a kafkasql-journal topic dump (one JSON envelope per line) into a Kafka topic.
+ *
+ * @author Jakub Senko m@jsenko.net
+ */
+@Command(name = "import", version = "0.1", mixinStandardHelpOptions = true)
+public class ImportCommand implements Runnable {
+
+    private static final ObjectMapper mapper = new ObjectMapper();
+
+    @Option(names = {"-b", "--bootstrap-server"}, description = "Kafka bootstrap server URL.",
+            required = true, defaultValue = "localhost:9092")
+    private String kafkaBootstrapServer;
+
+    @Option(names = {"-f", "--file"}, description = "Path to a kafkasql-journal topic dump file. " +
+            "Messages must use a JSON envelope and have base64-encoded keys and values.", required = true)
+    private String dumpFilePath;
+
+    @Option(names = {"-d", "--debug"}, description = "Print debug log messages.", defaultValue = "false")
+    private boolean debug;
+
+    public void run() {
+
+        if (debug) {
+            System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG");
+        } else {
+            System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "WARN");
+        }
+
+        try (Producer<byte[], byte[]> producer = createKafkaProducer()) {
+
+            try (BufferedReader br = new BufferedReader(new FileReader(dumpFilePath))) {
+                String line;
+                while ((line = br.readLine()) != null) {
+                    var envelope = mapper.readValue(line, Envelope.class);
+
+                    if (envelope.getHeaders() == null) {
+                        envelope.setHeaders(List.of());
+                    }
+                    if (envelope.getHeaders().size() % 2 != 0) {
+                        throw new RuntimeException("Invalid length of the headers field: " + envelope.getHeaders().size());
+                    }
+
+                    var key = envelope.getKey() != null ? Base64.getDecoder().decode(envelope.getKey()) : null;
+                    var value = envelope.getPayload() != null ? Base64.getDecoder().decode(envelope.getPayload()) : null;
+
+                    var record = new ProducerRecord<>(
+                            envelope.getTopic(),
+                            envelope.getPartition(),
+                            envelope.getTs(),
+                            key,
+                            value,
+                            // Rebuild the Kafka record headers from the flattened list by pairing
+                            // even-indexed entries (keys) with odd-indexed entries (values).
+                            Streams.zip(
+                                    Streams.zip(
+                                            IntStream.range(0, Integer.MAX_VALUE).boxed(),
+                                            envelope.getHeaders().stream(),
+                                            Tuple::new
+                                    ).filter(t -> t.getA() % 2 == 0).map(Tuple::getB), // Even indexes: 0,2,4,...
+                                    Streams.zip(
+                                            IntStream.range(0, Integer.MAX_VALUE).boxed(),
+                                            envelope.getHeaders().stream(),
+                                            Tuple::new
+                                    ).filter(t -> t.getA() % 2 == 1).map(Tuple::getB), // Odd indexes: 1,3,5,...
+                                    (k, v) -> new RecordHeader(k, v.getBytes(StandardCharsets.UTF_8)))
+                                    .collect(Collectors.toList())
+                    );
+                    producer.send(record);
+                }
+            }
+
+            producer.flush();
+            System.err.println("Data imported successfully.");
+
+        } catch (Exception ex) {
+            System.err.println("Data import failed: " + ex.getMessage());
+            ex.printStackTrace(System.err);
+        }
+    }
+
+
+    private Producer<byte[], byte[]> createKafkaProducer() {
+
+        Properties props = new Properties();
+
+        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServer);
+        props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "Producer-kafkasql-journal");
+        props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
+        props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
+        props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
+
+        return new KafkaProducer<>(props);
+    }
+}
diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Main.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Main.java
new file mode 100644
index 0000000000..73f8c257ab
--- /dev/null
+++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Main.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 JBoss Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.apicurio.registry.tools.kafkasqltopicimport;
+
+import picocli.CommandLine;
+
+/**
+ * @author Jakub Senko m@jsenko.net
+ */
+public class Main {
+
+    public static void main(String[] args) {
+
+        CommandLine cmd = new CommandLine(new ImportCommand());
+        int exitCode = cmd.execute(args);
+        System.exit(exitCode);
+    }
+}
diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Tuple.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Tuple.java
new file mode 100644
index 0000000000..f3df2f0cba
--- /dev/null
+++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Tuple.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2020 Red Hat
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.apicurio.registry.tools.kafkasqltopicimport;
+
+import lombok.*;
+
+/**
+ * A simple generic pair, used to zip header keys with header values.
+ *
+ * @author Jakub Senko m@jsenko.net
+ */
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@Getter
+@Setter
+@EqualsAndHashCode
+@ToString
+public class Tuple<A, B> {
+
+    private A a;
+
+    private B b;
+}
diff --git a/examples/tools/run-registry.sh b/examples/tools/run-registry.sh
new file mode 100755
index 0000000000..8b4ebe3a0f
--- /dev/null
+++ b/examples/tools/run-registry.sh
@@ -0,0 +1 @@
+docker run -it -p 8080:8080 quay.io/apicurio/apicurio-registry-mem:latest-snapshot
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 233a0f3a91..fee9650ea1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -365,6 +365,11 @@
       <artifactId>apicurio-registry-integration-tests</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>io.apicurio</groupId>
+      <artifactId>apicurio-registry-examples</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>io.apicurio</groupId>
       <artifactId>apicurio-registry-serde-common</artifactId>
@@ -1117,6 +1122,12 @@
         <module>integration-tests</module>
       </modules>
     </profile>
+    <profile>
+      <id>examples</id>
+      <modules>
+        <module>examples</module>
+      </modules>
+    </profile>
     <profile>
       <id>skip-auth-test</id>
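
Note on the kafkasql-topic-import tool above: each line of the dump file passed via -f/--file must be a
self-contained JSON envelope matching Envelope.java, with the record key and payload base64-encoded and
any record headers flattened into a single alternating key/value list (this appears to mirror kcat's
JSON envelope output). A minimal input line might look like the following; all values here are
illustrative, not taken from a real journal:

{"topic": "kafkasql-journal", "partition": 0, "offset": 0, "tstype": "create", "ts": 1700000000000, "broker": 0, "headers": ["mt", "create"], "key": "a2V5", "payload": "dmFsdWU="}

After mvn package, the assembled tool can be run with something like
java -jar target/apicurio-registry-tools-kafkasql-topic-import-2.6.0-SNAPSHOT-jar-with-dependencies.jar -f dump.json -b localhost:9092
(the exact jar name depends on the build).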
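
The MessageValidator and MessagePublisher classes used by SimpleValidationExample are defined elsewhere in
this patch. As a rough sketch of the client-side validation flow the example demonstrates (fetch the
registered JSON Schema once, then validate every outgoing message against it), a validator along these
lines would work, assuming the everit-json-schema library (the example already imports its
ValidationException) and Java 11's HttpClient; the class and helper names here are illustrative only,
not the shipped implementation:

// A minimal sketch, not the MessageValidator shipped in this patch.
package io.apicurio.registry.examples.simple.json;

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

import org.everit.json.schema.Schema;
import org.everit.json.schema.ValidationException;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;

public class MessageValidatorSketch {

    private final Schema schema;

    public MessageValidatorSketch(String registryUrl, String group, String artifactId) throws Exception {
        // Fetch the latest registered schema content (the same v2 endpoint consumer.sh uses).
        String url = registryUrl + "/groups/" + group + "/artifacts/" + artifactId;
        HttpResponse<String> response = HttpClient.newHttpClient().send(
                HttpRequest.newBuilder(URI.create(url)).GET().build(),
                HttpResponse.BodyHandlers.ofString());
        this.schema = SchemaLoader.load(new JSONObject(response.body()));
    }

    public void validate(MessageBean message) throws ValidationException {
        // Build a JSON document from the bean (getters assumed) and validate it;
        // schema.validate() throws ValidationException on failure.
        JSONObject document = new JSONObject();
        document.put("message", message.getMessage());
        document.put("time", message.getTime());
        schema.validate(document);
    }
}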