diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2f2cbdb..276cc9b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -26,10 +26,17 @@ jobs: java-version: 11 - name: Set tag version run: | - echo "VERSION=${GITHUB_REF##*/}" >> $GITHUB_ENV + VERSION=${GITHUB_REF##*/} + echo "VERSION=$VERSION" >> $GITHUB_ENV + PRE_RELEASE=false + if [[ "$VERSION" =~ -rc ]]; then + PRE_RELEASE=true + fi + echo "PRE_RELEASE=$PRE_RELEASE" >> $GITHUB_ENV - name: Defined tag version run: | echo VERSION=$VERSION + echo PRE_RELEASE=$PRE_RELEASE - name: Build artifacts uses: gradle/gradle-build-action@v2.3.3 with: @@ -39,5 +46,6 @@ jobs: with: tag_name: ${{ github.ref }} release_name: Release ${{ github.ref }} + prerelease: ${{ env.PRE_RELEASE }} artifacts: "build/distributions/*" bodyFile: "RELEASE_CHANGELOG.md" diff --git a/CHANGELOG.md b/CHANGELOG.md index 53e8a13..9d12b47 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ Usage: Change log entries are to be added to the Unreleased section under the appropriate stanza (see below). 
Each entry should ideally include a tag and -the Github issue reference in the following format: +the GitHub issue reference in the following format: * () \# message @@ -33,9 +33,13 @@ Ref: https://keepachangelog.com/en/1.0.0/ ## Unreleased +### Improvements + +(docs/ci) [PR 32](https://github.com/provenance-io/provenance-abci-listener/pull/32) Update deploy doc and scripts and mark pre-releases + --- -## [1.0.0-rc2](https://github.com/provenance-io/provenance-abci-listener/releases/tag/v1.0.0-rc2) - 2023-02-09 +## [1.0.0-rc2](https://github.com/provenance-io/provenance-abci-listener/releases/tag/1.0.0-rc2) - 2023-02-09 ### Improvements @@ -47,7 +51,7 @@ Ref: https://keepachangelog.com/en/1.0.0/ --- -## [1.0.0-rc1](https://github.com/provenance-io/provenance-abci-listener/releases/tag/v1.0.0-rc1) - 2023-02-08 +## [1.0.0-rc1](https://github.com/provenance-io/provenance-abci-listener/releases/tag/1.0.0-rc1) - 2023-02-08 ### Improvements diff --git a/README.md b/README.md index 019c7ad..ab5661b 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,14 @@ brew install ktlint ``` In order to automatically lint/check for things that can't be autocorrected run: -``` +```shell ktlint -F "src/**/*.kt" ``` + +## Distribution + +To build a distribution on your local environment, run: + +```shell +./gradlew assembleDist +``` \ No newline at end of file diff --git a/docs/deploy.md b/docs/deploy.md index 10411a8..7fbf123 100644 --- a/docs/deploy.md +++ b/docs/deploy.md @@ -2,156 +2,104 @@ - [Overview](#overview) - - [Deploying the Plugin](#deploying-the-plugin) - - [Deployment](#deployment) - - [Configuration](#configuration) - - [Environment variables](#environment-variables) - - [Node Configuration](#node-configuration) + - [Plugin Deployment](#plugin-deployment) + - [Node Configuration](#node-configuration) ## Overview This document outlines steps to deploy release distributions of the plugin. 
-## Deploying the Plugin - -We've created a script, `scrips/deploy.sh`, to help deploy the plugin into your Provenance node environment. The script needs to run from your node. - -```shell -PLUGIN_VERSION={{ RELEASE_VERSION }} -``` -Release versions can be found [here](https://github.com/provenance-io/provenance-abci-listener/tags). - -### Deployment -```shell -curl --create-dirs -o $PIO_HOME/plugins/deploy.sh \ - https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$PLUGIN_VERSION/scripts/deploy.sh \ - && chmod +x $PIO_HOME/plugins/deploy.sh -``` - -```shell -sh $PIO_HOME/plugins/deploy.sh $PLUGIN_VERSION -``` -Will deploy and extract the plugin to `$PIO_HOME/plugins/provenance-abci-listener-{version}`. - -### Configuration - -```shell -curl --create-dirs -o $PIO_HOME/plugins/provenance-abci-listener-$PLUGIN_VERSION/application.conf \ - https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$PLUGIN_VERSION/src/main/resources/application.conf -``` - -#### Alternatively -```hocon -cat << EOF >> $PIO_HOME/plugins/provenance-abci-listener-$PLUGIN_VERSION/application.conf -# Grpc server config -grpc.server { - addr = localhost - port = 1234 -} - -# Kafka producer config -kafka.producer { - # Assign a topic name and optional prefix where events will be written. - listen-topics { - prefix = "local-" - listen-begin-block = ${?kafka.producer.listen-topics.prefix}"listen-begin-block" - listen-end-block = ${?kafka.producer.listen-topics.prefix}"listen-end-block" - listen-deliver-tx = ${?kafka.producer.listen-topics.prefix}"listen-deliver-tx" - listen-commit = ${?kafka.producer.listen-topics.prefix}"listen-commit" - } - - # Properties defined by org.apache.kafka.clients.producer.ProducerConfig. - # can be defined in this configuration section. 
- kafka-clients { - bootstrap.servers = "{{ BROKER_ENDPOINT }}" - acks = all - enable.idempotence = true - max.in.flight.requests.per.connection = 1 - linger.ms = 50 - max.request.size = 204857600 - key.serializer = org.apache.kafka.common.serialization.StringSerializer - value.serializer = io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer - schema.registry.url ="http(s?)://{{ SR_ENDPOINT }}" - } -} -EOF -``` - -#### Confluent Cloud Configuration - -```hocon -cat << EOF >> $PIO_HOME/plugins/provenance-abci-listener-$PLUGIN_VERSION/application.conf -# Grpc server config -grpc.server { - addr = localhost - port = 1234 -} - -# Kafka producer config -kafka.producer { - # Assign a topic name and optional prefix where events will be written. - listen-topics { - prefix = "local-" - listen-begin-block = ${?kafka.producer.listen-topics.prefix}"listen-begin-block" - listen-end-block = ${?kafka.producer.listen-topics.prefix}"listen-end-block" - listen-deliver-tx = ${?kafka.producer.listen-topics.prefix}"listen-deliver-tx" - listen-commit = ${?kafka.producer.listen-topics.prefix}"listen-commit" - } - - # Properties defined by org.apache.kafka.clients.producer.ProducerConfig. - # can be defined in this configuration section. 
- kafka-clients { - bootstrap.servers = "{{ BROKER_ENDPOINT }}" - acks = all - enable.idempotence = true - max.in.flight.requests.per.connection = 1 - linger.ms = 50 - max.request.size = 204857600 - key.serializer = org.apache.kafka.common.serialization.StringSerializer - value.serializer = io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer - - # Required connection configs for Confluent Cloud - ssl.endpoint.identification.algorithm=https - sasl.mechanism=PLAIN - sasl.jaas.config="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"{{ CLOUD_API_KEY }}\" password=\"{{ CLOUD_API_SECRET }}\";" - security.protocol=SASL_SSL - - # Best practice for higher availability in Apache Kafka clients prior to 3.0 - session.timeout.ms=45000 - - request.timeout.ms = 20000 - retry.backoff.ms = 500 - - # Required connection configs for Confluent Cloud Schema Registry - schema.registry.url="https://{{ SR_ENDPOINT }}" - basic.auth.credentials.source=USER_INFO - basic.auth.user.info="{{ SR_API_KEY }}:{{ SR_API_SECRET }}" - } -} -EOF -``` - -### Environment variables - -Let the plugin know about the Kafka configuration settings: -```shell -export PROVENANCE_ABCI_LISTENER_OPTS="-Dconfig.file=$PIO_HOME/plugins/provenance-abci-listener-$PLUGIN_VERSION/application.conf" -``` - -Let the node know where to find the plugin: -```shell -export COSMOS_SDK_ABCI_V1=$PIO_HOME/plugins/provenance-abci-listener-$PLUGIN_VERSION/bin/provenance-abci-listener -``` - -### Node Configuration - -Enable ABCI streaming for your node -```shell -provenanced config set streaming.abci.plugin abci_v1 -``` - -```shell -provenanced start --x-crisis-skip-assert-invariants --log_level=info -``` -Logging level `trace|debug|info|warn|error|fatal|panic` (default "info") +## Deploy Plugin + +Follow the steps below to download, configure and deploy the plugin. + +1. 
**Specify release** + + ```shell + TAG={{ RELEASE_TAG }} + ``` + Release versions can be found [here](https://github.com/provenance-io/provenance-abci-listener/tags). + + +2. **Download** + + 2.1 - Create directories + + ```shell + mkdir -p $PIO_HOME/plugins + ``` + + 2.2 - Download plugin + + ```shell + curl -s https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$TAG/scripts/deploy.sh | bash -s $TAG + ``` + + 2.3 - Export plugin + + ```shell + export COSMOS_SDK_ABCI_V1=$PIO_HOME/plugins/provenance-abci-listener-$TAG/bin/provenance-abci-listener + ``` + +3. **Configure** + + 3.1 - Self-managed Kafka and Confluent Schema Registry + + ```shell + curl -o $PIO_HOME/plugins/application.conf \ + https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$TAG/src/main/resources/application.conf + ``` + + 3.1.1 - Edit configuration and SET the following properties + ```shell + bootstrap.servers + schema.registry.url + ``` + + 3.2 - Confluent Cloud + + ```shell + curl -o $PIO_HOME/plugins/application.conf \ + https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$TAG/src/main/resources/ccloud.conf + ``` + + 3.2.1 - Edit configuration and REPLACE with your ccloud values + ```shell + {{ BOOTSTRAP_SERVER }} + {{ CLOUD_API_KEY }} + {{ CLOUD_API_SECRET }} + {{ SR_ENDPOINT }} + {{ SR_API_KEY }} + {{ SR_API_SECRET }} + ``` + + 3.3 Export application config + + ```shell + export PROVENANCE_ABCI_LISTENER_OPTS="-Dconfig.file=$PIO_HOME/plugins/application.conf" + ``` + +## Configure Node + +1. **Enable plugin** + + ```shell + provenanced config set streaming.abci.plugin abci_v1 + ``` + +2. **Enable state change listening** + + ```shell + provenanced config set streaming.abci.keys '["*"]' + ``` + * `'["*"]'` - captures state changes for **all** module stores + * `'["metadata", "attribute", "bank", "gov"[,"..."]]'` - captures state changes for **specific** module stores + + +3. 
**Start Node** + + ```shell + provenanced start --x-crisis-skip-assert-invariants --log_level=info + ``` + * `trace|debug|info|warn|error|fatal|panic` - log level options (default is `info`) diff --git a/scripts/deploy.sh b/scripts/deploy.sh index bc5359a..1a17bfc 100755 --- a/scripts/deploy.sh +++ b/scripts/deploy.sh @@ -1,7 +1,8 @@ #!/usr/bin/env bash # -# Download plugin distribution and plugin configuration template. +# Download plugin distribution and extract the plugin. +# In addition, validate the md5 checksum of the zip file. # usage() { @@ -24,66 +25,20 @@ PLUGINS_HOME="$PIO_HOME/plugins" PLUGIN_NAME=provenance-abci-listener PLUGIN_DIR="$PLUGINS_HOME/$PLUGIN_NAME-$TAG" RELEASE_URL="https://github.com/provenance-io/provenance-abci-listener/releases/download/$TAG/provenance-abci-listener-$TAG.zip" -CONFIG_URL="https://raw.githubusercontent.com/provenance-io/provenance-abci-listener/$TAG/src/main/resources/application.conf" [[ -z "$TAG" ]] && usage; +echo "Release: $TAG" + # download release distribution -printf "\nDownloading release %s...\n" "$TAG" -curl --create-dirs -o "$PLUGIN_DIR.zip" -L "$RELEASE_URL" +echo "Downloading release..." +curl -s --create-dirs -o "$PLUGIN_DIR.zip" -L "$RELEASE_URL" # validate md5 checksum -echo "Validating md5 checksum..." -curl --create-dirs -o "$PLUGIN_DIR.zip.md5" -L "$RELEASE_URL.md5" +echo "Validating release (md5)..." 
+curl -s --create-dirs -o "$PLUGIN_DIR.zip.md5" -L "$RELEASE_URL.md5" cd "$PLUGINS_HOME" || exit 1 md5sum -c "$PLUGIN_DIR.zip.md5" || exit 1 -printf "\nExtracting release %s...\n" "$TAG" -unzip "$PLUGIN_DIR.zip" -d "$PLUGINS_HOME" - -# download application.conf -printf "\nDownloading application.conf...\n" -curl --create-dirs -o "$PLUGIN_DIR/application.conf" -L "$CONFIG_URL" - -# set up plugin OPTS -printf "\nSetting up plugin environment variables...\n" -export PROVENANCE_ABCI_LISTENER_OPTS="-Dconfig.file=$PLUGIN_DIR/application.conf" -printf "\nPROVENANCE_ABCI_LISTENER_OPTS=%s" "$PROVENANCE_ABCI_LISTENER_OPTS" -export COSMOS_SDK_ABCI_V1="$PLUGIN_DIR/bin/$PLUGIN_NAME" -printf "\nCOSMOS_SDK_ABCI_V1=%s\n" "$COSMOS_SDK_ABCI_V1" - -# check plugin can run with current config -cat << EOF -Plugin deployed! - -DO NOT FORGET TO UPDATE application.conf to match your environment. - - Producer config options, see: https://kafka.apache.org/documentation/#producerconfigs - - Confluent Cloud configuration: - - kafka-clients { - # Producer configs - # https://kafka.apache.org/documentation/#producerconfigs - bootstrap.servers = "{{ BOOTSTRAP_SERVERS }}" - acks = all - enable.idempotence = true - max.in.flight.requests.per.connection = 1 - linger.ms = 50 - max.request.size = 8388608 - key.serializer = org.apache.kafka.common.serialization.StringSerializer - value.serializer = io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer - - # Required connection configs for Confluent Cloud - ssl.endpoint.identification.algorithm=https - sasl.mechanism=PLAIN - sasl.jaas.config="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"{{ CLUSTER_API_KEY }}\" password=\"'{{ CLUSTER_API_SECRET }}\";" - security.protocol=SASL_SSL - - # Required connection configs for Confluent Cloud Schema Registry - schema.registry.url="{{ SR_URL }}" - basic.auth.credentials.source=USER_INFO - basic.auth.user.info="{{ SR_API_KEY }}:{{ SR_API_SECRET }}" - } - -EOF +echo 
"Extracting release..." +unzip -qq "$PLUGIN_DIR.zip" -d "$PLUGINS_HOME" diff --git a/src/main/resources/application.conf b/src/main/resources/application.conf index 2f84ac8..2456adb 100644 --- a/src/main/resources/application.conf +++ b/src/main/resources/application.conf @@ -6,7 +6,7 @@ grpc.server { # Kafka producer config kafka.producer { - # Assign a topic name and optional prefix where events will be written. + # Assign a topic name and an optional prefix where events will be written. listen-topics { prefix = "local-" listen-begin-block = ${?kafka.producer.listen-topics.prefix}"listen-begin-block" diff --git a/src/main/resources/ccloud.conf b/src/main/resources/ccloud.conf new file mode 100644 index 0000000..60831d9 --- /dev/null +++ b/src/main/resources/ccloud.conf @@ -0,0 +1,47 @@ +# Grpc server config +grpc.server { + addr = localhost + port = 1234 +} + +# Kafka producer config +kafka.producer { + # Assign a topic name and an optional prefix where events will be written. + listen-topics { + prefix = "local-" + listen-begin-block = ${?kafka.producer.listen-topics.prefix}"listen-begin-block" + listen-end-block = ${?kafka.producer.listen-topics.prefix}"listen-end-block" + listen-deliver-tx = ${?kafka.producer.listen-topics.prefix}"listen-deliver-tx" + listen-commit = ${?kafka.producer.listen-topics.prefix}"listen-commit" + } + + # Properties defined by org.apache.kafka.clients.producer.ProducerConfig. + # can be defined in this configuration section. 
+ kafka-clients { + bootstrap.servers = "{{ BROKER_ENDPOINT }}" + acks = all + enable.idempotence = true + max.in.flight.requests.per.connection = 1 + linger.ms = 50 + max.request.size = 204857600 + key.serializer = org.apache.kafka.common.serialization.StringSerializer + value.serializer = io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer + + # Required connection configs for Confluent Cloud + ssl.endpoint.identification.algorithm=https + sasl.mechanism=PLAIN + sasl.jaas.config="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"{{ CLOUD_API_KEY }}\" password=\"{{ CLOUD_API_SECRET }}\";" + security.protocol=SASL_SSL + + # Best practice for higher availability in Apache Kafka clients prior to 3.0 + session.timeout.ms=45000 + + request.timeout.ms = 20000 + retry.backoff.ms = 500 + + # Required connection configs for Confluent Cloud Schema Registry + schema.registry.url="https://{{ SR_ENDPOINT }}" + basic.auth.credentials.source=USER_INFO + basic.auth.user.info="{{ SR_API_KEY }}:{{ SR_API_SECRET }}" + } +} \ No newline at end of file