From 355eae82dee6193e6b23544abbd12cb5dc51e6e1 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 01:25:47 +0000 Subject: [PATCH 01/22] try verify client ivc --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 1 - yarn-project/bb-prover/src/bb/execute.ts | 11 +++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index efbfd9baa84..2ad67cc039b 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -1307,7 +1307,6 @@ int main(int argc, char* argv[]) if (command == "prove_and_verify_mega_honk_program") { return proveAndVerifyHonkProgram(bytecode_path, witness_path) ? 0 : 1; } - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1050) we need a verify_client_ivc bb cli command // TODO(#7371): remove this if (command == "client_ivc_prove_output_all_msgpack") { std::filesystem::path output_dir = get_option(args, "-o", "./target"); diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index d682e563fc5..56748237d2d 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -4,10 +4,13 @@ import { type LogFn, currentLevel as currentLogLevel } from '@aztec/foundation/l import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; + + import * as proc from 'child_process'; import * as fs from 'fs/promises'; import { basename, dirname, join } from 'path'; + export const VK_FILENAME = 'vk'; export const VK_FIELDS_FILENAME = 'vk_fields.json'; export const PROOF_FILENAME = 'proof'; @@ -455,10 +458,14 @@ export async function generateTubeProof( const logFunction = (message: string) => { log(`TubeCircuit (prove) BB out - ${message}`); }; + const verifyResult = await executeBB(pathToBB, 'verify_client_ivc', args, logFunction); + if (verifyResult.status !== BB_RESULT.SUCCESS) { + return { status: BB_RESULT.FAILURE, reason: `Failed to verify client IVC at ${workingDirectory}` }; + } const result = await executeBB(pathToBB, 'prove_tube', args, logFunction); const durationMs = timer.ms(); - if (result.status == BB_RESULT.SUCCESS) { + if (result.status === BB_RESULT.SUCCESS) { return { status: BB_RESULT.SUCCESS, durationMs, @@ -869,4 +876,4 @@ async function fsCache( } return res; -} +} \ No newline at end of file From a4655b4c309d5b8029c6f8ece80d4aeb153ffff6 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:34:28 +0000 Subject: [PATCH 02/22] it should just work now? --- .github/workflows/ci-arm.yml | 138 +++++++++++++++------- .github/workflows/ci.yml | 81 ++++++------- .github/workflows/publish-base-images.yml | 39 ------ barretenberg/Earthfile | 4 +- build-images/Earthfile | 6 + noir/Earthfile | 4 +- scripts/earthly-ci | 15 ++- 7 files changed, 154 insertions(+), 133 deletions(-) delete mode 100644 .github/workflows/publish-base-images.yml diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index 283d05be290..6f228ae01a4 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -22,58 +22,108 @@ env: # kludge until we move away from runners WAIT_FOR_RUNNERS: false jobs: + setup: + uses: ./.github/workflows/setup-runner.yml + with: + username: ${{ github.event.pull_request.user.login || github.actor }} + runner_type: builder-arm + secrets: inherit + + changes: + runs-on: ubuntu-20.04 + # Required permissions. + permissions: + pull-requests: read + # Set job outputs to values from filter step + outputs: + build-images: ${{ steps.filter.outputs.build-images }} + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 + id: filter + with: + filters: | + build-images: + - 'build-images/**' + + build-images: + needs: [setup, changes] + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-images-arm + - name: "Push Build Images If Changed" + if: ${{ needs.changes.outputs.build-images }} + timeout-minutes: 40 + run: | + earthly-ci --push ./build-images/+build + build: - runs-on: ubuntu-latest + needs: [build-images] + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm steps: - uses: actions/checkout@v4 - with: { ref: "${{ github.event.pull_request.head.sha }}" } + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-arm + # prepare images locally, tagged by commit hash - name: "Build E2E Image" timeout-minutes: 40 - uses: ./.github/ensure-builder - with: - runner_type: builder-arm - run: | - set -eux - git submodule update --init --recursive --recommend-shallow - echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u aztecprotocolci --password-stdin - scripts/earthly-ci \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - ./yarn-project+export-e2e-test-images + run: | + earthly-ci ./yarn-project+export-e2e-test-images - # all the end-to-end integration tests for aztec + # all the non-bench end-to-end integration tests for aztec e2e: - needs: build - runs-on: ubuntu-latest + needs: [build] + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm steps: - uses: actions/checkout@v4 - with: { ref: "${{ github.event.pull_request.head.sha }}" } - - name: "Test" - timeout-minutes: 25 - uses: ./.github/ensure-builder + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action with: - runner_type: builder-arm - run: | - sudo shutdown -P 25 # hack until core part of the scripts - set -eux - echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u aztecprotocolci --password-stdin - scripts/earthly-ci \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - --no-output ./yarn-project/end-to-end/+uniswap-trade-on-l1-from-l2 + concurrency_key: e2e-arm + # prepare images locally, tagged by commit hash + - name: "Build E2E Image" + timeout-minutes: 40 + run: | + earthly-ci ./yarn-project/end-to-end+uniswap-trade-on-l1-from-l2 - # not notifying failures right now - # notify: - # needs: [e2e] - # runs-on: ubuntu-latest - # if: ${{ github.ref == 'refs/heads/master' && failure() }} - # steps: - # - name: Send notification to aztec3-ci channel if workflow failed on master - # uses: slackapi/slack-github-action@v1.25.0 - # with: - # payload: | - # { - # "url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" - # } - # env: - # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY_WORKFLOW_TRIGGER_URL }} + rerun-check: + runs-on: ubuntu-20.04 + permissions: + actions: write + needs: [setup, build-images, build, e2e] + if: ${{ !cancelled() }} + steps: + - name: Check for Rerun + env: + # We treat any skipped or failing jobs as a failure for the workflow as a whole. + HAD_FAILURE: ${{ contains(needs.*.result, 'failure') }} + GH_REPO: ${{ github.repository }} + GH_TOKEN: ${{ github.token }} + run: | + if [[ $HAD_FAILURE == true ]] && [[ $RUN_ATTEMPT -lt 2 ]] ; then + echo "Retrying first workflow failure. This is a stop-gap until things are more stable." + gh workflow run rerun.yml -F run_id=${{ github.run_id }} + fi + + # NOTE: we only notify failures after a rerun has occurred + notify: + needs: [e2e] + runs-on: ubuntu-latest + if: ${{ github.ref == 'refs/heads/master' && failure() && github.run_attempt >= 2 }} + steps: + - name: Send notification to aztec3-ci channel if workflow failed on master + uses: slackapi/slack-github-action@v1.25.0 + with: + payload: | + { + "url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_NOTIFY_WORKFLOW_TRIGGER_URL }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb797f3c050..4a9e9752941 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,6 +42,7 @@ jobs: # Set job outputs to values from filter step outputs: avm-transpiler: ${{ steps.filter.outputs.avm-transpiler }} + build-images: ${{ steps.filter.outputs.build-images }} barretenberg: ${{ steps.filter.outputs.barretenberg }} barretenberg-cpp: ${{ steps.filter.outputs.barretenberg-cpp }} noir: ${{ steps.filter.outputs.noir }} @@ -61,6 +62,8 @@ jobs: - 'barretenberg/**' barretenberg-cpp: - 'barretenberg/cpp/**' + build-images: + - 'build-images/**' noir: - 'noir/**' avm-transpiler: @@ -82,8 +85,23 @@ jobs: # Always rebuild when this file changes - .github/workflows/ci.yml - build: + build-images: needs: [setup, changes] + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-images-x86 + - name: "Push Build Images If Changed" + if: ${{ needs.changes.outputs.build-images }} + timeout-minutes: 40 + run: | + earthly-ci --push ./build-images/+build + + build: + needs: [build-images, changes] if: ${{ needs.changes.outputs.non-docs == 'true' && needs.changes.outputs.non-misc-ci == 'true' && needs.changes.outputs.non-barretenberg-cpp == 'true' }} runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 outputs: @@ -99,10 +117,7 @@ jobs: - name: "Build E2E Image" timeout-minutes: 40 run: | - earthly-ci \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - ./yarn-project+export-e2e-test-images + earthly-ci ./yarn-project+export-e2e-test-images # We base our e2e list used in e2e-x86 off the targets in ./yarn-project/end-to-end # (Note ARM uses just 2 tests as a smoketest) - name: Create list of non-bench end-to-end jobs @@ -165,22 +180,18 @@ jobs: # if they fail to copy, it will try to build them on the tester and fail builder_images_to_copy: aztecprotocol/aztec:${{ env.GIT_COMMIT }} aztecprotocol/end-to-end:${{ env.GIT_COMMIT }} # command to produce the images in case they don't exist - builder_command: cd yarn-project/end-to-end/ && ../../scripts/earthly-ci +${{ matrix.test }} + builder_command: scripts/earthly-ci ./yarn-project+export-e2e-test-images tester_ttl: 40 run: | set -eux cd ./yarn-project/end-to-end/ export FORCE_COLOR=1 export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" - ../../scripts/earthly-ci -P \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - --no-output \ - +${{ matrix.test }} + ../../scripts/earthly-ci -P --no-output +${{ matrix.test }} acir-bench: runs-on: ubuntu-20.04 - needs: [setup, changes] + needs: [build-images, changes] # Note: not fully accurate, but to work with bench-summary needs to be the same as bench-e2e if: ${{ needs.changes.outputs.non-barretenberg-cpp == 'true' }} steps: @@ -203,11 +214,7 @@ jobs: cd ./noir/ export FORCE_COLOR=1 export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" - ../scripts/earthly-ci -P \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - --no-output \ - +bench-publish-acir-bb + ../scripts/earthly-ci -P --no-output +bench-publish-acir-bb bench-summary: needs: @@ -254,7 +261,7 @@ jobs: +bench-comment bb-gcc: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg-cpp == 'true' }} steps: @@ -274,7 +281,7 @@ jobs: # barretenberg (prover) native and AVM (public VM) tests # only ran on x86 for resource reasons (memory intensive) bb-native-tests: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg-cpp == 'true' }} steps: @@ -292,7 +299,7 @@ jobs: run: earthly-ci --no-output +test --hardware_concurrency=64 bb-js-test: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg == 'true' }} steps: @@ -307,7 +314,7 @@ jobs: run: earthly-ci --no-output ./+test noir-build-acir-tests: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg == 'true' || needs.changes.outputs.noir == 'true' }} steps: @@ -366,7 +373,7 @@ jobs: run: earthly-ci --no-output ./+barretenberg-acir-tests-bb.js noir-format: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.noir == 'true' || needs.changes.outputs.noir-projects == 'true' }} steps: @@ -389,7 +396,7 @@ jobs: ./+format noir-test: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.noir == 'true' }} steps: @@ -402,7 +409,7 @@ jobs: run: earthly-ci --no-output ./noir+test noir-examples: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg == 'true' || needs.changes.outputs.noir == 'true' }} steps: @@ -415,7 +422,7 @@ jobs: run: earthly-ci --no-output ./noir+examples noir-packages-test: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg == 'true' || needs.changes.outputs.noir == 'true' }} steps: @@ -428,7 +435,7 @@ jobs: run: earthly-ci --no-output ./noir+packages-test noir-projects: - needs: [setup, changes, build] + needs: [build-images, changes, build] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.barretenberg == 'true' || needs.changes.outputs.noir == 'true' || needs.changes.outputs.noir-projects == 'true' }} steps: @@ -440,13 +447,10 @@ jobs: - name: "Noir Projects" timeout-minutes: 40 run: | - earthly-ci --no-output \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - ./noir-projects/+test + earthly-ci --no-output ./noir-projects/+test avm-format: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.avm-transpiler == 'true' || needs.changes.outputs.noir == 'true' }} steps: @@ -500,7 +504,7 @@ jobs: run: earthly-ci --no-output ./yarn-project/+prover-client-test l1-contracts-test: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.l1-contracts == 'true' }} steps: @@ -513,7 +517,7 @@ jobs: run: earthly-ci --no-output ./l1-contracts+test docs-preview: - needs: [setup, changes] + needs: [build-images, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 if: ${{ needs.changes.outputs.non-barretenberg-cpp == 'true' }} steps: @@ -527,8 +531,6 @@ jobs: timeout-minutes: 40 run: | earthly-ci --no-output \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ ./docs/+deploy-preview --ENV=staging --PR=${{ github.event.number }} \ --AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} \ --NETLIFY_AUTH_TOKEN=${{ secrets.NETLIFY_AUTH_TOKEN }} \ @@ -536,7 +538,7 @@ jobs: bb-bench: runs-on: ubuntu-20.04 - needs: [setup, changes] + needs: [build-images, changes] if: ${{ needs.changes.outputs.barretenberg-cpp == 'true' }} steps: - uses: actions/checkout@v4 @@ -618,7 +620,7 @@ jobs: run: earthly-ci -P --no-output +test --box=${{ matrix.box }} --browser=${{ matrix.browser }} --mode=cache protocol-circuits-gates-report: - needs: [setup, changes] + needs: [build-images, changes] if: ${{ needs.changes.outputs.non-docs == 'true' && needs.changes.outputs.non-misc-ci == 'true' }} runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 permissions: @@ -635,8 +637,6 @@ jobs: timeout-minutes: 40 run: | earthly-ci \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ --artifact +gates-report/gates_report.json mv gates_report.json ../protocol_circuits_report.json @@ -752,11 +752,12 @@ jobs: gh workflow run rerun.yml -F run_id=${{ github.run_id }} fi + # NOTE: we only notify failures after a rerun has occurred notify: needs: - merge-check runs-on: ubuntu-20.04 - if: ${{ github.ref == 'refs/heads/master' && failure() }} + if: ${{ github.ref == 'refs/heads/master' && failure() && github.run_attempt >= 2 }} steps: - name: Send notification to aztec3-ci channel if workflow failed on master uses: slackapi/slack-github-action@v1.25.0 diff --git a/.github/workflows/publish-base-images.yml b/.github/workflows/publish-base-images.yml deleted file mode 100644 index c2f52ab39b5..00000000000 --- a/.github/workflows/publish-base-images.yml +++ /dev/null @@ -1,39 +0,0 @@ -# Publishes our base images with custom installs or builds etc -# These publish a multi-arch image by first publishing with x86, and then with arm -# This is a bit of a hack, but earthly needs to see both image types to make a multiplatform image -# and its easiest for arm to just pull the x86 image after. -name: Publish Base Images -on: - workflow_dispatch: {} - -jobs: - publish: - runs-on: ubuntu-latest - env: - EARTHLY_TOKEN: ${{ secrets.EARTHLY_TOKEN }} - # cancel if reran on same PR if exists, otherwise if on same commit - concurrency: - group: publish-base-images-${{ github.event.pull_request.number || github.ref_name }} - cancel-in-progress: ${{ github.ref_name != 'master' }} - steps: - - uses: earthly/actions-setup@v1 - with: - version: v0.8.5 - - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - submodules: recursive - - - name: Setup - working-directory: ./scripts - run: ./setup_env.sh ${{ secrets.DOCKERHUB_PASSWORD }} ${{ github.actor }} - - - name: Publish Barretenberg Base Images - working-directory: ./barretenberg/cpp - run: | - # see note above about how this creates a multiplatform image - earthly-cloud build x86 --push +build-base - earthly-cloud build arm --push +build-base - diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index 56776780cfa..b10de708342 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -14,7 +14,7 @@ sol: barretenberg-acir-tests-bb: - FROM ../build-images/+build + FROM ../build-images/+from-registry COPY ./cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb COPY +acir-tests/ /usr/src/barretenberg/acir_tests @@ -54,7 +54,7 @@ barretenberg-acir-tests-bb: RUN FLOW=all_cmds ./run_acir_tests.sh 1_mul barretenberg-acir-tests-sol: - FROM ../build-images/+build + FROM ../build-images/+from-registry COPY ./cpp/+preset-sol/ /usr/src/barretenberg/cpp/build COPY ./cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb diff --git a/build-images/Earthfile b/build-images/Earthfile index bde05449320..f9a91d77779 100644 --- a/build-images/Earthfile +++ b/build-images/Earthfile @@ -192,8 +192,14 @@ build: && rm -rf wasmtime* ARG TARGETARCH + # NOTE: bump this version when doing non-backwards compatible changes SAVE IMAGE --push aztecprotocol/build:1.0-$TARGETARCH +from-registry: + ARG TARGETARCH + # NOTE: match version string above + FROM aztecprotocol/build:1.0-$TARGETARCH + ######################################################################################################################## # We want to produce downstream images: devbox and sysbox. This image is the base image for each. # It contains a suite of tools that developers might use to develop aztec. diff --git a/noir/Earthfile b/noir/Earthfile index 6cee66f45c7..833cebbe533 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -2,7 +2,7 @@ VERSION 0.8 nargo-src: - FROM ../build-images/+build + FROM ../build-images/+from-registry WORKDIR /usr/src # Relevant source (TODO finer-grained 'tooling') COPY --dir \ @@ -208,7 +208,7 @@ build-acir-tests: # Prepare our exact dependency formula, this avoids problems with copied empty folders or build artifacts RUN rm -rf .earthly-staging && mkdir -p .earthly-staging RUN cp --parents $(git ls-files "noir-repo/test_programs/*.toml" "noir-repo/test_programs/*.nr" "noir-repo/test_programs/rebuild.sh") .earthly-staging - FROM ../build-images/+build + FROM ../build-images/+from-registry COPY +nargo/ /usr/src/noir-repo/target/release ENV PATH="/usr/src/noir-repo/target/release:${PATH}" WORKDIR /usr/src/noir-repo/test_programs diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 49cc4db605a..b1c8fed35b5 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -33,9 +33,11 @@ function wipe_non_cache_docker_state { sudo service docker restart } +EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}" + # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do - if earthly $@ 2>&1 | tee $OUTPUT_FILE >&2 ; then + if earthly $EARTHLY_ARGS $@ 2>&1 | tee $OUTPUT_FILE >&2 ; then exit 0 # Success, exit the script else # Increment attempt counter @@ -45,12 +47,13 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do # Check the output for specific errors if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq $MAX_ATTEMPTS ]; then - echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Do something to change the earthly cache state, like merging master or just retrying after other things build. If all else fails, connect to runner with ci.py and run 'earthly prune' for a bit (can cancel early)." - exit 1 + if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -gte 2 ]; then + echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Trying 'earthly prune' and starting again." + earthly prune + else + echo "Got 'inconsistent graph state' or 'failed to get state for index'. Retrying once." + sleep 5 fi - echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 30 seconds and retrying." - sleep 30 elif grep 'Error: pull ping error: pull ping response' $OUTPUT_FILE >/dev/null; then echo "Got 'Error: pull ping error: pull ping response', intermittent failure when writing out images to docker. If this persists, try 'systemctl restart docker' on the spot instance." elif grep '================================= System Info ==================================' $OUTPUT_FILE >/dev/null || grep 'Error response from daemon: removal of container earthly-buildkitd is already in progress: exit status 1' $OUTPUT_FILE >/dev/null ; then From b54e7fa38bc5b15721e6a7fbbccc503f4af72b17 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:37:17 +0000 Subject: [PATCH 03/22] recovery --- scripts/earthly-ci | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index b1c8fed35b5..0409429921c 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -68,7 +68,10 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do wipe_non_cache_docker_state fi sleep 20 - elif grep 'status 125: docker: Error response from daemon: layer does not exist.' $OUTPUT_FILE >/dev/null || grep 'could not determine buildkit address - is Docker or Podman running?' $OUTPUT_FILE >/dev/null || grep 'please make sure the URL is valid, and Docker 18.09 or later is installed on the remote host' $OUTPUT_FILE >/dev/null ; then + elif grep 'status 125: docker: Error response from daemon: layer does not exist.' $OUTPUT_FILE >/dev/null \ + || grep 'could not determine buildkit address - is Docker or Podman running?' $OUTPUT_FILE >/dev/null \ + || grep 'please make sure the URL is valid, and Docker 18.09 or later is installed on the remote host' $OUTPUT_FILE >/dev/null \ + || grep 'docker: failed to register layer' $OUTPUT_FILE >/dev/null ; then wipe_non_cache_docker_state # wait for other docker restarts sleep 20 From 6a694aa7948c404cb83e5ceb9977ef8a41e7266a Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:50:57 +0000 Subject: [PATCH 04/22] fix secret passing --- .github/workflows/ci.yml | 16 +++------------- scripts/earthly-ci | 2 +- 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4a9e9752941..d50b26a1aec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -233,10 +233,7 @@ jobs: - name: "Build and upload bench aggregate file" working-directory: ./yarn-project/scripts run: | - earthly-ci -P \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - +bench-aggregate + earthly-ci -P +bench-aggregate - name: "Download base benchmark and package into earthly" if: ${{ github.event_name == 'pull_request' }} run: | @@ -254,11 +251,7 @@ jobs: if: ${{ github.event_name == 'pull_request' }} working-directory: ./yarn-project/scripts run: | - earthly-ci -P \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} \ - +bench-comment + earthly-ci -P +bench-comment bb-gcc: needs: [build-images, changes] @@ -390,10 +383,7 @@ jobs: working-directory: ./noir-projects/ timeout-minutes: 40 run: | - earthly-ci --no-output \ - --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ - --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ - ./+format + earthly-ci --no-output ./+format noir-test: needs: [build-images, changes] diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 0409429921c..062c4bd2d06 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -33,7 +33,7 @@ function wipe_non_cache_docker_state { sudo service docker restart } -EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}" +EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${secrets.AZTEC_BOT_GITHUB_TOKEN:-}" # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do From b9334069be24e30bcb90f32b387e5ed172911ad9 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:52:37 +0000 Subject: [PATCH 05/22] fix substitution --- scripts/earthly-ci | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 062c4bd2d06..3a6bd4197a3 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -33,7 +33,7 @@ function wipe_non_cache_docker_state { sudo service docker restart } -EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${secrets.AZTEC_BOT_GITHUB_TOKEN:-}" +EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${AZTEC_BOT_GITHUB_TOKEN:-}" # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do From 0a58bf4d6e5884da543143e118067baf3ad93344 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:55:13 +0000 Subject: [PATCH 06/22] small opt --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d50b26a1aec..2a01ce9d01a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -87,11 +87,14 @@ jobs: build-images: needs: [setup, changes] + # Note we don't but the 'if' here as that would also apply to dependent jobs, instead we just copy it into each step runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 steps: - uses: actions/checkout@v4 + if: ${{ needs.changes.outputs.build-images }} with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action + if: ${{ needs.changes.outputs.build-images }} with: concurrency_key: build-images-x86 - name: "Push Build Images If Changed" From cde25c0531d633ef918e8fedeccc20d161d0ca92 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 16:55:45 +0000 Subject: [PATCH 07/22] Revert "try verify client ivc" This reverts commit 355eae82dee6193e6b23544abbd12cb5dc51e6e1. --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 1 + yarn-project/bb-prover/src/bb/execute.ts | 11 ++--------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 2ad67cc039b..efbfd9baa84 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -1307,6 +1307,7 @@ int main(int argc, char* argv[]) if (command == "prove_and_verify_mega_honk_program") { return proveAndVerifyHonkProgram(bytecode_path, witness_path) ? 0 : 1; } + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1050) we need a verify_client_ivc bb cli command // TODO(#7371): remove this if (command == "client_ivc_prove_output_all_msgpack") { std::filesystem::path output_dir = get_option(args, "-o", "./target"); diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index 56748237d2d..d682e563fc5 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -4,13 +4,10 @@ import { type LogFn, currentLevel as currentLogLevel } from '@aztec/foundation/l import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; - - import * as proc from 'child_process'; import * as fs from 'fs/promises'; import { basename, dirname, join } from 'path'; - export const VK_FILENAME = 'vk'; export const VK_FIELDS_FILENAME = 'vk_fields.json'; export const PROOF_FILENAME = 'proof'; @@ -458,14 +455,10 @@ export async function generateTubeProof( const logFunction = (message: string) => { log(`TubeCircuit (prove) BB out - ${message}`); }; - const verifyResult = await executeBB(pathToBB, 'verify_client_ivc', args, logFunction); - if (verifyResult.status !== BB_RESULT.SUCCESS) { - return { status: BB_RESULT.FAILURE, reason: `Failed to verify client IVC at ${workingDirectory}` }; - } const result = await executeBB(pathToBB, 'prove_tube', args, logFunction); const durationMs = timer.ms(); - if (result.status === BB_RESULT.SUCCESS) { + if (result.status == BB_RESULT.SUCCESS) { return { status: BB_RESULT.SUCCESS, durationMs, @@ -876,4 +869,4 @@ async function fsCache( } return res; -} \ No newline at end of file +} From ee38a5dcfd100ead576ff07e7ac9c572e9ca8673 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:16:22 +0000 Subject: [PATCH 08/22] env var passing --- scripts/run_on_builder | 17 ++++++++++++++++- scripts/run_on_tester | 17 ++++++++++++++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/scripts/run_on_builder b/scripts/run_on_builder index a170a9727db..8d50aac9674 100755 --- a/scripts/run_on_builder +++ b/scripts/run_on_builder @@ -4,4 +4,19 @@ set -eu # Enter the repo root cd "$(dirname "$0")/.." -ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$BUILDER_SPOT_KEY" ubuntu@"$BUILDER_SPOT_IP" "$@" +# Define environment variables +ENV_VARS=" + DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD + RUN_ID=$RUN_ID + RUN_ATTEMPT=$RUN_ATTEMPT + USERNAME=$USERNAME + GITHUB_TOKEN=$GITHUB_TOKEN + GH_SELF_HOSTED_RUNNER_TOKEN=$GH_SELF_HOSTED_RUNNER_TOKEN + AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY + BUILD_INSTANCE_SSH_KEY=$BUILD_INSTANCE_SSH_KEY + GIT_COMMIT=$GIT_COMMIT + WAIT_FOR_RUNNERS=$WAIT_FOR_RUNNERS +" + +ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$BUILDER_SPOT_KEY" ubuntu@"$BUILDER_SPOT_IP" "$ENV_VARS $@" diff --git a/scripts/run_on_tester b/scripts/run_on_tester index 73c2ddbdd40..2191bb99cb7 100755 --- a/scripts/run_on_tester +++ b/scripts/run_on_tester @@ -4,4 +4,19 @@ set -eu # Enter the repo root cd "$(dirname "$0")/.." -ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$SPOT_KEY" ubuntu@"$SPOT_IP" "$@" +# Define environment variables +ENV_VARS=" + DOCKERHUB_PASSWORD=$DOCKERHUB_PASSWORD + RUN_ID=$RUN_ID + RUN_ATTEMPT=$RUN_ATTEMPT + USERNAME=$USERNAME + GITHUB_TOKEN=$GITHUB_TOKEN + GH_SELF_HOSTED_RUNNER_TOKEN=$GH_SELF_HOSTED_RUNNER_TOKEN + AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY + BUILD_INSTANCE_SSH_KEY=$BUILD_INSTANCE_SSH_KEY + GIT_COMMIT=$GIT_COMMIT + WAIT_FOR_RUNNERS=$WAIT_FOR_RUNNERS +" + +ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$SPOT_KEY" ubuntu@"$SPOT_IP" "$ENV_VARS $@" From da8b8d2f5534d2c4ee6b55b3c1bf5be5331819eb Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:19:34 +0000 Subject: [PATCH 09/22] fix ci --- .github/workflows/setup-runner.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml index 3434e3432b3..1f9a3a485f5 100644 --- a/.github/workflows/setup-runner.yml +++ b/.github/workflows/setup-runner.yml @@ -40,6 +40,7 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} BUILD_INSTANCE_SSH_KEY: ${{ secrets.BUILD_INSTANCE_SSH_KEY }} + GIT_COMMIT: ${{ github.sha }} # kludge until we move away from runners WAIT_FOR_RUNNERS: true From 811b80d8e33ab62ef108522b86f94200fbf6827b Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:23:59 +0000 Subject: [PATCH 10/22] fix ci --- scripts/run_on_builder | 5 ++++- scripts/run_on_tester | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/run_on_builder b/scripts/run_on_builder index 8d50aac9674..511dc7b5eea 100755 --- a/scripts/run_on_builder +++ b/scripts/run_on_builder @@ -19,4 +19,7 @@ ENV_VARS=" WAIT_FOR_RUNNERS=$WAIT_FOR_RUNNERS " -ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$BUILDER_SPOT_KEY" ubuntu@"$BUILDER_SPOT_IP" "$ENV_VARS $@" +# Format the environment variables for the SSH command +ENV_EXPORTS=$(printf 'export %s; ' $ENV_VARS) + +ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$BUILDER_SPOT_KEY" ubuntu@"$BUILDER_SPOT_IP" "$ENV_EXPORTS $@" diff --git a/scripts/run_on_tester b/scripts/run_on_tester index 2191bb99cb7..1443ebc67c0 100755 --- a/scripts/run_on_tester +++ b/scripts/run_on_tester @@ -19,4 +19,7 @@ ENV_VARS=" WAIT_FOR_RUNNERS=$WAIT_FOR_RUNNERS " -ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$SPOT_KEY" ubuntu@"$SPOT_IP" "$ENV_VARS $@" +# Format the environment variables for the SSH command +ENV_EXPORTS=$(printf 'export %s; ' $ENV_VARS) + +ssh -o ControlMaster=auto -o ControlPath=~/.ssh_mux_%h_%p_%r -o ControlPersist=30s -o TCPKeepAlive=no -o ServerAliveCountMax=5 -o ServerAliveInterval=30 -o StrictHostKeyChecking=no -i "$SPOT_KEY" ubuntu@"$SPOT_IP" "$ENV_EXPORTS $@" From 5f232adcda666dc68afd3a3789cea42b005cfe20 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:30:22 +0000 Subject: [PATCH 11/22] fix cache recovery --- scripts/earthly-ci | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 3a6bd4197a3..3701f36eede 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -47,7 +47,7 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do # Check the output for specific errors if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -gte 2 ]; then + if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 2 ]; then echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Trying 'earthly prune' and starting again." earthly prune else From 13c98e4eda2b97242fb0919e37f79bfc0ae3ad25 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:31:13 +0000 Subject: [PATCH 12/22] fix --- scripts/earthly-ci | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 3701f36eede..21acafe80db 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -47,7 +47,7 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do # Check the output for specific errors if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 2 ]; then + if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ]; then echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Trying 'earthly prune' and starting again." earthly prune else From 0eaa9693a413d6d14ce5511da75fe1b494df3750 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 17:49:54 +0000 Subject: [PATCH 13/22] try with no cache instead --- .github/workflows/ci.yml | 4 ++-- barretenberg/Earthfile | 4 ++-- barretenberg/cpp/Earthfile | 2 +- barretenberg/ts/Earthfile | 2 +- boxes/Earthfile | 2 +- l1-contracts/Earthfile | 2 +- noir-projects/Earthfile | 12 ++++++------ scripts/earthly-ci | 9 ++++++--- yarn-project/Earthfile | 4 ++-- 9 files changed, 22 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2a01ce9d01a..02ee4253c10 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -587,7 +587,7 @@ jobs: concurrency_key: boxes-${{ github.event.pull_request.user.login || github.actor }}-build - name: Build working-directory: ./boxes - timeout-minutes: 20 + timeout-minutes: 40 run: earthly-ci +export-boxes boxes-test: @@ -609,7 +609,7 @@ jobs: concurrency_key: boxes-${{ github.event.pull_request.user.login || github.actor }}-x86-${{ matrix.box }}-${{ matrix.browser }} - name: Box test working-directory: ./boxes - timeout-minutes: 10 + timeout-minutes: 40 run: earthly-ci -P --no-output +test --box=${{ matrix.box }} --browser=${{ matrix.browser }} --mode=cache protocol-circuits-gates-report: diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index b10de708342..2c3b9a9a129 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -1,13 +1,13 @@ VERSION 0.8 acir-tests: - FROM ../build-images+build + FROM ../build-images+from-registry WORKDIR /usr/src/barretenberg COPY ./acir_tests . SAVE ARTIFACT ./* sol: - FROM ../build-images+build + FROM ../build-images+from-registry WORKDIR /usr/src/barretenberg COPY ./sol . SAVE ARTIFACT ./* diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile index 555b05e9b9b..f340432b505 100644 --- a/barretenberg/cpp/Earthfile +++ b/barretenberg/cpp/Earthfile @@ -7,7 +7,7 @@ wasmtime: SAVE ARTIFACT /root/.wasmtime/bin/wasmtime source: - FROM ../../build-images+build + FROM ../../build-images+from-registry WORKDIR /usr/src/barretenberg # cpp source COPY --dir src/barretenberg src/CMakeLists.txt src diff --git a/barretenberg/ts/Earthfile b/barretenberg/ts/Earthfile index 3cebf04974c..b18e91f9039 100644 --- a/barretenberg/ts/Earthfile +++ b/barretenberg/ts/Earthfile @@ -1,6 +1,6 @@ VERSION 0.8 -FROM ../../build-images+build +FROM ../../build-images+from-registry WORKDIR /usr/src/barretenberg/ts-build # minimum files to download yarn packages diff --git a/boxes/Earthfile b/boxes/Earthfile index 928cea77719..0ceeb51ce4d 100644 --- a/boxes/Earthfile +++ b/boxes/Earthfile @@ -7,7 +7,7 @@ deps: # use the build image to build the project so that nargo has access to the same libc # later on, once everything is built and we don't need nargo, copy to a debian based image # that's supported by playwright - FROM ../build-images+build + FROM ../build-images+from-registry # copy the aztec yarn workspace, needed to resolve portal dependencies COPY ../yarn-project+build-dev/usr/src /usr/src diff --git a/l1-contracts/Earthfile b/l1-contracts/Earthfile index 5e1e559be62..9ad5d1000f6 100644 --- a/l1-contracts/Earthfile +++ b/l1-contracts/Earthfile @@ -1,7 +1,7 @@ VERSION 0.8 build: - FROM ../build-images+build + FROM ../build-images+from-registry WORKDIR /usr/src/l1-contracts COPY --dir lib src terraform test *.json *.toml *.sh . #RUN git init && git add . && yarn lint && yarn slither && yarn slither-has-diff diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index c5a24bd0a85..f1b254c3dfc 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -1,7 +1,7 @@ VERSION 0.8 source: - FROM ../build-images+build + FROM ../build-images+from-registry # Install nargo COPY ../noir/+nargo/nargo /usr/bin/nargo @@ -24,7 +24,7 @@ build-contracts: RUN cd noir-contracts && NARGO=nargo TRANSPILER=avm-transpiler ./bootstrap.sh SAVE ARTIFACT noir-contracts - + build-protocol-circuits: LOCALLY LET bb_source_hash = $(cd .. && git ls-tree -r HEAD | grep 'barretenberg/cpp' | awk '{print $3}' | git hash-object --stdin) @@ -38,10 +38,10 @@ build: FROM +source BUILD +build-contracts BUILD +build-protocol-circuits - + COPY +build-contracts/noir-contracts ./noir-contracts COPY +build-protocol-circuits/noir-protocol-circuits ./noir-protocol-circuits - + SAVE ARTIFACT aztec-nr SAVE ARTIFACT noir-contracts SAVE ARTIFACT noir-protocol-circuits @@ -55,7 +55,7 @@ test: COPY +build/. /usr/src/noir-projects RUN cd /usr/src/noir-projects/noir-protocol-circuits && nargo test --silence-warnings - + RUN cd /usr/src/yarn-project/txe && yarn start & \ # Wait for TXE to initialize sleep 5 && \ @@ -91,5 +91,5 @@ gates-report: ENV BB_BIN /usr/src/barretenberg/cpp/build/bin/bb RUN ./gates_report.sh - + SAVE ARTIFACT ./noir-protocol-circuits/gates_report.json gates_report.json diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 21acafe80db..77ec0f0cc5a 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -34,6 +34,10 @@ function wipe_non_cache_docker_state { } EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${AZTEC_BOT_GITHUB_TOKEN:-}" +if "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ; then + echo "Trying earthly build with no cache, this will take longer but is very unlikely to not hit the graph edge bug." + export EARTHLY_NO_CACHE=1 +fi # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do @@ -48,11 +52,10 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ]; then - echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Trying 'earthly prune' and starting again." - earthly prune + echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Starting again with no cache." else echo "Got 'inconsistent graph state' or 'failed to get state for index'. Retrying once." - sleep 5 + sleep 20 fi elif grep 'Error: pull ping error: pull ping response' $OUTPUT_FILE >/dev/null; then echo "Got 'Error: pull ping error: pull ping response', intermittent failure when writing out images to docker. If this persists, try 'systemctl restart docker' on the spot instance." diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 220f8292c8e..feb74419c30 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -4,7 +4,7 @@ deps: LOCALLY LET packages = $(git ls-files "**/package*.json" package*.json) LET tsconfigs = $(git ls-files "**/tsconfig*.json" tsconfig*.json) - FROM ../build-images+build + FROM ../build-images+from-registry # copy bb, bb-js and noir-packages COPY ../barretenberg/cpp/+preset-release/bin /usr/src/barretenberg/cpp/build/ COPY ../barretenberg/ts/+build/build /usr/src/barretenberg/ts @@ -231,7 +231,7 @@ end-to-end-prod: SAVE ARTIFACT /usr/src /usr/src anvil: - FROM ../build-images+build + FROM ../build-images+from-registry SAVE ARTIFACT /opt/foundry/bin/anvil end-to-end-base: From 53d8003709c57f58043d8564af664e3ed6b31332 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:14:03 +0000 Subject: [PATCH 14/22] no-cache for earthly ci fix --- scripts/earthly-ci | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 77ec0f0cc5a..1a2d684b939 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -34,7 +34,7 @@ function wipe_non_cache_docker_state { } EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${AZTEC_BOT_GITHUB_TOKEN:-}" -if "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ; then +if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ] ; then echo "Trying earthly build with no cache, this will take longer but is very unlikely to not hit the graph edge bug." export EARTHLY_NO_CACHE=1 fi From 1e57632602264824b766ceed9499375489e11e92 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:18:20 +0000 Subject: [PATCH 15/22] remove use inline cache --- scripts/earthly-ci | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 1a2d684b939..326812e17e1 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -16,7 +16,6 @@ ATTEMPT_COUNT=0 # earthly settings export EARTHLY_ALLOW_PRIVILEGED=true -export EARTHLY_USE_INLINE_CACHE=true export EARTHLY_NO_BUILDKIT_UPDATE=true # make sure earthly gives annotations that github picks up export GITHUB_ACTIONS=true From f292e574d4506a4fbfaed2aa5a63eaf8136f74e4 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:18:49 +0000 Subject: [PATCH 16/22] no-cache --- scripts/earthly-ci | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 326812e17e1..253864aaf93 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -35,7 +35,7 @@ function wipe_non_cache_docker_state { EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${AZTEC_BOT_GITHUB_TOKEN:-}" if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ] ; then echo "Trying earthly build with no cache, this will take longer but is very unlikely to not hit the graph edge bug." - export EARTHLY_NO_CACHE=1 + EARTHLY_ARGS="--no-cache $EARTHLY_ARGS" fi # Handle earthly commands and retries From 35c306c67f10023dfb7431d1f17114bf38aa898e Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:19:45 +0000 Subject: [PATCH 17/22] no-cache fix --- scripts/earthly-ci | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 253864aaf93..0e7224077f5 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -33,10 +33,6 @@ function wipe_non_cache_docker_state { } EARTHLY_ARGS="--secret AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-} --secret AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${AZTEC_BOT_GITHUB_TOKEN:-}" -if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ] ; then - echo "Trying earthly build with no cache, this will take longer but is very unlikely to not hit the graph edge bug." - EARTHLY_ARGS="--no-cache $EARTHLY_ARGS" -fi # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do @@ -52,6 +48,7 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -ge 3 ]; then echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Starting again with no cache." + EARTHLY_ARGS="--no-cache $EARTHLY_ARGS" else echo "Got 'inconsistent graph state' or 'failed to get state for index'. Retrying once." sleep 20 From e981faa05f479c2e1b011a2b96e2200f346a40fb Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:36:53 +0000 Subject: [PATCH 18/22] kludge --- .github/workflows/ci-arm.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index 6f228ae01a4..b9740b21710 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -66,6 +66,8 @@ jobs: needs: [build-images] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm steps: + # permission kludge before checkout, see https://github.com/actions/checkout/issues/211#issuecomment-611986243 + - run: sudo chown -R $USER:$USER /home/github/actions-runner/_work/ - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action From 491e35ec3544f063494e3a3f20bcfd8575b2898b Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:40:36 +0000 Subject: [PATCH 19/22] perms --- .github/workflows/ci-arm.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index b9740b21710..2dc25a31f2e 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -67,7 +67,7 @@ jobs: runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm steps: # permission kludge before checkout, see https://github.com/actions/checkout/issues/211#issuecomment-611986243 - - run: sudo chown -R $USER:$USER /home/github/actions-runner/_work/ + - run: sudo chown -R $USER:$USER /home/ubuntu/ - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action From 14b95e0367d3da0d5c0738748f6ae9de761f0754 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 18:40:50 +0000 Subject: [PATCH 20/22] perms --- .github/workflows/ci-arm.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index 2dc25a31f2e..5a527090606 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -51,6 +51,8 @@ jobs: needs: [setup, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-arm steps: + # permission kludge before checkout, see https://github.com/actions/checkout/issues/211#issuecomment-611986243 + - run: sudo chown -R $USER:$USER /home/ubuntu/ - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action From 464a5859e16d1679680ad13db286d6ae75bd1328 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 19:18:13 +0000 Subject: [PATCH 21/22] bundle disable --- .../end-to-end/src/shared/uniswap_l1_l2.ts | 502 +++++++++--------- 1 file changed, 244 insertions(+), 258 deletions(-) diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index 6caf5b59482..65a2e4a8ca6 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -1,36 +1,21 @@ -import { - type AccountWallet, - AztecAddress, - type AztecNode, - type DebugLogger, - EthAddress, - Fr, - type PXE, - computeAuthWitMessageHash, -} from '@aztec/aztec.js'; +import { type AccountWallet, AztecAddress, type AztecNode, type DebugLogger, EthAddress, Fr, type PXE, computeAuthWitMessageHash } from '@aztec/aztec.js'; import { deployL1Contract } from '@aztec/ethereum'; import { sha256ToField } from '@aztec/foundation/crypto'; import { InboxAbi, UniswapPortalAbi, UniswapPortalBytecode } from '@aztec/l1-artifacts'; import { UniswapContract } from '@aztec/noir-contracts.js/Uniswap'; + + import { jest } from '@jest/globals'; import { strict as assert } from 'assert'; -import { - type Account, - type Chain, - type GetContractReturnType, - type HttpTransport, - type PublicClient, - type WalletClient, - decodeEventLog, - getContract, - parseEther, - toFunctionSelector, -} from 'viem'; +import { type Account, type Chain, type GetContractReturnType, type HttpTransport, type PublicClient, type WalletClient, decodeEventLog, getContract, parseEther, toFunctionSelector } from 'viem'; + + import { publicDeployAccounts } from '../fixtures/utils.js'; import { CrossChainTestHarness } from './cross_chain_test_harness.js'; + // PSA: This tests works on forked mainnet. There is a dump of the data in `dumpedState` such that we // don't need to burn through RPC requests. // To generate a new dump, use the `dumpChainState` cheatcode. @@ -84,7 +69,7 @@ export const uniswapL1L2TestSuite = ( let ownerEthAddress: EthAddress; // does transactions on behalf of owner on Aztec: let sponsorWallet: AccountWallet; - let sponsorAddress: AztecAddress; + // let sponsorAddress: AztecAddress; let daiCrossChainHarness: CrossChainTestHarness; let wethCrossChainHarness: CrossChainTestHarness; @@ -379,240 +364,241 @@ export const uniswapL1L2TestSuite = ( }); // docs:end:uniswap_private - // docs:start:uniswap_public - it('should uniswap trade on L1 from L2 funds publicly (swaps WETH -> DAI)', async () => { - const wethL1BeforeBalance = await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress); - - // 1. Approve and deposit weth to the portal and move to L2 - const [secretForMintingWeth, secretHashForMintingWeth] = wethCrossChainHarness.generateClaimSecret(); - - const wethDepositMsgHash = await wethCrossChainHarness.sendTokensToPortalPublic( - wethAmountToBridge, - secretHashForMintingWeth, - ); - // funds transferred from owner to token portal - expect(await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress)).toBe( - wethL1BeforeBalance - wethAmountToBridge, - ); - expect(await wethCrossChainHarness.getL1BalanceOf(wethCrossChainHarness.tokenPortalAddress)).toBe( - wethAmountToBridge, - ); - - // Wait for the message to be available for consumption - await wethCrossChainHarness.makeMessageConsumable(wethDepositMsgHash); - - // Get message leaf index, needed for claiming in public - const wethDepositMaybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness( - 'latest', - wethDepositMsgHash, - 0n, - ); - assert(wethDepositMaybeIndexAndPath !== undefined, 'Message not found in tree'); - const wethDepositMessageLeafIndex = wethDepositMaybeIndexAndPath[0]; - - // 2. Claim WETH on L2 - logger.info('Minting weth on L2'); - await wethCrossChainHarness.consumeMessageOnAztecAndMintPublicly( - wethAmountToBridge, - secretForMintingWeth, - wethDepositMessageLeafIndex, - ); - await wethCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, wethAmountToBridge); - - // Store balances - const wethL2BalanceBeforeSwap = await wethCrossChainHarness.getL2PublicBalanceOf(ownerAddress); - const daiL2BalanceBeforeSwap = await daiCrossChainHarness.getL2PublicBalanceOf(ownerAddress); - - // 3. Owner gives uniswap approval to transfer funds on its behalf - const nonceForWETHTransferApproval = new Fr(1n); - - await ownerWallet - .setPublicAuthWit( - { - caller: uniswapL2Contract.address, - action: wethCrossChainHarness.l2Token.methods - .transfer_public( - ownerAddress, - uniswapL2Contract.address, - wethAmountToBridge, - nonceForWETHTransferApproval, - ) - .request(), - }, - true, - ) - .send() - .wait(); - - // 4. Swap on L1 - sends L2 to L1 message to withdraw WETH to L1 and another message to swap assets. - const [secretForDepositingSwappedDai, secretHashForDepositingSwappedDai] = - daiCrossChainHarness.generateClaimSecret(); - - // 4.1 Owner approves user to swap on their behalf: - const nonceForSwap = new Fr(3n); - const action = uniswapL2Contract - .withWallet(sponsorWallet) - .methods.swap_public( - ownerAddress, - wethCrossChainHarness.l2Bridge.address, - wethAmountToBridge, - daiCrossChainHarness.l2Bridge.address, - nonceForWETHTransferApproval, - uniswapFeeTier, - minimumOutputAmount, - ownerAddress, - secretHashForDepositingSwappedDai, - ownerEthAddress, - nonceForSwap, - ); - await ownerWallet.setPublicAuthWit({ caller: sponsorAddress, action }, true).send().wait(); - - // 4.2 Call swap_public from user2 on behalf of owner - const uniswapL2Interaction = await action.send().wait(); - - const swapPublicContent = sha256ToField([ - Buffer.from( - toFunctionSelector('swap_public(address,uint256,uint24,address,uint256,bytes32,bytes32,address)').substring( - 2, - ), - 'hex', - ), - wethCrossChainHarness.tokenPortalAddress.toBuffer32(), - new Fr(wethAmountToBridge), - new Fr(uniswapFeeTier), - daiCrossChainHarness.tokenPortalAddress.toBuffer32(), - new Fr(minimumOutputAmount), - ownerAddress, - secretHashForDepositingSwappedDai, - ownerEthAddress.toBuffer32(), - ]); - - const swapPublicLeaf = sha256ToField([ - uniswapL2Contract.address, - new Fr(1), // aztec version - EthAddress.fromString(uniswapPortal.address).toBuffer32(), - new Fr(publicClient.chain.id), // chain id - swapPublicContent, - ]); - - const withdrawContent = sha256ToField([ - Buffer.from(toFunctionSelector('withdraw(address,uint256,address)').substring(2), 'hex'), - uniswapPortalAddress.toBuffer32(), - new Fr(wethAmountToBridge), - uniswapPortalAddress.toBuffer32(), - ]); - - const withdrawLeaf = sha256ToField([ - wethCrossChainHarness.l2Bridge.address, - new Fr(1), // aztec version - wethCrossChainHarness.tokenPortalAddress.toBuffer32(), - new Fr(publicClient.chain.id), // chain id - withdrawContent, - ]); - - // check weth balance of owner on L2 (we first bridged `wethAmountToBridge` into L2 and now withdrew it!) - await wethCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, wethL2BalanceBeforeSwap - wethAmountToBridge); - - // 5. Perform the swap on L1 with the `uniswapPortal.swap_private()` (consuming L2 to L1 messages) - logger.info('Execute withdraw and swap on the uniswapPortal!'); - const daiL1BalanceOfPortalBeforeSwap = await daiCrossChainHarness.getL1BalanceOf( - daiCrossChainHarness.tokenPortalAddress, - ); - - const [swapPrivateL2MessageIndex, swapPrivateSiblingPath] = await aztecNode.getL2ToL1MessageMembershipWitness( - uniswapL2Interaction.blockNumber!, - swapPublicLeaf, - ); - const [withdrawL2MessageIndex, withdrawSiblingPath] = await aztecNode.getL2ToL1MessageMembershipWitness( - uniswapL2Interaction.blockNumber!, - withdrawLeaf, - ); - - const withdrawMessageMetadata = { - _l2BlockNumber: BigInt(uniswapL2Interaction.blockNumber!), - _leafIndex: BigInt(withdrawL2MessageIndex), - _path: withdrawSiblingPath - .toBufferArray() - .map((buf: Buffer) => `0x${buf.toString('hex')}`) as readonly `0x${string}`[], - }; - - const swapPrivateMessageMetadata = { - _l2BlockNumber: BigInt(uniswapL2Interaction.blockNumber!), - _leafIndex: BigInt(swapPrivateL2MessageIndex), - _path: swapPrivateSiblingPath - .toBufferArray() - .map((buf: Buffer) => `0x${buf.toString('hex')}`) as readonly `0x${string}`[], - }; - - const swapArgs = [ - wethCrossChainHarness.tokenPortalAddress.toString(), - wethAmountToBridge, - Number(uniswapFeeTier), - daiCrossChainHarness.tokenPortalAddress.toString(), - minimumOutputAmount, - ownerAddress.toString(), - secretHashForDepositingSwappedDai.toString(), - true, - [withdrawMessageMetadata, swapPrivateMessageMetadata], - ] as const; - - // this should also insert a message into the inbox. - const txHash = await uniswapPortal.write.swapPublic(swapArgs, {} as any); - - // We get the msg leaf from event so that we can later wait for it to be available for consumption - let outTokenDepositMsgHash: Fr; - { - const txReceipt = await daiCrossChainHarness.publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - const txLog = txReceipt.logs[9]; - const topics = decodeEventLog({ - abi: InboxAbi, - data: txLog.data, - topics: txLog.topics, - }); - outTokenDepositMsgHash = Fr.fromString(topics.args.hash); - } - - // weth was swapped to dai and send to portal - const daiL1BalanceOfPortalAfter = await daiCrossChainHarness.getL1BalanceOf( - daiCrossChainHarness.tokenPortalAddress, - ); - expect(daiL1BalanceOfPortalAfter).toBeGreaterThan(daiL1BalanceOfPortalBeforeSwap); - const daiAmountToBridge = BigInt(daiL1BalanceOfPortalAfter - daiL1BalanceOfPortalBeforeSwap); - - // Wait for the message to be available for consumption - await daiCrossChainHarness.makeMessageConsumable(outTokenDepositMsgHash); - - // Get message leaf index, needed for claiming in public - const outTokenDepositMaybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness( - 'latest', - outTokenDepositMsgHash, - 0n, - ); - assert(outTokenDepositMaybeIndexAndPath !== undefined, 'Message not found in tree'); - const outTokenDepositMessageLeafIndex = outTokenDepositMaybeIndexAndPath[0]; - - // 6. claim dai on L2 - logger.info('Consuming messages to mint dai on L2'); - await daiCrossChainHarness.consumeMessageOnAztecAndMintPublicly( - daiAmountToBridge, - secretForDepositingSwappedDai, - outTokenDepositMessageLeafIndex, - ); - await daiCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, daiL2BalanceBeforeSwap + daiAmountToBridge); - - const wethL2BalanceAfterSwap = await wethCrossChainHarness.getL2PublicBalanceOf(ownerAddress); - const daiL2BalanceAfterSwap = await daiCrossChainHarness.getL2PublicBalanceOf(ownerAddress); - - logger.info('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); - logger.info('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); - logger.info('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); - logger.info('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); - logger.info('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); - }); - // docs:end:uniswap_public + // TODO(#7463): reenable look into this failure https://github.com/AztecProtocol/aztec-packages/actions/runs/9912612912/job/27388320150?pr=7462 + // // docs:start:uniswap_public + // it('should uniswap trade on L1 from L2 funds publicly (swaps WETH -> DAI)', async () => { + // const wethL1BeforeBalance = await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress); + + // // 1. Approve and deposit weth to the portal and move to L2 + // const [secretForMintingWeth, secretHashForMintingWeth] = wethCrossChainHarness.generateClaimSecret(); + + // const wethDepositMsgHash = await wethCrossChainHarness.sendTokensToPortalPublic( + // wethAmountToBridge, + // secretHashForMintingWeth, + // ); + // // funds transferred from owner to token portal + // expect(await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress)).toBe( + // wethL1BeforeBalance - wethAmountToBridge, + // ); + // expect(await wethCrossChainHarness.getL1BalanceOf(wethCrossChainHarness.tokenPortalAddress)).toBe( + // wethAmountToBridge, + // ); + + // // Wait for the message to be available for consumption + // await wethCrossChainHarness.makeMessageConsumable(wethDepositMsgHash); + + // // Get message leaf index, needed for claiming in public + // const wethDepositMaybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness( + // 'latest', + // wethDepositMsgHash, + // 0n, + // ); + // assert(wethDepositMaybeIndexAndPath !== undefined, 'Message not found in tree'); + // const wethDepositMessageLeafIndex = wethDepositMaybeIndexAndPath[0]; + + // // 2. Claim WETH on L2 + // logger.info('Minting weth on L2'); + // await wethCrossChainHarness.consumeMessageOnAztecAndMintPublicly( + // wethAmountToBridge, + // secretForMintingWeth, + // wethDepositMessageLeafIndex, + // ); + // await wethCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, wethAmountToBridge); + + // // Store balances + // const wethL2BalanceBeforeSwap = await wethCrossChainHarness.getL2PublicBalanceOf(ownerAddress); + // const daiL2BalanceBeforeSwap = await daiCrossChainHarness.getL2PublicBalanceOf(ownerAddress); + + // // 3. Owner gives uniswap approval to transfer funds on its behalf + // const nonceForWETHTransferApproval = new Fr(1n); + + // await ownerWallet + // .setPublicAuthWit( + // { + // caller: uniswapL2Contract.address, + // action: wethCrossChainHarness.l2Token.methods + // .transfer_public( + // ownerAddress, + // uniswapL2Contract.address, + // wethAmountToBridge, + // nonceForWETHTransferApproval, + // ) + // .request(), + // }, + // true, + // ) + // .send() + // .wait(); + + // // 4. Swap on L1 - sends L2 to L1 message to withdraw WETH to L1 and another message to swap assets. + // const [secretForDepositingSwappedDai, secretHashForDepositingSwappedDai] = + // daiCrossChainHarness.generateClaimSecret(); + + // // 4.1 Owner approves user to swap on their behalf: + // const nonceForSwap = new Fr(3n); + // const action = uniswapL2Contract + // .withWallet(sponsorWallet) + // .methods.swap_public( + // ownerAddress, + // wethCrossChainHarness.l2Bridge.address, + // wethAmountToBridge, + // daiCrossChainHarness.l2Bridge.address, + // nonceForWETHTransferApproval, + // uniswapFeeTier, + // minimumOutputAmount, + // ownerAddress, + // secretHashForDepositingSwappedDai, + // ownerEthAddress, + // nonceForSwap, + // ); + // await ownerWallet.setPublicAuthWit({ caller: sponsorAddress, action }, true).send().wait(); + + // // 4.2 Call swap_public from user2 on behalf of owner + // const uniswapL2Interaction = await action.send().wait(); + + // const swapPublicContent = sha256ToField([ + // Buffer.from( + // toFunctionSelector('swap_public(address,uint256,uint24,address,uint256,bytes32,bytes32,address)').substring( + // 2, + // ), + // 'hex', + // ), + // wethCrossChainHarness.tokenPortalAddress.toBuffer32(), + // new Fr(wethAmountToBridge), + // new Fr(uniswapFeeTier), + // daiCrossChainHarness.tokenPortalAddress.toBuffer32(), + // new Fr(minimumOutputAmount), + // ownerAddress, + // secretHashForDepositingSwappedDai, + // ownerEthAddress.toBuffer32(), + // ]); + + // const swapPublicLeaf = sha256ToField([ + // uniswapL2Contract.address, + // new Fr(1), // aztec version + // EthAddress.fromString(uniswapPortal.address).toBuffer32(), + // new Fr(publicClient.chain.id), // chain id + // swapPublicContent, + // ]); + + // const withdrawContent = sha256ToField([ + // Buffer.from(toFunctionSelector('withdraw(address,uint256,address)').substring(2), 'hex'), + // uniswapPortalAddress.toBuffer32(), + // new Fr(wethAmountToBridge), + // uniswapPortalAddress.toBuffer32(), + // ]); + + // const withdrawLeaf = sha256ToField([ + // wethCrossChainHarness.l2Bridge.address, + // new Fr(1), // aztec version + // wethCrossChainHarness.tokenPortalAddress.toBuffer32(), + // new Fr(publicClient.chain.id), // chain id + // withdrawContent, + // ]); + + // // check weth balance of owner on L2 (we first bridged `wethAmountToBridge` into L2 and now withdrew it!) + // await wethCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, wethL2BalanceBeforeSwap - wethAmountToBridge); + + // // 5. Perform the swap on L1 with the `uniswapPortal.swap_private()` (consuming L2 to L1 messages) + // logger.info('Execute withdraw and swap on the uniswapPortal!'); + // const daiL1BalanceOfPortalBeforeSwap = await daiCrossChainHarness.getL1BalanceOf( + // daiCrossChainHarness.tokenPortalAddress, + // ); + + // const [swapPrivateL2MessageIndex, swapPrivateSiblingPath] = await aztecNode.getL2ToL1MessageMembershipWitness( + // uniswapL2Interaction.blockNumber!, + // swapPublicLeaf, + // ); + // const [withdrawL2MessageIndex, withdrawSiblingPath] = await aztecNode.getL2ToL1MessageMembershipWitness( + // uniswapL2Interaction.blockNumber!, + // withdrawLeaf, + // ); + + // const withdrawMessageMetadata = { + // _l2BlockNumber: BigInt(uniswapL2Interaction.blockNumber!), + // _leafIndex: BigInt(withdrawL2MessageIndex), + // _path: withdrawSiblingPath + // .toBufferArray() + // .map((buf: Buffer) => `0x${buf.toString('hex')}`) as readonly `0x${string}`[], + // }; + + // const swapPrivateMessageMetadata = { + // _l2BlockNumber: BigInt(uniswapL2Interaction.blockNumber!), + // _leafIndex: BigInt(swapPrivateL2MessageIndex), + // _path: swapPrivateSiblingPath + // .toBufferArray() + // .map((buf: Buffer) => `0x${buf.toString('hex')}`) as readonly `0x${string}`[], + // }; + + // const swapArgs = [ + // wethCrossChainHarness.tokenPortalAddress.toString(), + // wethAmountToBridge, + // Number(uniswapFeeTier), + // daiCrossChainHarness.tokenPortalAddress.toString(), + // minimumOutputAmount, + // ownerAddress.toString(), + // secretHashForDepositingSwappedDai.toString(), + // true, + // [withdrawMessageMetadata, swapPrivateMessageMetadata], + // ] as const; + + // // this should also insert a message into the inbox. + // const txHash = await uniswapPortal.write.swapPublic(swapArgs, {} as any); + + // // We get the msg leaf from event so that we can later wait for it to be available for consumption + // let outTokenDepositMsgHash: Fr; + // { + // const txReceipt = await daiCrossChainHarness.publicClient.waitForTransactionReceipt({ + // hash: txHash, + // }); + + // const txLog = txReceipt.logs[9]; + // const topics = decodeEventLog({ + // abi: InboxAbi, + // data: txLog.data, + // topics: txLog.topics, + // }); + // outTokenDepositMsgHash = Fr.fromString(topics.args.hash); + // } + + // // weth was swapped to dai and send to portal + // const daiL1BalanceOfPortalAfter = await daiCrossChainHarness.getL1BalanceOf( + // daiCrossChainHarness.tokenPortalAddress, + // ); + // expect(daiL1BalanceOfPortalAfter).toBeGreaterThan(daiL1BalanceOfPortalBeforeSwap); + // const daiAmountToBridge = BigInt(daiL1BalanceOfPortalAfter - daiL1BalanceOfPortalBeforeSwap); + + // // Wait for the message to be available for consumption + // await daiCrossChainHarness.makeMessageConsumable(outTokenDepositMsgHash); + + // // Get message leaf index, needed for claiming in public + // const outTokenDepositMaybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness( + // 'latest', + // outTokenDepositMsgHash, + // 0n, + // ); + // assert(outTokenDepositMaybeIndexAndPath !== undefined, 'Message not found in tree'); + // const outTokenDepositMessageLeafIndex = outTokenDepositMaybeIndexAndPath[0]; + + // // 6. claim dai on L2 + // logger.info('Consuming messages to mint dai on L2'); + // await daiCrossChainHarness.consumeMessageOnAztecAndMintPublicly( + // daiAmountToBridge, + // secretForDepositingSwappedDai, + // outTokenDepositMessageLeafIndex, + // ); + // await daiCrossChainHarness.expectPublicBalanceOnL2(ownerAddress, daiL2BalanceBeforeSwap + daiAmountToBridge); + + // const wethL2BalanceAfterSwap = await wethCrossChainHarness.getL2PublicBalanceOf(ownerAddress); + // const daiL2BalanceAfterSwap = await daiCrossChainHarness.getL2PublicBalanceOf(ownerAddress); + + // logger.info('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); + // logger.info('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); + // logger.info('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); + // logger.info('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); + // logger.info('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); + // }); + // // docs:end:uniswap_public // Edge cases for the private flow: // note - tests for uniswapPortal.sol and minting asset on L2 are covered in other tests. @@ -1071,4 +1057,4 @@ export const uniswapL1L2TestSuite = ( ).rejects.toThrow('The contract function "swapPrivate" reverted.'); }); }); -}; +}; \ No newline at end of file From f89db78d17d4b94559bb37c780954eb4ede42f15 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 12 Jul 2024 19:21:44 +0000 Subject: [PATCH 22/22] format fix --- .../end-to-end/src/shared/uniswap_l1_l2.ts | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index 65a2e4a8ca6..f2baacdb8e0 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -1,21 +1,35 @@ -import { type AccountWallet, AztecAddress, type AztecNode, type DebugLogger, EthAddress, Fr, type PXE, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { + type AccountWallet, + AztecAddress, + type AztecNode, + type DebugLogger, + EthAddress, + Fr, + type PXE, + computeAuthWitMessageHash, +} from '@aztec/aztec.js'; import { deployL1Contract } from '@aztec/ethereum'; import { sha256ToField } from '@aztec/foundation/crypto'; import { InboxAbi, UniswapPortalAbi, UniswapPortalBytecode } from '@aztec/l1-artifacts'; import { UniswapContract } from '@aztec/noir-contracts.js/Uniswap'; - - import { jest } from '@jest/globals'; -import { strict as assert } from 'assert'; -import { type Account, type Chain, type GetContractReturnType, type HttpTransport, type PublicClient, type WalletClient, decodeEventLog, getContract, parseEther, toFunctionSelector } from 'viem'; - - +import { + type Account, + type Chain, + type GetContractReturnType, + type HttpTransport, + type PublicClient, + type WalletClient, + decodeEventLog, + getContract, + parseEther, + toFunctionSelector, +} from 'viem'; import { publicDeployAccounts } from '../fixtures/utils.js'; import { CrossChainTestHarness } from './cross_chain_test_harness.js'; - // PSA: This tests works on forked mainnet. There is a dump of the data in `dumpedState` such that we // don't need to burn through RPC requests. // To generate a new dump, use the `dumpChainState` cheatcode. @@ -90,7 +104,7 @@ export const uniswapL1L2TestSuite = ( } ownerAddress = ownerWallet.getAddress(); - sponsorAddress = sponsorWallet.getAddress(); + // sponsorAddress = sponsorWallet.getAddress(); ownerEthAddress = EthAddress.fromString((await walletClient.getAddresses())[0]); await publicDeployAccounts(ownerWallet, [ownerWallet, sponsorWallet]); @@ -1057,4 +1071,4 @@ export const uniswapL1L2TestSuite = ( ).rejects.toThrow('The contract function "swapPrivate" reverted.'); }); }); -}; \ No newline at end of file +};