diff --git a/.github/ISSUE_TEMPLATE/epic.md b/.github/ISSUE_TEMPLATE/epic.md
new file mode 100644
index 000000000..efed85578
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/epic.md
@@ -0,0 +1,30 @@
+---
+name: Epic/Major Feature
+about: File an Epic
+assignees: 'dfox-consensys,julien-marchand'
+---
+
+## Description
+Write a clear and concise description of what you want to happen. Add any considered drawbacks.
+
+## Motivation
+This section should indicate what value we are receiving from it.
+
+## Architecture
+- [ ] Link to an architecture document
+
+## Implementation guidelines
+- [ ] List implementation steps to complete the epic/feature
+
+## Metrics/monitoring
+- [ ] What needs to be monitored once this epic has been shipped?
+
+## Rollout
+- [ ] Describe rollout plan for this epic
+
+## Infrastructure & Node operators
+- [ ] What are the impact and migration steps required for infrastructure and node operators?
+
+## Documentation & communication
+- [ ] What needs to be documented for this feature?
+
diff --git a/.github/actions/check-image-tags-exist/action.yml b/.github/actions/check-image-tags-exist/action.yml
index 1caf8e001..e3ab6eb74 100644
--- a/.github/actions/check-image-tags-exist/action.yml
+++ b/.github/actions/check-image-tags-exist/action.yml
@@ -5,48 +5,24 @@ inputs:
last_commit_tag:
description: 'The tag of the last commit image'
required: true
- common_ancestor_tag:
- description: 'The tag of the common ancestor commit image'
- required: true
image_name:
description: 'The name of the image to check'
required: true
- docker_username:
- description: 'The username to login to Docker Hub'
- required: true
- docker_password:
- description: 'The password to login to Docker Hub'
- required: true
outputs:
last_commit_tag_exists:
description: 'Whether the last commit image tag exists'
value: ${{ steps.last_commit_image_exists.outputs.last_commit_tag_exists }}
- common_ancestor_commit_tag_exists:
- description: 'Whether the common ancestor commit image tag exists'
- value: ${{ steps.ancestor_commit_image_exists.outputs.common_ancestor_commit_tag_exists }}
runs:
using: 'composite'
steps:
- - name: Login to Docker Hub
- uses: docker/login-action@v2
- with:
- username: ${{ inputs.docker_username }}
- password: ${{ inputs.docker_password }}
- name: Check last commit image tag exists
id: last_commit_image_exists
shell: bash
continue-on-error: true
run: |
echo last_commit_tag_exists=$(docker pull ${{ inputs.image_name }}:${{ inputs.last_commit_tag }} > /dev/null ; echo $?) >> $GITHUB_OUTPUT
- - name: Check ancestor commit image tag exists
- shell: bash
- id: ancestor_commit_image_exists
- continue-on-error: true
- run: |
- echo common_ancestor_commit_tag_exists=$(docker pull ${{ inputs.image_name }}:${{ inputs.common_ancestor_tag }} > /dev/null ; echo $?) >> $GITHUB_OUTPUT
- name: Show outputs
shell: bash
run: |
echo "last_commit_tag_exists: ${{ steps.last_commit_image_exists.outputs.last_commit_tag_exists }}"
- echo "common_ancestor_commit_tag_exists: ${{ steps.ancestor_commit_image_exists.outputs.common_ancestor_commit_tag_exists }}"
\ No newline at end of file
diff --git a/.github/actions/image-tag-and-push/action.yml b/.github/actions/image-tag-and-push/action.yml
index 555dc0e09..519e526ef 100644
--- a/.github/actions/image-tag-and-push/action.yml
+++ b/.github/actions/image-tag-and-push/action.yml
@@ -8,33 +8,24 @@ inputs:
last_commit_tag:
description: 'The tag of the last commit image'
required: true
- common_ancestor_tag:
- description: 'The tag of the common ancestor commit image'
- required: true
develop_tag:
description: 'The tag of the develop image'
required: true
- untested_tag_suffix:
- description: 'The suffix to add to untested images'
- required: true
image_name:
description: 'The name of the image to tag and push'
required: true
last_commit_tag_exists:
description: 'Whether the last commit image tag exists'
required: true
- common_ancestor_commit_tag_exists:
- description: 'Whether the common ancestor commit image tag exists'
- required: true
docker_username:
description: 'The username to login to Docker Hub'
- required: true
+ required: false
docker_password:
description: 'The password to login to Docker Hub'
- required: true
+ required: false
outputs:
- image_tagged:
+ image_tagged:
description: 'Whether the image was tagged'
value: ${{ steps.set-output.outputs.image_tagged }}
@@ -42,6 +33,7 @@ runs:
using: 'composite'
steps:
- name: Login to Docker Hub
+ if: ${{ github.ref == 'refs/heads/main' && inputs.last_commit_tag_exists == '0' }}
uses: docker/login-action@v2
with:
username: ${{ inputs.docker_username }}
@@ -50,29 +42,15 @@ runs:
shell: bash
run: |
echo IMAGE_TAGGED=false >> $GITHUB_ENV
- - name: Tag Docker image with last commit tag with the commit hash plus w/o "untested" suffix
- shell: bash
- if: ${{ inputs.last_commit_tag != '0000000' && inputs.last_commit_tag_exists == '0' }}
- run: |
- docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.commit_tag }} ${{ inputs.image_name }}:${{ inputs.last_commit_tag }}
- docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }} ${{ inputs.image_name }}:${{ inputs.last_commit_tag }}
- echo IMAGE_TAGGED=true >> $GITHUB_ENV
- - name: Tag Docker image with common ancestor commit tag with the commit hash plus w/o "untested" suffix
- shell: bash
- if: ${{ inputs.last_commit_tag == '0000000' && inputs.common_ancestor_commit_tag_exists == '0' }}
- run: |
- docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.commit_tag }} ${{ inputs.image_name }}:${{ inputs.common_ancestor_tag }}
- docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }} ${{ inputs.image_name }}:${{ inputs.common_ancestor_tag }}
- echo IMAGE_TAGGED=true >> $GITHUB_ENV
- name: Tag Docker image with develop if on main branch
shell: bash
if: ${{ github.ref == 'refs/heads/main' && inputs.last_commit_tag_exists == '0' }}
run: |
docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.develop_tag }} ${{ inputs.image_name }}:${{ inputs.last_commit_tag }}
+ docker buildx imagetools create --tag ${{ inputs.image_name }}:${{ inputs.commit_tag }} ${{ inputs.image_name }}:${{ inputs.last_commit_tag }}
echo IMAGE_TAGGED=true >> $GITHUB_ENV
-
- name: Set output from environment variable
shell: bash
id: set-output
run: |
- echo "image_tagged=$IMAGE_TAGGED" >> $GITHUB_OUTPUT
\ No newline at end of file
+ echo "image_tagged=$IMAGE_TAGGED" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/all-tools.yml b/.github/workflows/all-tools.yml
index edcfc7fe1..a2405eec5 100644
--- a/.github/workflows/all-tools.yml
+++ b/.github/workflows/all-tools.yml
@@ -50,10 +50,7 @@ jobs:
uses: ./.github/actions/check-image-tags-exist
with:
last_commit_tag: ${{ needs.store_image_name_and_tags.outputs.last_commit_tag }}
- common_ancestor_tag: ${{ needs.store_image_name_and_tags.outputs.common_ancestor_tag }}
image_name: consensys/linea-alltools
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
all-tools-tag-only:
runs-on: [self-hosted, ubuntu-20.04, X64, small]
@@ -71,12 +68,9 @@ jobs:
with:
commit_tag: ${{ needs.store_image_name_and_tags.outputs.commit_tag }}
last_commit_tag: ${{ needs.store_image_name_and_tags.outputs.last_commit_tag }}
- common_ancestor_tag: ${{ needs.store_image_name_and_tags.outputs.common_ancestor_tag }}
develop_tag: ${{ needs.store_image_name_and_tags.outputs.develop_tag }}
- untested_tag_suffix: ${{ needs.store_image_name_and_tags.outputs.untested_tag_suffix }}
image_name: consensys/linea-alltools
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/build-and-publish.yml b/.github/workflows/build-and-publish.yml
index c6dad7628..b1298f8f1 100644
--- a/.github/workflows/build-and-publish.yml
+++ b/.github/workflows/build-and-publish.yml
@@ -6,18 +6,13 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
coordinator_changed:
required: true
type: string
@@ -50,9 +45,9 @@ on:
type: string
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
- required: true
+ required: false
jobs:
coordinator:
@@ -60,11 +55,9 @@ jobs:
if: ${{ always() && (inputs.coordinator_changed == 'true' || inputs.coordinator_image_tagged != 'true') }}
with:
commit_tag: ${{ inputs.commit_tag }}
- last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-coordinator
+ push_image: ${{ inputs.push_image }}
secrets: inherit
prover:
@@ -72,11 +65,9 @@ jobs:
if: ${{ always() && (inputs.prover_changed == 'true' || inputs.prover_image_tagged != 'true') }}
with:
commit_tag: ${{ inputs.commit_tag }}
- last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-prover
+ push_image: ${{ inputs.push_image }}
secrets: inherit
postman:
@@ -84,11 +75,9 @@ jobs:
if: ${{ always() && (inputs.postman_changed == 'true' || inputs.postman_image_tagged != 'true') }}
with:
commit_tag: ${{ inputs.commit_tag }}
- last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-postman
+ push_image: ${{ inputs.push_image }}
secrets: inherit
traces-api-facade:
@@ -96,11 +85,9 @@ jobs:
if: ${{ always() && (inputs.traces_api_facade_changed == 'true' || inputs.traces_api_facade_image_tagged != 'true') }}
with:
commit_tag: ${{ inputs.commit_tag }}
- last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-traces-api-facade
+ push_image: ${{ inputs.push_image }}
secrets: inherit
transaction_exclusion_api:
@@ -108,9 +95,7 @@ jobs:
if: ${{ always() && (inputs.transaction_exclusion_api_changed == 'true' || inputs.transaction_exclusion_api_image_tagged != 'true') }}
with:
commit_tag: ${{ inputs.commit_tag }}
- last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-transaction-exclusion-api
+ push_image: ${{ inputs.push_image }}
secrets: inherit
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index f182c60b5..80777cb15 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -75,3 +75,11 @@ jobs:
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
+ output: sarif-results
+        # NOTE: codeql-action/analyze has no 'sarif-file' input; it writes <language>.sarif into the 'output' directory
+ - name: Upload CodeQL Results
+ uses: actions/upload-artifact@v3
+ with:
+ name: codeql-results-${{ matrix.language }}
+ path: sarif-results
+ retention-days: 1
diff --git a/.github/workflows/coordinator-build-and-publish.yml b/.github/workflows/coordinator-build-and-publish.yml
index 02a4f4bce..e964f0e58 100644
--- a/.github/workflows/coordinator-build-and-publish.yml
+++ b/.github/workflows/coordinator-build-and-publish.yml
@@ -6,26 +6,44 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
image_name:
required: true
type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
secrets:
DOCKERHUB_USERNAME:
required: true
DOCKERHUB_TOKEN:
required: true
+ workflow_dispatch:
+ inputs:
+ commit_tag:
+ description: 'Image tag'
+ required: true
+ type: string
+ develop_tag:
+ description: 'Image tag will be "develop" if target branch is main'
+ required: true
+ type: choice
+ options:
+ - develop
+ default: 'develop'
+ image_name:
+ description: 'Image name'
+ required: true
+ type: string
+ default: 'consensys/linea-coordinator'
+ push_image:
+ description: 'Toggle whether to push image to docker registry'
+ required: false
+ type: boolean
+ default: true
concurrency:
group: coordinator-build-and-publish-${{ github.workflow }}-${{ github.ref }}
@@ -38,10 +56,16 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
DEVELOP_TAG: ${{ inputs.develop_tag }}
- UNTESTED_TAG_SUFFIX: ${{ inputs.untested_tag_suffix }}
IMAGE_NAME: ${{ inputs.image_name }}
- GITHUB_TOKEN: ${{ secrets._GITHUB_TOKEN_RELEASE_ACCESS }}
+ PUSH_IMAGE: ${{ inputs.push_image }}
+ TAGS: ${{ inputs.image_name }}:${{ inputs.commit_tag }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
+ - name: Set develop tag if main branch
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |
+ echo "TAGS=${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }},${{ env.IMAGE_NAME }}:${{ env.DEVELOP_TAG }}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v4
- uses: actions/setup-java@v4
@@ -54,13 +78,14 @@ jobs:
run: |
./gradlew coordinator:app:distZip --no-daemon
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- - name: Set up Docker Buildx
+ - name: Set up Docker Buildx - local
id: buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
@@ -68,15 +93,38 @@ jobs:
uses: docker/metadata-action@v3
with:
images: ${{ env.IMAGE_NAME }}
+ - name: Build for testing
+ uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ with:
+ context: .
+ build-contexts: zip=./coordinator/app/build/distributions/
+ file: ./coordinator/Dockerfile
+ platforms: linux/amd64
+ load: true
+ push: false
+ tags: ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}
+ - name: Save Docker image as artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ run: |
+ docker save ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }} | gzip > linea-coordinator-docker-image.tar.gz
+ shell: bash
+ - name: Upload Docker image artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: linea-coordinator
+ path: linea-coordinator-docker-image.tar.gz
- name: Build & push
uses: docker/build-push-action@v6
+      if: ${{ env.PUSH_IMAGE == 'true' }}
with:
context: .
build-contexts: zip=./coordinator/app/build/distributions/
file: ./coordinator/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
- tags: |
- ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}-${{ env.UNTESTED_TAG_SUFFIX }}
+ tags: ${{ env.TAGS }}
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache
cache-to: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache,mode=max
+
diff --git a/.github/workflows/coordinator-testing.yml b/.github/workflows/coordinator-testing.yml
index 58abc2ad4..96ad4a3b8 100644
--- a/.github/workflows/coordinator-testing.yml
+++ b/.github/workflows/coordinator-testing.yml
@@ -8,9 +8,9 @@ on:
type: string
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
- required: true
+ required: false
concurrency:
group: coordinator-testing-${{ github.workflow }}-${{ github.ref }}
@@ -24,6 +24,8 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
GITHUB_TOKEN: ${{ secrets._GITHUB_TOKEN_RELEASE_ACCESS }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
runs-on: [self-hosted, ubuntu-22.04, X64, medium]
name: Coordinator tests
steps:
@@ -52,6 +54,7 @@ jobs:
run: |
./gradlew -V coordinator:app:buildNeeded
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 94241b453..d4f1ab769 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -118,9 +118,7 @@ jobs:
with:
commit_tag: ${{ needs.store-image-name-and-tags.outputs.commit_tag }}
last_commit_tag: ${{ needs.store-image-name-and-tags.outputs.last_commit_tag }}
- common_ancestor_tag: ${{ needs.store-image-name-and-tags.outputs.common_ancestor_tag }}
develop_tag: ${{ needs.store-image-name-and-tags.outputs.develop_tag }}
- untested_tag_suffix: ${{ needs.store-image-name-and-tags.outputs.untested_tag_suffix }}
coordinator_changed: ${{ needs.filter-commit-changes.outputs.coordinator }}
postman_changed: ${{ needs.filter-commit-changes.outputs.postman }}
prover_changed: ${{ needs.filter-commit-changes.outputs.prover }}
@@ -132,6 +130,9 @@ jobs:
runs-on: [self-hosted, ubuntu-20.04, X64, small]
needs: [ store-image-name-and-tags, filter-commit-changes, check-and-tag-images ]
environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }}
+ concurrency:
+ group: manual-docker-build-and-e2e-tests-${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
steps:
- name: Deploy environment
run: |
@@ -142,10 +143,7 @@ jobs:
uses: ./.github/workflows/build-and-publish.yml
with:
commit_tag: ${{ needs.store-image-name-and-tags.outputs.commit_tag }}
- last_commit_tag: ${{ needs.store-image-name-and-tags.outputs.last_commit_tag }}
- common_ancestor_tag: ${{ needs.store-image-name-and-tags.outputs.common_ancestor_tag }}
develop_tag: ${{ needs.store-image-name-and-tags.outputs.develop_tag }}
- untested_tag_suffix: ${{ needs.store-image-name-and-tags.outputs.untested_tag_suffix }}
coordinator_changed: ${{ needs.filter-commit-changes.outputs.coordinator }}
postman_changed: ${{ needs.filter-commit-changes.outputs.postman }}
prover_changed: ${{ needs.filter-commit-changes.outputs.prover }}
@@ -193,7 +191,6 @@ jobs:
uses: ./.github/workflows/reuse-run-e2e-tests.yml
with:
commit_tag: ${{ needs.store-image-name-and-tags.outputs.commit_tag }}
- untested_tag_suffix: ${{ needs.store-image-name-and-tags.outputs.untested_tag_suffix }}
tracing-engine: 'geth'
e2e-tests-logs-dump: true
secrets: inherit
@@ -207,20 +204,28 @@ jobs:
uses: ./.github/workflows/reuse-run-e2e-tests.yml
with:
commit_tag: ${{ needs.store-image-name-and-tags.outputs.commit_tag }}
- untested_tag_suffix: ${{ needs.store-image-name-and-tags.outputs.untested_tag_suffix }}
tracing-engine: 'besu'
e2e-tests-logs-dump: true
secrets: inherit
- tag-after-run-tests-success:
+ publish-images-after-run-tests-success-on-main:
-    needs: [ store-image-name-and-tags, testing, run-e2e-tests, run-e2e-tests-geth-tracing ]
+    needs: [ store-image-name-and-tags, filter-commit-changes, check-and-tag-images, testing, run-e2e-tests, run-e2e-tests-geth-tracing ]
- if: ${{ always() && needs.testing.result == 'success' && needs.run-e2e-tests.outputs.tests_outcome == 'success' && needs.run-e2e-tests-geth-tracing.outputs.tests_outcome == 'success' }}
- uses: ./.github/workflows/reuse-tag-without-untested-suffix.yml
+ if: ${{ always() && github.ref == 'refs/heads/main' && needs.testing.result == 'success' && needs.run-e2e-tests.outputs.tests_outcome == 'success' && needs.run-e2e-tests-geth-tracing.outputs.tests_outcome == 'success' }}
+ uses: ./.github/workflows/build-and-publish.yml
with:
+ push_image: true
commit_tag: ${{ needs.store-image-name-and-tags.outputs.commit_tag }}
develop_tag: ${{ needs.store-image-name-and-tags.outputs.develop_tag }}
- untested_tag_suffix: ${{ needs.store-image-name-and-tags.outputs.untested_tag_suffix }}
- image_names: '["consensys/linea-coordinator", "consensys/linea-postman", "consensys/linea-prover", "consensys/linea-traces-api-facade", "consensys/linea-transaction-exclusion-api"]'
+ coordinator_changed: ${{ needs.filter-commit-changes.outputs.coordinator }}
+ postman_changed: ${{ needs.filter-commit-changes.outputs.postman }}
+ prover_changed: ${{ needs.filter-commit-changes.outputs.prover }}
+ traces_api_facade_changed: ${{ needs.filter-commit-changes.outputs.traces-api-facade }}
+ transaction_exclusion_api_changed: ${{ needs.filter-commit-changes.outputs.transaction-exclusion-api }}
+ coordinator_image_tagged: ${{ needs.check-and-tag-images.outputs.image_tagged_coordinator }}
+ postman_image_tagged: ${{ needs.check-and-tag-images.outputs.image_tagged_postman }}
+ prover_image_tagged: ${{ needs.check-and-tag-images.outputs.image_tagged_prover }}
+ traces_api_facade_image_tagged: ${{ needs.check-and-tag-images.outputs.image_tagged_traces_api_facade }}
+ transaction_exclusion_api_image_tagged: ${{ needs.check-and-tag-images.outputs.image_tagged_transaction_exclusion_api }}
secrets: inherit
cleanup-deployments:
diff --git a/.github/workflows/postman-build-and-publish.yml b/.github/workflows/postman-build-and-publish.yml
index ab2246112..d95715e99 100644
--- a/.github/workflows/postman-build-and-publish.yml
+++ b/.github/workflows/postman-build-and-publish.yml
@@ -6,26 +6,44 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
image_name:
required: true
type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
+ required: false
+ workflow_dispatch:
+ inputs:
+ commit_tag:
+ description: 'Image tag'
+ required: true
+ type: string
+ develop_tag:
+ description: 'Image tag will be "develop" if target branch is main'
required: true
+ type: choice
+ options:
+ - develop
+ default: 'develop'
+ image_name:
+ description: 'Image name'
+ required: true
+ type: string
+ default: 'consensys/linea-postman'
+ push_image:
+ description: 'Toggle whether to push image to docker registry'
+ required: false
+ type: boolean
+ default: true
concurrency:
group: postman-build-and-publish-${{ github.workflow }}-${{ github.ref }}
@@ -38,9 +56,16 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
DEVELOP_TAG: ${{ inputs.develop_tag }}
- UNTESTED_TAG_SUFFIX: ${{ inputs.untested_tag_suffix }}
IMAGE_NAME: ${{ inputs.image_name }}
+ PUSH_IMAGE: ${{ inputs.push_image }}
+ TAGS: ${{ inputs.image_name }}:${{ inputs.commit_tag }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
+ - name: Set develop tag if main branch
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |
+ echo "TAGS=${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }},${{ env.IMAGE_NAME }}:${{ env.DEVELOP_TAG }}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v3
with:
@@ -48,6 +73,7 @@ jobs:
submodules: true
persist-credentials: false
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -62,15 +88,38 @@ jobs:
run: |
echo "We inject the commit tag in the docker image ${{ env.COMMIT_TAG }}"
echo COMMIT_TAG=${{ env.COMMIT_TAG }} >> $GITHUB_ENV
+ - name: Build postman image for testing
+ uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ with:
+ context: ./
+ file: ./sdk/Dockerfile
+ platforms: linux/amd64
+ load: true
+ push: false
+ tags: ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}
+ build-args: |
+ NATIVE_LIBS_RELEASE_TAG=blob-libs-v1.0.1
+ - name: Save Docker image as artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ run: |
+ docker save ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }} | gzip > linea-postman-docker-image.tar.gz
+ shell: bash
+ - name: Upload Docker image artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: linea-postman
+ path: linea-postman-docker-image.tar.gz
- name: Build and push postman image
uses: docker/build-push-action@v6
+      if: ${{ env.PUSH_IMAGE == 'true' }}
with:
context: ./
file: ./sdk/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
- tags: |
- ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}-${{ env.UNTESTED_TAG_SUFFIX }}
+ tags: ${{ env.TAGS }}
cache-from: |
type=registry,ref=${{ env.IMAGE_NAME }}:buildcache-amd64,platform=linux/amd64
type=registry,ref=${{ env.IMAGE_NAME }}:buildcache-arm64,platform=linux/arm64
diff --git a/.github/workflows/postman-testing.yml b/.github/workflows/postman-testing.yml
index 64071e2f1..a352614ef 100644
--- a/.github/workflows/postman-testing.yml
+++ b/.github/workflows/postman-testing.yml
@@ -2,11 +2,6 @@ name: postman-and-SDK-testing
on:
workflow_call:
- secrets:
- DOCKERHUB_USERNAME:
- required: true
- DOCKERHUB_TOKEN:
- required: true
concurrency:
group: postman-testing-${{ github.workflow }}-${{ github.ref }}
diff --git a/.github/workflows/prover-build-and-publish.yml b/.github/workflows/prover-build-and-publish.yml
index bae07ab60..fd8ad2029 100644
--- a/.github/workflows/prover-build-and-publish.yml
+++ b/.github/workflows/prover-build-and-publish.yml
@@ -6,26 +6,44 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
image_name:
required: true
type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
+ required: false
+ workflow_dispatch:
+ inputs:
+ commit_tag:
+ description: 'Image tag'
+ required: true
+ type: string
+ develop_tag:
+ description: 'Image tag will be "develop" if target branch is main'
required: true
+ type: choice
+ options:
+ - develop
+ default: 'develop'
+ image_name:
+ description: 'Image name'
+ required: true
+ type: string
+ default: 'consensys/linea-prover'
+ push_image:
+ description: 'Toggle whether to push image to docker registry'
+ required: false
+ type: boolean
+ default: true
concurrency:
group: prover-build-and-publish-${{ github.workflow }}-${{ github.ref }}
@@ -41,9 +59,16 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
DEVELOP_TAG: ${{ inputs.develop_tag }}
- UNTESTED_TAG_SUFFIX: ${{ inputs.untested_tag_suffix }}
IMAGE_NAME: ${{ inputs.image_name }}
+ PUSH_IMAGE: ${{ inputs.push_image }}
+ TAGS: ${{ inputs.image_name }}:${{ inputs.commit_tag }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
+ - name: Set develop tag if main branch
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |
+ echo "TAGS=${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }},${{ env.IMAGE_NAME }}:${{ env.DEVELOP_TAG }}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v4
with:
@@ -51,6 +76,7 @@ jobs:
submodules: true
persist-credentials: false
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -63,6 +89,34 @@ jobs:
echo COMMIT_TAG=${{ env.COMMIT_TAG }} >> $GITHUB_ENV
- name: Build and push prover image
uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ with:
+ context: .
+ file: ./prover/Dockerfile
+ build-args: |
+ RUSTFLAGS="-C target-cpu=x86-64-v3"
+ build-contexts: |
+ prover=prover/
+ corset=corset/
+ constraints=constraints/
+ platforms: linux/amd64
+ load: true
+ push: false
+ tags: ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}
+ - name: Save Docker image as artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ run: |
+ docker save ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }} | gzip > linea-prover-docker-image.tar.gz
+ shell: bash
+ - name: Upload Docker image artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: linea-prover
+ path: linea-prover-docker-image.tar.gz
+ - name: Build and push prover image
+ uses: docker/build-push-action@v6
+      if: ${{ env.PUSH_IMAGE == 'true' }}
with:
context: .
file: ./prover/Dockerfile
@@ -73,8 +127,8 @@ jobs:
corset=corset/
constraints=constraints/
platforms: linux/amd64
+ load: false
push: true
- tags: |
- ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}-${{ env.UNTESTED_TAG_SUFFIX }}
+ tags: ${{ env.TAGS }}
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache
cache-to: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache,mode=max
diff --git a/.github/workflows/prover-native-lib-blob-compressor-release.yml b/.github/workflows/prover-native-lib-blob-compressor-release.yml
index 5c0bb0174..6874cf214 100644
--- a/.github/workflows/prover-native-lib-blob-compressor-release.yml
+++ b/.github/workflows/prover-native-lib-blob-compressor-release.yml
@@ -7,6 +7,7 @@ on:
description: 'Version (e.g. v1.2.3)'
required: true
default: 'v0.0.0'
+ type: string
draft-release:
description: 'Draft Release'
required: false
@@ -37,13 +38,16 @@ jobs:
VERSION: ${{ github.event.inputs.version }}
SRC_SHNARF: "./lib/shnarf_calculator/shnarf_calculator.go"
TARGET_SHNARF: "shnarf_calculator"
- SRC_COMPRESSOR: "./lib/compressor/libcompressor.go"
+ SRC_COMPRESSOR: "./lib/compressor/libcompressor/libcompressor.go"
TARGET_COMPRESSOR: "blob_compressor"
+ SRC_DECOMPRESSOR: "./lib/compressor/libdecompressor/libdecompressor.go"
+ TARGET_DECOMPRESSOR: "blob_decompressor"
run: |
cd prover
mkdir target
GOARCH="amd64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_SHNARF}_${VERSION}_linux_x86_64.so ${SRC_SHNARF}
GOARCH="amd64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_COMPRESSOR}_${VERSION}_linux_x86_64.so ${SRC_COMPRESSOR}
+ GOARCH="amd64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_DECOMPRESSOR}_${VERSION}_linux_x86_64.so ${SRC_DECOMPRESSOR}
- name: Cache built binaries
uses: actions/upload-artifact@master
@@ -69,13 +73,16 @@ jobs:
VERSION: ${{ github.event.inputs.version }}
SRC_SHNARF: "./lib/shnarf_calculator/shnarf_calculator.go"
TARGET_SHNARF: "shnarf_calculator"
- SRC_COMPRESSOR: "./lib/compressor/libcompressor.go"
+ SRC_COMPRESSOR: "./lib/compressor/libcompressor/libcompressor.go"
TARGET_COMPRESSOR: "blob_compressor"
+ SRC_DECOMPRESSOR: "./lib/compressor/libdecompressor/libdecompressor.go"
+ TARGET_DECOMPRESSOR: "blob_decompressor"
run: |
cd prover
mkdir target
GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_SHNARF}_${VERSION}_linux_arm64.so ${SRC_SHNARF}
GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_COMPRESSOR}_${VERSION}_linux_arm64.so ${SRC_COMPRESSOR}
+ GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_DECOMPRESSOR}_${VERSION}_linux_arm64.so ${SRC_DECOMPRESSOR}
- name: Cache built binaries
uses: actions/upload-artifact@master
with:
@@ -98,8 +105,10 @@ jobs:
VERSION: ${{ github.event.inputs.version }}
SRC_SHNARF: "./lib/shnarf_calculator/shnarf_calculator.go"
TARGET_SHNARF: "shnarf_calculator"
- SRC_COMPRESSOR: "./lib/compressor/libcompressor.go"
+ SRC_COMPRESSOR: "./lib/compressor/libcompressor/libcompressor.go"
TARGET_COMPRESSOR: "blob_compressor"
+ SRC_DECOMPRESSOR: "./lib/compressor/libdecompressor/libdecompressor.go"
+ TARGET_DECOMPRESSOR: "blob_decompressor"
run: |
cd prover
mkdir target
@@ -107,6 +116,8 @@ jobs:
GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_SHNARF}_${VERSION}_darwin_arm64.dylib ${SRC_SHNARF}
GOARCH="amd64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_COMPRESSOR}_${VERSION}_darwin_x86_64.dylib ${SRC_COMPRESSOR}
GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_COMPRESSOR}_${VERSION}_darwin_arm64.dylib ${SRC_COMPRESSOR}
+ GOARCH="amd64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_DECOMPRESSOR}_${VERSION}_darwin_x86_64.dylib ${SRC_DECOMPRESSOR}
+ GOARCH="arm64" go build -tags=nocorset -buildmode=c-shared -o ./target/${TARGET_DECOMPRESSOR}_${VERSION}_darwin_arm64.dylib ${SRC_DECOMPRESSOR}
- name: Cache built binaries
uses: actions/upload-artifact@v4
diff --git a/.github/workflows/reuse-check-images-tags-and-push.yml b/.github/workflows/reuse-check-images-tags-and-push.yml
index 4d9ae8d22..e0e5b70d5 100644
--- a/.github/workflows/reuse-check-images-tags-and-push.yml
+++ b/.github/workflows/reuse-check-images-tags-and-push.yml
@@ -8,15 +8,9 @@ on:
last_commit_tag:
required: true
type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
coordinator_changed:
required: true
type: string
@@ -45,9 +39,9 @@ on:
value: ${{ jobs.image_tag_push.outputs.image_tagged_transaction_exclusion_api }}
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
- required: true
+ required: false
concurrency:
group: check-images-tags-and-push-${{ github.workflow }}-${{ github.ref }}
@@ -78,10 +72,7 @@ jobs:
id: check_image_tags_exist_coordinator
with:
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
image_name: consensys/linea-coordinator
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check image tags exist for postman
uses: ./.github/actions/check-image-tags-exist
@@ -89,10 +80,7 @@ jobs:
id: check_image_tags_exist_postman
with:
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
image_name: consensys/linea-postman
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check image tags exist for prover
uses: ./.github/actions/check-image-tags-exist
@@ -100,10 +88,7 @@ jobs:
id: check_image_tags_exist_prover
with:
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
image_name: consensys/linea-prover
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check image tags exist for traces-api-facade
uses: ./.github/actions/check-image-tags-exist
@@ -111,10 +96,7 @@ jobs:
id: check_image_tags_exist_traces_api_facade
with:
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
image_name: consensys/linea-traces-api-facade
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check image tags exist for transaction-exclusion-api
uses: ./.github/actions/check-image-tags-exist
@@ -122,13 +104,11 @@ jobs:
id: check_image_tags_exist_transaction_exclusion_api
with:
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
image_name: consensys/linea-transaction-exclusion-api
- docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
- docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
image_tag_push:
runs-on: [self-hosted, ubuntu-20.04, X64, small]
+ if: ${{ github.ref == 'refs/heads/main' }}
name: Tag and push images
needs: [ check_image_tags_exist ]
outputs:
@@ -148,12 +128,9 @@ jobs:
with:
commit_tag: ${{ inputs.commit_tag }}
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-coordinator
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists_coordinator }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists_coordinator }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -164,12 +141,9 @@ jobs:
with:
commit_tag: ${{ inputs.commit_tag }}
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-postman
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists_postman }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists_postman }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -180,12 +154,9 @@ jobs:
with:
commit_tag: ${{ inputs.commit_tag }}
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-prover
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists_prover }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists_prover }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -196,12 +167,9 @@ jobs:
with:
commit_tag: ${{ inputs.commit_tag }}
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-traces-api-facade
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists_traces_api_facade }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists_traces_api_facade }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -212,11 +180,8 @@ jobs:
with:
commit_tag: ${{ inputs.commit_tag }}
last_commit_tag: ${{ inputs.last_commit_tag }}
- common_ancestor_tag: ${{ inputs.common_ancestor_tag }}
develop_tag: ${{ inputs.develop_tag }}
- untested_tag_suffix: ${{ inputs.untested_tag_suffix }}
image_name: consensys/linea-transaction-exclusion-api
last_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.last_commit_tag_exists_transaction_exclusion_api }}
- common_ancestor_commit_tag_exists: ${{ needs.check_image_tags_exist.outputs.common_ancestor_commit_tag_exists_transaction_exclusion_api }}
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
docker_password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/reuse-run-e2e-tests.yml b/.github/workflows/reuse-run-e2e-tests.yml
index a5cb81beb..3c1df5333 100644
--- a/.github/workflows/reuse-run-e2e-tests.yml
+++ b/.github/workflows/reuse-run-e2e-tests.yml
@@ -6,10 +6,6 @@ on:
description: 'The commit tag to use'
required: true
type: string
- untested_tag_suffix:
- description: 'The untested tag suffix to use'
- required: true
- type: string
tracing-engine:
description: Variable option for running tests against [besu] or [geth] stack
required: true
@@ -34,9 +30,6 @@ on:
commit_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
tracing-engine:
description: Variable option for running tests against [besu] or [geth] stack
required: true
@@ -61,19 +54,21 @@ on:
value: ${{ jobs.run-e2e-tests.outputs.tests_outcome }}
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
- required: true
+ required: false
jobs:
run-e2e-tests:
env:
- COORDINATOR_TAG: ${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
- POSTMAN_TAG: ${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
- PROVER_TAG: ${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
- TRACES_API_TAG: ${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
- TRANSACTION_EXCLUSION_API_TAG: ${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
+ COORDINATOR_TAG: ${{ inputs.commit_tag }}
+ POSTMAN_TAG: ${{ inputs.commit_tag }}
+ PROVER_TAG: ${{ inputs.commit_tag }}
+ TRACES_API_TAG: ${{ inputs.commit_tag }}
+ TRANSACTION_EXCLUSION_API_TAG: ${{ inputs.commit_tag }}
GITHUB_TOKEN: ${{ secrets._GITHUB_TOKEN_RELEASE_ACCESS }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
outputs:
tests_outcome: ${{ steps.run_e2e_tests.outcome }}
runs-on: [self-hosted, ubuntu-22.04, X64, large]
@@ -88,6 +83,7 @@ jobs:
with:
pnpm-install-options: '--frozen-lockfile --prefer-offline'
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -112,7 +108,17 @@ jobs:
retry_wait_seconds: 30
timeout_minutes: 10
command: |
- make pull-all-images
+ make pull-images-external-to-monorepo
+ - name: Download local docker image artifacts
+ uses: actions/download-artifact@v4
+ - name: Load Docker images
+ run: |
+          gunzip -c ${{ github.workspace }}/linea-coordinator/linea-coordinator-docker-image.tar.gz | docker load &&
+          gunzip -c ${{ github.workspace }}/linea-postman/linea-postman-docker-image.tar.gz | docker load &&
+          gunzip -c ${{ github.workspace }}/linea-prover/linea-prover-docker-image.tar.gz | docker load &&
+          gunzip -c ${{ github.workspace }}/linea-traces-api-facade/linea-traces-api-facade-docker-image.tar.gz | docker load &&
+          gunzip -c ${{ github.workspace }}/linea-transaction-exclusion-api/linea-transaction-exclusion-api-docker-image.tar.gz | docker load
+ shell: bash
- name: Spin up fresh environment with geth tracing with retry
if: ${{ inputs.tracing-engine == 'geth' }}
uses: nick-fields/retry@v2
diff --git a/.github/workflows/reuse-store-image-name-and-tags.yml b/.github/workflows/reuse-store-image-name-and-tags.yml
index bad0023fc..05a61e23e 100644
--- a/.github/workflows/reuse-store-image-name-and-tags.yml
+++ b/.github/workflows/reuse-store-image-name-and-tags.yml
@@ -8,10 +8,6 @@ on:
value: ${{ jobs.store_image_name_and_tags.outputs.last_commit_tag }}
develop_tag:
value: ${{ jobs.store_image_name_and_tags.outputs.develop_tag }}
- common_ancestor_tag:
- value: ${{ jobs.store_image_name_and_tags.outputs.common_ancestor_tag }}
- untested_tag_suffix:
- value: ${{ jobs.store_image_name_and_tags.outputs.untested_tag_suffix }}
concurrency:
group: store-image-name-and-tags-${{ github.workflow }}-${{ github.ref }}
@@ -23,35 +19,24 @@ jobs:
name: Compute version tags
env:
# REF_NAME: ${{ github.ref_name }}
- BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
EVENT_BEFORE: ${{ github.event.before }}
outputs:
- commit_tag: ${{ steps.step2.outputs.COMMIT_TAG }}
- last_commit_tag: ${{ steps.step2.outputs.LAST_COMMIT_TAG }}
- common_ancestor_tag: ${{ steps.step2.outputs.COMMON_ANCESTOR_TAG }}
- develop_tag: ${{ steps.step2.outputs.DEVELOP_TAG }}
- untested_tag_suffix: ${{ steps.step2.outputs.UNTESTED_TAG_SUFFIX }}
+ commit_tag: ${{ steps.step1.outputs.COMMIT_TAG }}
+ last_commit_tag: ${{ steps.step1.outputs.LAST_COMMIT_TAG }}
+ develop_tag: ${{ steps.step1.outputs.DEVELOP_TAG }}
steps:
- name: Checkout
uses: actions/checkout@v3
- - name: Get common ancestor commit
- id: step1
- run: |
- git fetch --no-tags --depth=100 origin main ${{ env.BRANCH_NAME }}
- echo COMMON_ANCESTOR=$(git merge-base refs/remotes/origin/main refs/remotes/origin/${{ env.BRANCH_NAME }}) >> $GITHUB_ENV
- name: Compute version tags
- id: step2
+ id: step1
run: |
echo COMMIT_TAG=$(git rev-parse --short "$GITHUB_SHA") >> $GITHUB_OUTPUT
echo LAST_COMMIT_TAG=$(git rev-parse --short "${{ env.EVENT_BEFORE }}") >> $GITHUB_OUTPUT
echo DEVELOP_TAG=develop >> $GITHUB_OUTPUT
- echo COMMON_ANCESTOR_TAG=$(git rev-parse --short "${{ env.COMMON_ANCESTOR }}") >> $GITHUB_OUTPUT
- echo UNTESTED_TAG_SUFFIX=untested >> $GITHUB_OUTPUT
- name: Show version tags
- id: step3
+ id: step2
run: |
- echo "COMMIT_TAG: ${{ steps.step2.outputs.COMMIT_TAG }}"
- echo "LAST_COMMIT_TAG: ${{ steps.step2.outputs.LAST_COMMIT_TAG }}"
- echo "DEVELOP_TAG: ${{ steps.step2.outputs.DEVELOP_TAG }}"
- echo "COMMON_ANCESTOR_TAG: ${{ steps.step2.outputs.COMMON_ANCESTOR_TAG }}"
- echo "UNTESTED_TAG_SUFFIX: ${{ steps.step2.outputs.UNTESTED_TAG_SUFFIX }}"
+ echo "COMMIT_TAG: ${{ steps.step1.outputs.COMMIT_TAG }}"
+ echo "LAST_COMMIT_TAG: ${{ steps.step1.outputs.LAST_COMMIT_TAG }}"
+ echo "DEVELOP_TAG: ${{ steps.step1.outputs.DEVELOP_TAG }}"
+ echo "github.ref: ${{ github.ref }}"
diff --git a/.github/workflows/reuse-tag-without-untested-suffix.yml b/.github/workflows/reuse-tag-without-untested-suffix.yml
deleted file mode 100644
index 2eb3f943f..000000000
--- a/.github/workflows/reuse-tag-without-untested-suffix.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: Reusable tag without untested suffix
-on:
- workflow_call:
- inputs:
- commit_tag:
- required: true
- type: string
- develop_tag:
- required: true
- type: string
- untested_tag_suffix:
- required: true
- type: string
- image_names:
- required: true
- type: string
- secrets:
- DOCKERHUB_USERNAME:
- required: true
- DOCKERHUB_TOKEN:
- required: true
-
-jobs:
- tag-without-untested-suffix:
- runs-on: [self-hosted, ubuntu-20.04, X64, small]
- name: tag without untested suffix
- strategy:
- matrix:
- image_name: ${{ fromJSON(inputs.image_names) }}
- steps:
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Tag Docker image with develop if on main branch
- if: ${{ github.ref == 'refs/heads/main' }}
- run: |
- docker buildx imagetools create --tag ${{ matrix.image_name }}:${{ inputs.develop_tag }} ${{ matrix.image_name }}:${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
- - name: Tag Docker image with the commit hash (without the "untested" suffix)
- run: |
- docker buildx imagetools create --tag ${{ matrix.image_name }}:${{ inputs.commit_tag }} ${{ matrix.image_name }}:${{ inputs.commit_tag }}-${{ inputs.untested_tag_suffix }}
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index 571fc7cc8..b7c20c1c5 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -33,11 +33,6 @@ on:
transaction_exclusion_api_image_tagged:
required: true
type: string
- secrets:
- DOCKERHUB_USERNAME:
- required: true
- DOCKERHUB_TOKEN:
- required: true
jobs:
coordinator:
@@ -60,9 +55,7 @@ jobs:
traces-api-facade:
uses: ./.github/workflows/traces-api-facade-testing.yml
if: ${{ always() && (inputs.traces_api_facade_changed == 'true' || inputs.traces_api_facade_image_tagged != 'true') }}
- secrets: inherit
transaction-exclusion-api:
uses: ./.github/workflows/transaction-exclusion-api-testing.yml
if: ${{ always() && (inputs.transaction_exclusion_api_changed == 'true' || inputs.transaction_exclusion_api_image_tagged != 'true') }}
- secrets: inherit
diff --git a/.github/workflows/traces-api-facade-build-and-publish.yml b/.github/workflows/traces-api-facade-build-and-publish.yml
index e2dd28bbd..811e3e81a 100644
--- a/.github/workflows/traces-api-facade-build-and-publish.yml
+++ b/.github/workflows/traces-api-facade-build-and-publish.yml
@@ -6,26 +6,44 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
image_name:
required: true
type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
+ required: false
+ workflow_dispatch:
+ inputs:
+ commit_tag:
+ description: 'Image tag'
+ required: true
+ type: string
+ develop_tag:
+ description: 'Image tag will be "develop" if target branch is main'
required: true
+ type: choice
+ options:
+ - develop
+ default: 'develop'
+ image_name:
+ description: 'Image name'
+ required: true
+ type: string
+ default: 'consensys/linea-traces-api-facade'
+ push_image:
+ description: 'Toggle whether to push image to docker registry'
+ required: false
+ type: boolean
+ default: true
concurrency:
group: traces-api-facade-build-and-publish-${{ github.workflow }}-${{ github.ref }}
@@ -38,9 +56,16 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
DEVELOP_TAG: ${{ inputs.develop_tag }}
- UNTESTED_TAG_SUFFIX: ${{ inputs.untested_tag_suffix }}
IMAGE_NAME: ${{ inputs.image_name }}
+ PUSH_IMAGE: ${{ inputs.push_image }}
+ TAGS: ${{ inputs.image_name }}:${{ inputs.commit_tag }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
+ - name: Set develop tag if main branch
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |
+ echo "TAGS=${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }},${{ env.IMAGE_NAME }}:${{ env.DEVELOP_TAG }}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v3
with:
@@ -57,6 +82,7 @@ jobs:
./gradlew traces-api-facade:app:shadowJar
echo ${{ github.workspace }}
- name: Login to Docker Hub
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -71,15 +97,37 @@ jobs:
uses: docker/metadata-action@v3
with:
images: consensys/linea-traces-api-facade
+ - name: Build for testing
+ uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ with:
+ context: .
+ build-contexts: jar=./traces-api-facade/app/build/libs/
+ file: ./traces-api-facade/Dockerfile
+ platforms: linux/amd64
+ load: true
+ push: false
+ tags: ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}
+ - name: Save Docker image as artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ run: |
+ docker save ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }} | gzip > linea-traces-api-facade-docker-image.tar.gz
+ shell: bash
+ - name: Upload Docker image artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: linea-traces-api-facade
+ path: linea-traces-api-facade-docker-image.tar.gz
- name: Build & push
uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'true' || github.event_name == 'workflow_dispatch' }}
with:
context: .
build-contexts: jar=./traces-api-facade/app/build/libs/
file: ./traces-api-facade/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
- tags: |
- ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}-${{ env.UNTESTED_TAG_SUFFIX }}
+ tags: ${{ env.TAGS }}
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache
cache-to: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache,mode=max
diff --git a/.github/workflows/traces-api-facade-testing.yml b/.github/workflows/traces-api-facade-testing.yml
index 6df13304c..d532eb15f 100644
--- a/.github/workflows/traces-api-facade-testing.yml
+++ b/.github/workflows/traces-api-facade-testing.yml
@@ -2,11 +2,7 @@ name: traces-api-facade-testing
on:
workflow_call:
- secrets:
- DOCKERHUB_USERNAME:
- required: true
- DOCKERHUB_TOKEN:
- required: true
+
workflow_dispatch:
inputs:
coverage:
diff --git a/.github/workflows/transaction-exclusion-api-build-and-publish.yml b/.github/workflows/transaction-exclusion-api-build-and-publish.yml
index 3a1a3ce0a..bb7ed0c0d 100644
--- a/.github/workflows/transaction-exclusion-api-build-and-publish.yml
+++ b/.github/workflows/transaction-exclusion-api-build-and-publish.yml
@@ -6,26 +6,48 @@ on:
commit_tag:
required: true
type: string
- last_commit_tag:
- required: true
- type: string
- common_ancestor_tag:
- required: true
- type: string
develop_tag:
required: true
type: string
- untested_tag_suffix:
- required: true
- type: string
image_name:
required: true
type: string
+ push_image:
+ required: false
+ type: boolean
+ default: false
secrets:
DOCKERHUB_USERNAME:
- required: true
+ required: false
DOCKERHUB_TOKEN:
+ required: false
+ workflow_dispatch:
+ inputs:
+ commit_tag:
+ description: 'Image tag'
+ required: true
+ type: string
+ develop_tag:
+ description: 'Image tag will be "develop" if target branch is main'
+ required: true
+ type: choice
+ options:
+ - develop
+ default: 'develop'
+ image_name:
+ description: 'Image name'
required: true
+ type: string
+ default: 'consensys/linea-transaction-exclusion-api'
+ push_image:
+ description: 'Toggle whether to push image to docker registry'
+ required: false
+ type: boolean
+ default: true
+
+concurrency:
+ group: transaction-exclusion-api-build-and-publish-${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
build-and-publish:
@@ -34,9 +56,16 @@ jobs:
env:
COMMIT_TAG: ${{ inputs.commit_tag }}
DEVELOP_TAG: ${{ inputs.develop_tag }}
- UNTESTED_TAG_SUFFIX: ${{ inputs.untested_tag_suffix }}
IMAGE_NAME: ${{ inputs.image_name }}
+ PUSH_IMAGE: ${{ inputs.push_image }}
+ TAGS: ${{ inputs.image_name }}:${{ inputs.commit_tag }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
+ - name: Set develop tag if main branch
+ if: ${{ github.ref == 'refs/heads/main' }}
+ run: |
+ echo "TAGS=${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }},${{ env.IMAGE_NAME }}:${{ env.DEVELOP_TAG }}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v3
with:
@@ -51,30 +80,53 @@ jobs:
- name: Build dist
run: |
./gradlew transaction-exclusion-api:app:distZip --no-daemon
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v2
- - name: Set up Docker Buildx
- id: buildx
- uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
- uses: docker/login-action@v1
+ if: ${{ env.DOCKERHUB_USERNAME != '' && env.DOCKERHUB_TOKEN != '' }}
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ - name: Set up Docker Buildx - local
+ id: buildx
+ uses: docker/setup-buildx-action@v3
- name: Docker meta
id: transaction-exclusion-api
uses: docker/metadata-action@v3
with:
- images: consensys/linea-transaction-exclusion-api
+ images: ${{ env.IMAGE_NAME }}
+ - name: Build for testing
+ uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ with:
+ context: .
+ build-contexts: zip=./transaction-exclusion-api/app/build/distributions/
+ file: ./transaction-exclusion-api/Dockerfile
+ platforms: linux/amd64
+ load: true
+ push: false
+ tags: ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}
+ - name: Save Docker image as artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ run: |
+ docker save ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }} | gzip > linea-transaction-exclusion-api-docker-image.tar.gz
+ shell: bash
+ - name: Upload Docker image artifact
+ if: ${{ env.PUSH_IMAGE == 'false' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: linea-transaction-exclusion-api
+ path: linea-transaction-exclusion-api-docker-image.tar.gz
- name: Build & push
uses: docker/build-push-action@v6
+ if: ${{ env.PUSH_IMAGE == 'true' || github.event_name == 'workflow_dispatch' }}
with:
context: .
build-contexts: zip=./transaction-exclusion-api/app/build/distributions/
file: ./transaction-exclusion-api/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
- tags: |
- ${{ env.IMAGE_NAME }}:${{ env.COMMIT_TAG }}-${{ env.UNTESTED_TAG_SUFFIX }}
+ tags: ${{ env.TAGS }}
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache
cache-to: type=registry,ref=${{ env.IMAGE_NAME }}:buildcache,mode=max
diff --git a/.github/workflows/transaction-exclusion-api-testing.yml b/.github/workflows/transaction-exclusion-api-testing.yml
index 77fd024da..cb076b35d 100644
--- a/.github/workflows/transaction-exclusion-api-testing.yml
+++ b/.github/workflows/transaction-exclusion-api-testing.yml
@@ -2,11 +2,6 @@ name: transaction-exclusion-api-testing
on:
workflow_call:
- secrets:
- DOCKERHUB_USERNAME:
- required: true
- DOCKERHUB_TOKEN:
- required: true
workflow_dispatch:
inputs:
coverage:
@@ -14,6 +9,10 @@ on:
required: false
type: boolean
default: false
+
+concurrency:
+ group: transaction-exclusion-api-testing-${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
run-tests:
diff --git a/Makefile b/Makefile
index bca9ec1b5..8f72498f4 100644
--- a/Makefile
+++ b/Makefile
@@ -54,6 +54,9 @@ start-whole-environment-traces-v2:
pull-all-images:
docker compose -f docker/compose.yml -f docker/compose-local-dev-traces-v2.overrides.yml --profile l1 --profile l2 pull
+pull-images-external-to-monorepo:
+ docker compose -f docker/compose.yml -f docker/compose-local-dev-traces-v2.overrides.yml --profile external-to-monorepo pull
+
compile-contracts:
cd contracts; \
make compile
@@ -77,6 +80,21 @@ deploy-linea-rollup:
LINEA_ROLLUP_GENESIS_TIMESTAMP=1683325137 \
npx hardhat deploy --no-compile --network zkevm_dev --tags PlonkVerifier,LineaRollupV5
+deploy-linea-rollup-v6:
+ # WARNING: FOR LOCAL DEV ONLY - DO NOT REUSE THESE KEYS ELSEWHERE
+ cd contracts/; \
+ PRIVATE_KEY=$${DEPLOYMENT_PRIVATE_KEY:-0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80} \
+	BLOCKCHAIN_NODE=http://localhost:8445/ \
+ PLONKVERIFIER_NAME=IntegrationTestTrueVerifier \
+ LINEA_ROLLUP_INITIAL_STATE_ROOT_HASH=0x072ead6777750dc20232d1cee8dc9a395c2d350df4bbaa5096c6f59b214dcecd \
+ LINEA_ROLLUP_INITIAL_L2_BLOCK_NUMBER=0 \
+ LINEA_ROLLUP_SECURITY_COUNCIL=0x90F79bf6EB2c4f870365E785982E1f101E93b906 \
+ LINEA_ROLLUP_OPERATORS=$${LINEA_ROLLUP_OPERATORS:-0x70997970C51812dc3A010C7d01b50e0d17dc79C8,0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC} \
+ LINEA_ROLLUP_RATE_LIMIT_PERIOD=86400 \
+ LINEA_ROLLUP_RATE_LIMIT_AMOUNT=1000000000000000000000 \
+ LINEA_ROLLUP_GENESIS_TIMESTAMP=1683325137 \
+ npx hardhat deploy --no-compile --network zkevm_dev --tags PlonkVerifier,LineaRollup
+
deploy-l2messageservice:
# WARNING: FOR LOCAL DEV ONLY - DO NOT REUSE THESE KEYS ELSEWHERE
cd contracts/; \
diff --git a/build.gradle b/build.gradle
index 18ec1cda5..4b708aa6e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -13,6 +13,10 @@ plugins {
task compileAll
+// to allow to have unused vars/imports,etc for faster debugging/prototyping
+// instead of deleting and re-adding code all the time
+def allowCompilationWarnings = System.getenv('LINEA_DEV_ALLOW_WARNINGS') != null
+
allprojects {
repositories { mavenCentral() }
@@ -22,7 +26,7 @@ allprojects {
tasks.withType(KotlinCompile).configureEach {
compileAll.dependsOn it
compilerOptions {
- allWarningsAsErrors = true
+ allWarningsAsErrors = !allowCompilationWarnings
}
}
@@ -38,8 +42,12 @@ allprojects {
'-Xlint:finally',
'-Xlint:static',
'-Xlint:deprecation',
- '-Werror'
])
+ if (!allowCompilationWarnings) {
+ options.compilerArgs.addAll([
+ '-Werror'
+ ])
+ }
if (!project.path.contains("testing-tools")) {
// testing tools have 100+ errors because of this
diff --git a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-conventions.gradle b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-conventions.gradle
index c435c4330..59f53d91b 100644
--- a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-conventions.gradle
+++ b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-conventions.gradle
@@ -2,41 +2,10 @@ plugins {
id 'net.consensys.zkevm.kotlin-common-minimal-conventions'
}
-repositories {
- // Use Maven Central for resolving dependencies.
- mavenCentral()
- maven {
- url 'https://artifacts.consensys.net/public/teku/maven/'
- content { includeGroupAndSubgroups('tech.pegasys') }
- }
- maven {
- url "https://hyperledger.jfrog.io/artifactory/besu-maven/"
- content { includeGroupAndSubgroups('org.hyperledger.besu') }
- }
- maven {
- url "https://artifacts.consensys.net/public/maven/maven/"
- }
- // to allow locally published artifacts to be resolved uncomment enable mavenLocal() below
- // build.linea:blob-compressor:2.0.0-SNAPSHOT
- // useful to test WIP development versions
- //mavenLocal() {
- // content {
- // includeModule('build.linea', 'blob-compressor')
- // includeModule('build.linea', 'shnarf-calculator')
- // }
- //}
-}
-
dependencies {
- constraints {
- // Define dependency versions as constraints
- implementation 'org.apache.commons:commons-text:1.9'
- }
-
api platform("io.vertx:vertx-stack-depchain:${libs.versions.vertx.get()}")
//
- implementation 'com.michael-bull.kotlin-result:kotlin-result:1.1.16'
- implementation "tech.pegasys.teku.internal:bytes:${libs.versions.teku.get()}"
+ implementation "com.michael-bull.kotlin-result:kotlin-result:${libs.versions.kotlinResult.get()}"
implementation ("tech.pegasys.teku.internal:async:${libs.versions.teku.get()}") {
exclude group: 'org.slf4j'
exclude group: 'com.github.jnr'
@@ -44,9 +13,7 @@ dependencies {
exclude group: 'org.apache.logging.log4j'
exclude group: 'tech.pegasys.teku.internal', module: 'metrics'
exclude group: 'tech.pegasys.teku.internal', module: 'time'
- }
- implementation "org.apache.tuweni:tuweni-units:${libs.versions.tuweni.get()}"
- //
+ } because("We are using Teku's SafeFuture, an improved/extension of CompletableFuture.")
//
implementation "org.apache.logging.log4j:log4j-api:${libs.versions.log4j.get()}"
diff --git a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-minimal-conventions.gradle b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-minimal-conventions.gradle
index af45bee41..34fb71301 100644
--- a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-minimal-conventions.gradle
+++ b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-common-minimal-conventions.gradle
@@ -12,6 +12,26 @@ java {
repositories {
mavenCentral()
+ maven {
+ url 'https://artifacts.consensys.net/public/teku/maven/'
+ content { includeGroupAndSubgroups('tech.pegasys') }
+ }
+ maven {
+ url "https://hyperledger.jfrog.io/artifactory/besu-maven/"
+ content { includeGroupAndSubgroups('org.hyperledger.besu') }
+ }
+ maven {
+ url "https://artifacts.consensys.net/public/maven/maven/"
+ }
+ // to allow locally published artifacts to be resolved uncomment enable mavenLocal() below
+ // build.linea:blob-compressor:2.0.0-SNAPSHOT
+ // useful to test WIP development versions
+ //mavenLocal() {
+ // content {
+ // includeModule('build.linea', 'blob-compressor')
+ // includeModule('build.linea', 'shnarf-calculator')
+ // }
+ //}
}
dependencies {
diff --git a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-conventions.gradle b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-conventions.gradle
index 637008eac..cb05f69b8 100644
--- a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-conventions.gradle
+++ b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-conventions.gradle
@@ -1,7 +1,4 @@
plugins {
- // Apply the common convention plugin for shared build configuration between library and application projects.
id 'net.consensys.zkevm.kotlin-common-conventions'
-
- // Apply the java-library plugin for API and implementation separation.
id 'java-library'
}
diff --git a/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-minimal-conventions.gradle b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-minimal-conventions.gradle
new file mode 100644
index 000000000..3f9f2f562
--- /dev/null
+++ b/buildSrc/src/main/groovy/net.consensys.zkevm.kotlin-library-minimal-conventions.gradle
@@ -0,0 +1,4 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-common-minimal-conventions'
+ id 'java-library'
+}
diff --git a/config/coordinator/coordinator-docker-traces-v2-override.config.toml b/config/coordinator/coordinator-docker-traces-v2-override.config.toml
index 8cd51ef8a..00f008ea2 100644
--- a/config/coordinator/coordinator-docker-traces-v2-override.config.toml
+++ b/config/coordinator/coordinator-docker-traces-v2-override.config.toml
@@ -18,7 +18,7 @@ blob-compressor-version="V1_0_1"
expected-traces-api-version-v2="v0.8.0-rc3"
[traces.counters-v2]
endpoints=["http://traces-node-v2:8545/"]
-request-limit-per-endpoint=20
+request-limit-per-endpoint=2
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation-v2]
diff --git a/config/coordinator/coordinator-docker.config.toml b/config/coordinator/coordinator-docker.config.toml
index 7bf0e0d72..1b6e1b244 100644
--- a/config/coordinator/coordinator-docker.config.toml
+++ b/config/coordinator/coordinator-docker.config.toml
@@ -48,7 +48,7 @@ raw-execution-traces-version="0.2.0"
expected-traces-api-version="0.2.0"
[traces.counters]
endpoints=["http://traces-api:8080/"]
-request-limit-per-endpoint=20
+request-limit-per-endpoint=2
request-retry.backoff-delay="PT1S"
request-retry.failures-warning-threshold=2
[traces.conflation]
@@ -69,7 +69,7 @@ traces-file-creation-wait-timeout="PT2M"
[state-manager]
version="2.2.0"
endpoints=["http://shomei:8888/"]
-request-limit-per-endpoint=3
+request-limit-per-endpoint=2
request-retry.backoff-delay="PT2S"
request-retry.failures-warning-threshold=2
diff --git a/config/traces-api/traces-app-docker.config.toml b/config/traces-api/traces-app-docker.config.toml
index 4c20bd308..885c2893c 100644
--- a/config/traces-api/traces-app-docker.config.toml
+++ b/config/traces-api/traces-app-docker.config.toml
@@ -7,5 +7,5 @@ traces_file_extension = "json.gz"
port = 8080
path = "/"
# if =0, it will create one verticle per core (or hyperthread if supported)
-number_of_verticles = 0
+number_of_verticles = 2
observability_port = 8090
diff --git a/coordinator/app/build.gradle b/coordinator/app/build.gradle
index 99b61ae20..1f2cd3af4 100644
--- a/coordinator/app/build.gradle
+++ b/coordinator/app/build.gradle
@@ -4,11 +4,14 @@ plugins {
configurations.configureEach {
exclude group: "org.rocksdb", module: "rocksdbjni"
- exclude group: "it.unimi.dsi", module: "fastutil"
exclude group: "org.hyperledger.besu", module: "bls12-381"
exclude group: "org.hyperledger.besu", module: "secp256r1"
}
+configurations {
+ runtimeClasspath.exclude group: "it.unimi.dsi", module: "fastutil"
+}
+
dependencies {
implementation project(':jvm-libs:generic:json-rpc')
implementation project(':jvm-libs:generic:http-rest')
@@ -19,11 +22,11 @@ dependencies {
implementation project(':jvm-libs:linea:core:metrics')
implementation project(':jvm-libs:linea:metrics:micrometer')
implementation project(':jvm-libs:linea:core:domain-models')
+ implementation project(':jvm-libs:linea:clients:linea-state-manager')
implementation project(':coordinator:utilities')
implementation project(':coordinator:core')
implementation project(':coordinator:clients:shomei-client')
implementation project(':coordinator:clients:traces-generator-api-client')
- implementation project(':coordinator:clients:type2-state-manager-client')
implementation project(':coordinator:clients:prover-client:file-based-client')
implementation project(':coordinator:clients:smart-contract-client')
implementation project(':coordinator:ethereum:finalization-monitor')
@@ -63,6 +66,7 @@ dependencies {
implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${libs.versions.jackson.get()}")
testImplementation "org.apache.logging.log4j:log4j-slf4j2-impl:${libs.versions.log4j.get()}"
testImplementation project(':coordinator:ethereum:test-utils')
+ testImplementation project(':jvm-libs:linea:testing:teku-helper')
testImplementation "io.vertx:vertx-junit5"
}
@@ -87,6 +91,11 @@ distributions {
}
}
+tasks.distTar {
+ // we don't need the tar distribution
+ onlyIf { false }
+}
+
run {
workingDir = rootProject.projectDir
jvmArgs = [
@@ -97,3 +106,7 @@ run {
.collect { "-D${it.key}=${it.value}" }
args = ["--traces-limits", "config/common/traces-limits-v1.toml", "--smart-contract-errors", "config/common/smart-contract-errors.toml", "--gas-price-cap-time-of-day-multipliers", "config/common/gas-price-cap-time-of-day-multipliers.toml", "config/coordinator/coordinator-docker.config.toml", "config/coordinator/coordinator-local-dev.config.overrides.toml"]
}
+
+test {
+ inputs.dir("$rootDir/config/coordinator/")
+}
diff --git a/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/app/L1DependentApp.kt b/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/app/L1DependentApp.kt
index c30b2a2de..4c0b7e573 100644
--- a/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/app/L1DependentApp.kt
+++ b/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/app/L1DependentApp.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.coordinator.app
+import build.linea.clients.StateManagerClientV1
+import build.linea.clients.StateManagerV1JsonRpcClient
import io.vertx.core.Vertx
import kotlinx.datetime.Clock
import net.consensys.linea.BlockNumberAndHash
@@ -37,7 +39,6 @@ import net.consensys.linea.web3j.Web3jBlobExtended
import net.consensys.linea.web3j.okHttpClientBuilder
import net.consensys.zkevm.LongRunningService
import net.consensys.zkevm.coordinator.app.config.CoordinatorConfig
-import net.consensys.zkevm.coordinator.app.config.StateManagerClientConfig
import net.consensys.zkevm.coordinator.blockcreation.BatchesRepoBasedLastProvenBlockNumberProvider
import net.consensys.zkevm.coordinator.blockcreation.BlockCreationMonitor
import net.consensys.zkevm.coordinator.blockcreation.GethCliqueSafeBlockProvider
@@ -49,8 +50,6 @@ import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.ShomeiClient
import net.consensys.zkevm.coordinator.clients.TracesGeneratorJsonRpcClientV1
import net.consensys.zkevm.coordinator.clients.TracesGeneratorJsonRpcClientV2
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerClient
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerJsonRpcClient
import net.consensys.zkevm.coordinator.clients.prover.ProverClientFactory
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClientReadOnly
@@ -299,26 +298,6 @@ class L1DependentApp(
null
}
- private fun createStateManagerClient(
- stateManagerConfig: StateManagerClientConfig,
- logger: Logger
- ): Type2StateManagerClient {
- return Type2StateManagerJsonRpcClient(
- vertx = vertx,
- rpcClient = httpJsonRpcClientFactory.createWithLoadBalancing(
- endpoints = stateManagerConfig.endpoints.toSet(),
- maxInflightRequestsPerClient = stateManagerConfig.requestLimitPerEndpoint,
- log = logger
- ),
- config = Type2StateManagerJsonRpcClient.Config(
- requestRetry = stateManagerConfig.requestRetryConfig,
- zkStateManagerVersion = stateManagerConfig.version
- ),
- retryConfig = stateManagerConfig.requestRetryConfig,
- log = logger
- )
- }
-
private val lastFinalizedBlock = lastFinalizedBlock().get()
private val lastProcessedBlockNumber = resumeConflationFrom(
aggregationsRepository,
@@ -422,8 +401,14 @@ class L1DependentApp(
private val conflationService: ConflationService =
ConflationServiceImpl(calculator = conflationCalculator, metricsFacade = metricsFacade)
- private val zkStateClient: Type2StateManagerClient =
- createStateManagerClient(configs.stateManager, LogManager.getLogger("clients.StateManagerShomeiClient"))
+ private val zkStateClient: StateManagerClientV1 = StateManagerV1JsonRpcClient.create(
+ rpcClientFactory = httpJsonRpcClientFactory,
+ endpoints = configs.stateManager.endpoints.map { it.toURI() },
+ maxInflightRequestsPerClient = configs.stateManager.requestLimitPerEndpoint,
+ requestRetry = configs.stateManager.requestRetryConfig,
+ zkStateManagerVersion = configs.stateManager.version,
+ logger = LogManager.getLogger("clients.StateManagerShomeiClient")
+ )
private val lineaSmartContractClientForDataSubmission: LineaRollupSmartContractClient = run {
// The below gas provider will act as the primary gas provider if L1
@@ -969,7 +954,7 @@ class L1DependentApp(
),
"type 2 state proof provider finalization updates" to FinalizationHandler {
finalizedBlockNotifier.updateFinalizedBlock(
- BlockNumberAndHash(it.blockNumber, it.blockHash)
+ BlockNumberAndHash(it.blockNumber, it.blockHash.toArray())
)
},
"last_proven_block_provider" to FinalizationHandler { update: FinalizationMonitor.FinalizationUpdate ->
diff --git a/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/blockcreation/TracesCountersV1WatcherClient.kt b/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/blockcreation/TracesCountersV1WatcherClient.kt
index 2f322daf0..72711c538 100644
--- a/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/blockcreation/TracesCountersV1WatcherClient.kt
+++ b/coordinator/app/src/main/kotlin/net/consensys/zkevm/coordinator/blockcreation/TracesCountersV1WatcherClient.kt
@@ -8,6 +8,7 @@ import net.consensys.zkevm.coordinator.clients.TracesCountersClientV1
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
+import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
class TracesCountersV1WatcherClient(
@@ -17,7 +18,7 @@ class TracesCountersV1WatcherClient(
) : TracesCountersClientV1 {
override fun rollupGetTracesCounters(block: BlockNumberAndHash):
SafeFuture>> {
- return tracesFilesManager.waitRawTracesGenerationOf(block.number, block.hash).thenCompose {
+ return tracesFilesManager.waitRawTracesGenerationOf(block.number, Bytes32.wrap(block.hash)).thenCompose {
log.trace("Traces file generated: block={}", block.number)
tracesCountersClientV1.rollupGetTracesCounters(block)
}
diff --git a/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobZkStateProviderImpl.kt b/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobZkStateProviderImpl.kt
index e03dd3e2c..fe27a91e6 100644
--- a/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobZkStateProviderImpl.kt
+++ b/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobZkStateProviderImpl.kt
@@ -1,34 +1,21 @@
package net.consensys.zkevm.ethereum.coordination.blob
-import com.github.michaelbull.result.Err
-import com.github.michaelbull.result.Ok
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerClient
+import build.linea.clients.GetStateMerkleProofRequest
+import build.linea.clients.StateManagerClientV1
+import build.linea.domain.BlockInterval
import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
-
-class BlobZkStateProviderImpl(private val zkStateClient: Type2StateManagerClient) : BlobZkStateProvider {
- private fun rollupGetZkEVMStateMerkleProof(startBlockNumber: ULong, endBlockNumber: ULong):
- SafeFuture {
- return zkStateClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber.toLong()),
- UInt64.valueOf(endBlockNumber.toLong())
- ).thenCompose {
- when (it) {
- is Ok -> SafeFuture.completedFuture(it.value)
- is Err -> {
- SafeFuture.failedFuture(it.error.asException())
- }
- }
- }
- }
+class BlobZkStateProviderImpl(
+ private val zkStateClient: StateManagerClientV1
+) : BlobZkStateProvider {
override fun getBlobZKState(blockRange: ULongRange): SafeFuture {
- return rollupGetZkEVMStateMerkleProof(blockRange.first, blockRange.last).thenApply {
- BlobZkState(
- parentStateRootHash = it.zkParentStateRootHash.toArray(),
- finalStateRootHash = it.zkEndStateRootHash.toArray()
- )
- }
+ return zkStateClient
+ .makeRequest(GetStateMerkleProofRequest(BlockInterval(blockRange.first, blockRange.last)))
+ .thenApply {
+ BlobZkState(
+ parentStateRootHash = it.zkParentStateRootHash,
+ finalStateRootHash = it.zkEndStateRootHash
+ )
+ }
}
}
diff --git a/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/ParentBlobDataProviderImpl.kt b/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/ParentBlobDataProviderImpl.kt
deleted file mode 100644
index 0a9f29e2d..000000000
--- a/coordinator/app/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/ParentBlobDataProviderImpl.kt
+++ /dev/null
@@ -1,104 +0,0 @@
-package net.consensys.zkevm.ethereum.coordination.blob
-
-import com.github.michaelbull.result.Err
-import com.github.michaelbull.result.Ok
-import net.consensys.encodeHex
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerClient
-import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.persistence.BlobsRepository
-import org.apache.logging.log4j.LogManager
-import org.apache.logging.log4j.Logger
-import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
-
-class ParentBlobDataProviderImpl(
- private val genesisShnarf: ByteArray,
- private val blobsRepository: BlobsRepository,
- private val zkStateClient: Type2StateManagerClient
-) : ParentBlobDataProvider {
- private val log: Logger = LogManager.getLogger(this::class.java)
-
- companion object {
- private data class BlobDataHashAndShnarf(val blobHash: ByteArray, val shnarf: ByteArray) {
- override fun equals(other: Any?): Boolean {
- if (this === other) return true
- if (javaClass != other?.javaClass) return false
-
- other as BlobDataHashAndShnarf
-
- if (!blobHash.contentEquals(other.blobHash)) return false
- if (!shnarf.contentEquals(other.shnarf)) return false
-
- return true
- }
-
- override fun hashCode(): Int {
- var result = blobHash.contentHashCode()
- result = 31 * result + shnarf.contentHashCode()
- return result
- }
- }
- }
-
- private fun rollupGetZkEVMStateMerkleProof(startBlockNumber: ULong, endBlockNumber: ULong):
- SafeFuture {
- return zkStateClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber.toLong()),
- UInt64.valueOf(endBlockNumber.toLong())
- ).thenCompose {
- when (it) {
- is Ok -> SafeFuture.completedFuture(it.value)
- is Err -> {
- SafeFuture.failedFuture(it.error.asException())
- }
- }
- }
- }
-
- private fun getParentBlobData(endBlockNumber: ULong): SafeFuture {
- return if (endBlockNumber == 0UL) {
- log.info(
- "Requested parent shnarf for the genesis block, returning genesisShnarf={}",
- genesisShnarf.encodeHex()
- )
- SafeFuture.completedFuture(BlobDataHashAndShnarf(ByteArray(32), genesisShnarf))
- } else {
- blobsRepository
- .findBlobByEndBlockNumber(endBlockNumber.toLong())
- .thenCompose { blobRecord: BlobRecord? ->
- if (blobRecord != null) {
- SafeFuture.completedFuture(
- BlobDataHashAndShnarf(
- blobHash = blobRecord.blobHash,
- shnarf = blobRecord.expectedShnarf
- )
- )
- } else {
- SafeFuture.failedFuture(
- IllegalStateException("Failed to find the parent blob in db with end block=$endBlockNumber")
- )
- }
- }
- }
- }
-
- override fun findParentAndZkStateData(
- blockRange: ULongRange
- ): SafeFuture {
- return getParentBlobData(
- blockRange.first.dec()
- ).thenComposeCombined(
- rollupGetZkEVMStateMerkleProof(blockRange.first, blockRange.last)
- ) { blobData: BlobDataHashAndShnarf, zkStateData: GetZkEVMStateMerkleProofResponse ->
- SafeFuture.completedFuture(
- ParentBlobAndZkStateData(
- blobData.blobHash,
- blobData.shnarf,
- zkStateData.zkParentStateRootHash.toArray(),
- zkStateData.zkEndStateRootHash.toArray()
- )
- )
- }
- }
-}
diff --git a/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/app/config/CoordinatorConfigTest.kt b/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/app/config/CoordinatorConfigTest.kt
index 60ecf2e2a..def06e39d 100644
--- a/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/app/config/CoordinatorConfigTest.kt
+++ b/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/app/config/CoordinatorConfigTest.kt
@@ -248,7 +248,7 @@ class CoordinatorConfigTest {
listOf(
URI("http://traces-api:8080/").toURL()
),
- requestLimitPerEndpoint = 20U,
+ requestLimitPerEndpoint = 2U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT1S"),
failuresWarningThreshold = 2
@@ -286,7 +286,7 @@ class CoordinatorConfigTest {
endpoints = listOf(
URI("http://shomei:8888/").toURL()
),
- requestLimitPerEndpoint = 3U,
+ requestLimitPerEndpoint = 2U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT2S"),
failuresWarningThreshold = 2
@@ -842,7 +842,7 @@ class CoordinatorConfigTest {
listOf(
URI("http://traces-node-v2:8545/").toURL()
),
- requestLimitPerEndpoint = 20U,
+ requestLimitPerEndpoint = 2U,
requestRetry = RequestRetryConfigTomlFriendly(
backoffDelay = Duration.parse("PT1S"),
failuresWarningThreshold = 2
diff --git a/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/blockcreation/BlockCreationMonitorTest.kt b/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/blockcreation/BlockCreationMonitorTest.kt
index 71153117c..81577394b 100644
--- a/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/blockcreation/BlockCreationMonitorTest.kt
+++ b/coordinator/app/src/test/kotlin/net/consensys/zkevm/coordinator/blockcreation/BlockCreationMonitorTest.kt
@@ -3,14 +3,13 @@ package net.consensys.zkevm.coordinator.blockcreation
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import io.vertx.junit5.VertxTestContext
+import net.consensys.ByteArrayExt
+import net.consensys.decodeHex
import net.consensys.linea.async.get
import net.consensys.linea.web3j.ExtendedWeb3J
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockCreated
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockCreationListener
import org.apache.logging.log4j.Logger
-import org.apache.tuweni.bytes.Bytes
-import org.apache.tuweni.bytes.Bytes32
-import org.apache.tuweni.units.bigints.UInt256
import org.assertj.core.api.Assertions.assertThat
import org.awaitility.Awaitility.await
import org.junit.jupiter.api.AfterEach
@@ -31,10 +30,8 @@ import org.mockito.kotlin.whenever
import org.web3j.protocol.Web3j
import org.web3j.protocol.core.Request
import org.web3j.protocol.core.methods.response.EthBlock
-import tech.pegasys.teku.ethereum.executionclient.schema.ExecutionPayloadV1
+import tech.pegasys.teku.ethereum.executionclient.schema.executionPayloadV1
import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.bytes.Bytes20
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
import java.math.BigInteger
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
@@ -45,8 +42,7 @@ import kotlin.time.toJavaDuration
@ExtendWith(VertxExtension::class)
class BlockCreationMonitorTest {
- private val parentHash =
- Bytes32.fromHexString("0x1000000000000000000000000000000000000000000000000000000000000000")
+ private val parentHash = "0x1000000000000000000000000000000000000000000000000000000000000000".decodeHex()
private val startingBlockNumberInclusive: Long = 100
private val blocksToFetch: Long = 5L
private val lastBlockNumberInclusiveToProcess: ULong = startingBlockNumberInclusive.toULong() + 10uL
@@ -136,12 +132,12 @@ class BlockCreationMonitorTest {
val payload2 =
executionPayloadV1(
blockNumber = lastBlockNumberInclusiveToProcess.toLong(),
- parentHash = payload.blockHash
+ parentHash = payload.blockHash.toArray()
)
val payload3 =
executionPayloadV1(
blockNumber = lastBlockNumberInclusiveToProcess.toLong() + 1,
- parentHash = payload2.blockHash
+ parentHash = payload2.blockHash.toArray()
)
val headBlockNumber = lastBlockNumberInclusiveToProcess.toLong() + config.blocksToFinalization
@@ -180,7 +176,7 @@ class BlockCreationMonitorTest {
val payload2 =
executionPayloadV1(
blockNumber = startingBlockNumberInclusive + 1,
- parentHash = payload.blockHash
+ parentHash = payload.blockHash.toArray()
)
val headBlockNumber = startingBlockNumberInclusive + config.blocksToFinalization
whenever(web3jClient.ethBlockNumber())
@@ -306,7 +302,7 @@ class BlockCreationMonitorTest {
val payload2 =
executionPayloadV1(
blockNumber = startingBlockNumberInclusive + 1,
- parentHash = Bytes32.random()
+ parentHash = ByteArrayExt.random32()
)
val headBlockNumber = startingBlockNumberInclusive + config.blocksToFinalization
whenever(web3jClient.ethBlockNumber())
@@ -347,7 +343,7 @@ class BlockCreationMonitorTest {
val payload2 =
executionPayloadV1(
blockNumber = startingBlockNumberInclusive + 1,
- parentHash = payload.blockHash
+ parentHash = payload.blockHash.toArray()
)
val headBlockNumber = startingBlockNumberInclusive + config.blocksToFinalization
@@ -388,7 +384,7 @@ class BlockCreationMonitorTest {
val payload2 =
executionPayloadV1(
blockNumber = startingBlockNumberInclusive + 1,
- parentHash = payload.blockHash
+ parentHash = payload.blockHash.toArray()
)
val headBlockNumber = startingBlockNumberInclusive + config.blocksToFinalization
whenever(web3jClient.ethBlockNumber())
@@ -426,12 +422,18 @@ class BlockCreationMonitorTest {
@Test
fun `block shouldn't be fetched when block gap is greater than fetch limit`(testContext: VertxTestContext) {
val payload = executionPayloadV1(blockNumber = startingBlockNumberInclusive, parentHash = parentHash)
- val payload2 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 1, parentHash = payload.blockHash)
- val payload3 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 2, parentHash = payload2.blockHash)
- val payload4 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 3, parentHash = payload3.blockHash)
- val payload5 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 4, parentHash = payload4.blockHash)
- val payload6 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 5, parentHash = payload5.blockHash)
- val payload7 = executionPayloadV1(blockNumber = startingBlockNumberInclusive + 6, parentHash = payload6.blockHash)
+ val payload2 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 1, parentHash = payload.blockHash.toArray())
+ val payload3 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 2, parentHash = payload2.blockHash.toArray())
+ val payload4 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 3, parentHash = payload3.blockHash.toArray())
+ val payload5 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 4, parentHash = payload4.blockHash.toArray())
+ val payload6 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 5, parentHash = payload5.blockHash.toArray())
+ val payload7 =
+ executionPayloadV1(blockNumber = startingBlockNumberInclusive + 6, parentHash = payload6.blockHash.toArray())
val headBlockNumber = startingBlockNumberInclusive + config.blocksToFinalization
whenever(web3jClient.ethGetExecutionPayloadByNumber(any()))
@@ -532,38 +534,4 @@ class BlockCreationMonitorTest {
}
.whenException(testContext::failNow)
}
-
- private fun executionPayloadV1(
- blockNumber: Long = 0,
- parentHash: Bytes32 = Bytes32.random(),
- feeRecipient: Bytes20 = Bytes20(Bytes.random(20)),
- stateRoot: Bytes32 = Bytes32.random(),
- receiptsRoot: Bytes32 = Bytes32.random(),
- logsBloom: Bytes = Bytes32.random(),
- prevRandao: Bytes32 = Bytes32.random(),
- gasLimit: UInt64 = UInt64.valueOf(0),
- gasUsed: UInt64 = UInt64.valueOf(0),
- timestamp: UInt64 = UInt64.valueOf(0),
- extraData: Bytes = Bytes32.random(),
- baseFeePerGas: UInt256 = UInt256.valueOf(256),
- blockHash: Bytes32 = Bytes32.random(),
- transactions: List = emptyList()
- ): ExecutionPayloadV1 {
- return ExecutionPayloadV1(
- parentHash,
- feeRecipient,
- stateRoot,
- receiptsRoot,
- logsBloom,
- prevRandao,
- UInt64.valueOf(blockNumber),
- gasLimit,
- gasUsed,
- timestamp,
- extraData,
- baseFeePerGas,
- blockHash,
- transactions
- )
- }
}
diff --git a/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ConsecutiveProvenBlobsProviderWithLastEndBlockNumberTrackerTest.kt b/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ConsecutiveProvenBlobsProviderWithLastEndBlockNumberTrackerTest.kt
index ea0cbca3c..76e25d1d8 100644
--- a/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ConsecutiveProvenBlobsProviderWithLastEndBlockNumberTrackerTest.kt
+++ b/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ConsecutiveProvenBlobsProviderWithLastEndBlockNumberTrackerTest.kt
@@ -1,9 +1,9 @@
package net.consensys.zkevm.ethereum.coordination.aggregation
+import build.linea.domain.BlockIntervals
import kotlinx.datetime.Instant
import net.consensys.zkevm.domain.BlobAndBatchCounters
import net.consensys.zkevm.domain.BlobCounters
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.persistence.AggregationsRepository
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
diff --git a/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/ForkChoiceUpdaterImplTest.kt b/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/ForkChoiceUpdaterImplTest.kt
index ca03be334..7a7513ff8 100644
--- a/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/ForkChoiceUpdaterImplTest.kt
+++ b/coordinator/app/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/ForkChoiceUpdaterImplTest.kt
@@ -2,12 +2,12 @@ package net.consensys.zkevm.ethereum.coordination.blockcreation
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
+import net.consensys.ByteArrayExt
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.RollupForkChoiceUpdatedClient
import net.consensys.zkevm.coordinator.clients.RollupForkChoiceUpdatedError
import net.consensys.zkevm.coordinator.clients.RollupForkChoiceUpdatedResponse
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.mockito.kotlin.any
@@ -32,7 +32,7 @@ class ForkChoiceUpdaterImplTest {
.thenReturn(SafeFuture.completedFuture(Ok(RollupForkChoiceUpdatedResponse("success"))))
val finalizedBlockNotifierImpl = ForkChoiceUpdaterImpl(listOf(mockClient1, mockClient2))
- val blockNumberAndHash = BlockNumberAndHash(100U, Bytes32.random())
+ val blockNumberAndHash = BlockNumberAndHash(100U, ByteArrayExt.random32())
val result = finalizedBlockNotifierImpl.updateFinalizedBlock(blockNumberAndHash)
assertThat(result).isCompleted()
verify(mockClient1).rollupForkChoiceUpdated(blockNumberAndHash)
@@ -53,7 +53,7 @@ class ForkChoiceUpdaterImplTest {
.thenReturn(SafeFuture.completedFuture(Err(ErrorResponse(RollupForkChoiceUpdatedError.UNKNOWN, ""))))
val finalizedBlockNotifierImpl = ForkChoiceUpdaterImpl(listOf(mockClient1, mockClient2))
- val blockNumberAndHash = BlockNumberAndHash(100U, Bytes32.random())
+ val blockNumberAndHash = BlockNumberAndHash(100U, ByteArrayExt.random32())
val result = finalizedBlockNotifierImpl.updateFinalizedBlock(blockNumberAndHash)
assertThat(result).isCompleted()
verify(mockClient1).rollupForkChoiceUpdated(blockNumberAndHash)
diff --git a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ABProverClientRouter.kt b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ABProverClientRouter.kt
index ad5eefa0f..dc118f3f3 100644
--- a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ABProverClientRouter.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ABProverClientRouter.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.domain.BlockInterval
import net.consensys.zkevm.coordinator.clients.ProverClient
-import net.consensys.zkevm.domain.BlockInterval
import tech.pegasys.teku.infrastructure.async.SafeFuture
class StartBlockNumberBasedSwitchPredicate(
diff --git a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/FileBasedExecutionProverClientV2.kt b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/FileBasedExecutionProverClientV2.kt
index a872cfa58..91c83ac15 100644
--- a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/FileBasedExecutionProverClientV2.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/FileBasedExecutionProverClientV2.kt
@@ -61,7 +61,7 @@ internal class ExecutionProofRequestDataDecorator(
getBlockStateRootHash(request.blocks.first().blockNumber.toULong() - 1UL)
) { blocksAndBridgeLogs, previousKeccakStateRootHash ->
BatchExecutionProofRequestDto(
- zkParentStateRootHash = request.type2StateData.zkParentStateRootHash.toHexString(),
+ zkParentStateRootHash = request.type2StateData.zkParentStateRootHash.encodeHex(),
keccakParentStateRootHash = previousKeccakStateRootHash,
conflatedExecutionTracesFile = request.tracesResponse.tracesFileName,
tracesEngineVersion = request.tracesResponse.tracesEngineVersion,
diff --git a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClient.kt b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClient.kt
index dcc24fb2b..4b71a022c 100644
--- a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClient.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClient.kt
@@ -1,11 +1,11 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.domain.BlockInterval
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.getOrElse
import com.github.michaelbull.result.map
import io.vertx.core.Vertx
import net.consensys.linea.errors.ErrorResponse
-import net.consensys.zkevm.domain.BlockInterval
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.fileio.FileMonitor
import net.consensys.zkevm.fileio.FileReader
diff --git a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactory.kt b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactory.kt
index 5802a45c0..ed09edbce 100644
--- a/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactory.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactory.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.domain.BlockInterval
import io.vertx.core.Vertx
import net.consensys.linea.contract.Web3JL2MessageServiceLogsClient
import net.consensys.linea.metrics.LineaMetricsCategory
@@ -9,7 +10,6 @@ import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
import net.consensys.zkevm.coordinator.clients.ExecutionProverClientV2
import net.consensys.zkevm.coordinator.clients.ProofAggregationProverClientV2
import net.consensys.zkevm.coordinator.clients.ProverClient
-import net.consensys.zkevm.domain.BlockInterval
import org.web3j.protocol.Web3j
class ProverClientFactory(
diff --git a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ExecutionProofRequestDataDecoratorTest.kt b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ExecutionProofRequestDataDecoratorTest.kt
index f0cbabf07..839a2e127 100644
--- a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ExecutionProofRequestDataDecoratorTest.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ExecutionProofRequestDataDecoratorTest.kt
@@ -1,14 +1,14 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.clients.GetZkEVMStateMerkleProofResponse
import com.fasterxml.jackson.databind.node.ArrayNode
+import net.consensys.ByteArrayExt
import net.consensys.encodeHex
import net.consensys.zkevm.coordinator.clients.BatchExecutionProofRequestV1
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
import net.consensys.zkevm.coordinator.clients.L2MessageServiceLogsClient
import net.consensys.zkevm.domain.RlpBridgeLogsData
import net.consensys.zkevm.encoding.ExecutionPayloadV1Encoder
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
@@ -48,12 +48,12 @@ class ExecutionProofRequestDataDecoratorTest {
@Test
fun `should decorate data with bridge logs and parent stateRootHash`() {
- val executionPayload1 = executionPayloadV1(blockNumber = 123)
- val executionPayload2 = executionPayloadV1(blockNumber = 124)
+ val executionPayload1 = executionPayloadV1(blockNumber = 123, gasLimit = 20_000_000UL)
+ val executionPayload2 = executionPayloadV1(blockNumber = 124, gasLimit = 20_000_000UL)
val type2StateResponse = GetZkEVMStateMerkleProofResponse(
zkStateMerkleProof = ArrayNode(null),
- zkParentStateRootHash = Bytes32.random(),
- zkEndStateRootHash = Bytes32.random(),
+ zkParentStateRootHash = ByteArrayExt.random32(),
+ zkEndStateRootHash = ByteArrayExt.random32(),
zkStateManagerVersion = "2.0.0"
)
val generateTracesResponse = GenerateTracesResponse(
@@ -82,7 +82,7 @@ class ExecutionProofRequestDataDecoratorTest {
val requestDto = requestDatDecorator.invoke(request).get()
assertThat(requestDto.keccakParentStateRootHash).isEqualTo(stateRoot)
- assertThat(requestDto.zkParentStateRootHash).isEqualTo(type2StateResponse.zkParentStateRootHash.toHexString())
+ assertThat(requestDto.zkParentStateRootHash).isEqualTo(type2StateResponse.zkParentStateRootHash.encodeHex())
assertThat(requestDto.conflatedExecutionTracesFile).isEqualTo("123-114-conflated-traces.json")
assertThat(requestDto.tracesEngineVersion).isEqualTo("1.0.0")
assertThat(requestDto.type2StateManagerVersion).isEqualTo("2.0.0")
diff --git a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClientTest.kt b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClientTest.kt
index d945b1b96..aef7c13f5 100644
--- a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClientTest.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/GenericFileBasedProverClientTest.kt
@@ -1,9 +1,9 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.domain.BlockInterval
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import net.consensys.zkevm.coordinator.clients.prover.serialization.JsonSerialization
-import net.consensys.zkevm.domain.BlockInterval
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.fileio.FileReader
import net.consensys.zkevm.fileio.FileWriter
@@ -15,6 +15,7 @@ import org.junit.jupiter.api.assertThrows
import org.junit.jupiter.api.extension.ExtendWith
import org.junit.jupiter.api.io.TempDir
import tech.pegasys.teku.infrastructure.async.SafeFuture
+import java.nio.file.Files
import java.nio.file.Path
import kotlin.time.Duration.Companion.milliseconds
import kotlin.time.Duration.Companion.seconds
@@ -102,7 +103,9 @@ class GenericFileBasedProverClientTest {
}
private fun saveToFile(file: Path, content: Any) {
- JsonSerialization.proofResponseMapperV1.writeValue(file.toFile(), content)
+ val writeInProgessFile = file.resolveSibling(file.fileName.toString() + ".coordinator_writing_inprogress")
+ JsonSerialization.proofResponseMapperV1.writeValue(writeInProgessFile.toFile(), content)
+ Files.move(writeInProgessFile, file)
}
private fun readFromFile(file: Path, valueType: Class): T {
diff --git a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactoryTest.kt b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactoryTest.kt
index 31e8995fa..66b01a8ed 100644
--- a/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactoryTest.kt
+++ b/coordinator/clients/prover-client/file-based-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/ProverClientFactoryTest.kt
@@ -1,16 +1,16 @@
package net.consensys.zkevm.coordinator.clients.prover
+import build.linea.domain.BlockIntervals
import io.micrometer.core.instrument.MeterRegistry
import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import kotlinx.datetime.Clock
+import net.consensys.ByteArrayExt
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.domain.ProofsToAggregate
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.awaitility.Awaitility.await
import org.junit.jupiter.api.BeforeEach
@@ -78,21 +78,21 @@ class ProverClientFactoryTest {
executionProofs = BlockIntervals(startingBlockNumber = 1uL, listOf(9uL)),
parentAggregationLastBlockTimestamp = Clock.System.now(),
parentAggregationLastL1RollingHashMessageNumber = 0uL,
- parentAggregationLastL1RollingHash = Bytes32.random().toArray()
+ parentAggregationLastL1RollingHash = ByteArrayExt.random32()
)
private val request2 = ProofsToAggregate(
compressionProofIndexes = listOf(ProofIndex(startBlockNumber = 10uL, endBlockNumber = 19uL)),
executionProofs = BlockIntervals(startingBlockNumber = 10uL, listOf(19uL)),
parentAggregationLastBlockTimestamp = Clock.System.now(),
parentAggregationLastL1RollingHashMessageNumber = 9uL,
- parentAggregationLastL1RollingHash = Bytes32.random().toArray()
+ parentAggregationLastL1RollingHash = ByteArrayExt.random32()
)
private val request3 = ProofsToAggregate(
compressionProofIndexes = listOf(ProofIndex(startBlockNumber = 300uL, endBlockNumber = 319uL)),
executionProofs = BlockIntervals(startingBlockNumber = 300uL, listOf(319uL)),
parentAggregationLastBlockTimestamp = Clock.System.now(),
parentAggregationLastL1RollingHashMessageNumber = 299uL,
- parentAggregationLastL1RollingHash = Bytes32.random().toArray()
+ parentAggregationLastL1RollingHash = ByteArrayExt.random32()
)
@BeforeEach
diff --git a/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobJsonFileRequestResponse.kt b/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobJsonFileRequestResponse.kt
index d050c09cf..2ecfa52f1 100644
--- a/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobJsonFileRequestResponse.kt
+++ b/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobJsonFileRequestResponse.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.coordinator.clients.prover.serialization
+import build.linea.domain.BlockIntervals
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.core.JsonParser
@@ -13,7 +14,6 @@ import net.consensys.decodeHex
import net.consensys.encodeHex
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.coordinator.clients.BlobCompressionProofRequest
-import net.consensys.zkevm.domain.BlockIntervals
internal class ByteArrayDeserializer : JsonDeserializer() {
override fun deserialize(p: JsonParser, ctxt: DeserializationContext): ByteArray {
diff --git a/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/JsonSerialization.kt b/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/JsonSerialization.kt
index c04f86e9f..654144c84 100644
--- a/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/JsonSerialization.kt
+++ b/coordinator/clients/prover-client/serialization/src/main/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/JsonSerialization.kt
@@ -1,15 +1,10 @@
package net.consensys.zkevm.coordinator.clients.prover.serialization
-import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.DeserializationFeature
-import com.fasterxml.jackson.databind.JsonSerializer
import com.fasterxml.jackson.databind.MapperFeature
import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.databind.SerializerProvider
-import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.kotlin.jacksonMapperBuilder
-import org.apache.tuweni.bytes.Bytes
object JsonSerialization {
val proofResponseMapperV1: ObjectMapper =
@@ -17,12 +12,5 @@ object JsonSerialization {
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.addModule(JavaTimeModule())
- .addModule(SimpleModule().addSerializer(Bytes::class.java, TuweniBytesSerializer()))
.build()
}
-
-class TuweniBytesSerializer : JsonSerializer() {
- override fun serialize(value: Bytes, gen: JsonGenerator, provider: SerializerProvider) {
- gen.writeString(value.toHexString().lowercase())
- }
-}
diff --git a/coordinator/clients/prover-client/serialization/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobCompressionProofJsonResponseTest.kt b/coordinator/clients/prover-client/serialization/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobCompressionProofJsonResponseTest.kt
index 5d0a01be9..334d01a52 100644
--- a/coordinator/clients/prover-client/serialization/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobCompressionProofJsonResponseTest.kt
+++ b/coordinator/clients/prover-client/serialization/src/test/kotlin/net/consensys/zkevm/coordinator/clients/prover/serialization/BlobCompressionProofJsonResponseTest.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.coordinator.clients.prover.serialization
+import build.linea.domain.BlockIntervals
import net.consensys.zkevm.coordinator.clients.prover.serialization.JsonSerialization.proofResponseMapperV1
-import net.consensys.zkevm.domain.BlockIntervals
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.ParameterizedTest
diff --git a/coordinator/clients/shomei-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClient.kt b/coordinator/clients/shomei-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClient.kt
index a68e9f55a..f4001ecf5 100644
--- a/coordinator/clients/shomei-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClient.kt
+++ b/coordinator/clients/shomei-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClient.kt
@@ -5,6 +5,7 @@ import com.github.michaelbull.result.getOrElse
import com.github.michaelbull.result.mapEither
import com.github.michaelbull.result.runCatching
import io.vertx.core.Vertx
+import net.consensys.encodeHex
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.errors.ErrorResponse
@@ -51,7 +52,7 @@ class ShomeiClient(
listOf(
mapOf(
"finalizedBlockNumber" to finalizedBlockNumberAndHash.number.toString(),
- "finalizedBlockHash" to finalizedBlockNumberAndHash.hash.toHexString()
+ "finalizedBlockHash" to finalizedBlockNumberAndHash.hash.encodeHex()
)
)
)
diff --git a/coordinator/clients/shomei-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClientTest.kt b/coordinator/clients/shomei-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClientTest.kt
index 051721dd7..86605e541 100644
--- a/coordinator/clients/shomei-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClientTest.kt
+++ b/coordinator/clients/shomei-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/ShomeiClientTest.kt
@@ -9,11 +9,12 @@ import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
import io.vertx.junit5.VertxExtension
+import net.consensys.ByteArrayExt
+import net.consensys.encodeHex
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.async.get
import net.consensys.linea.jsonrpc.client.JsonRpcClient
import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
@@ -65,7 +66,7 @@ class ShomeiClientTest {
WireMock.ok().withHeader("Content-type", "application/json").withBody(successResponse.toString())
)
)
- val blockNumberAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockNumberAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = shomeiClient.rollupForkChoiceUpdated(blockNumberAndHash)
val result = resultFuture.get()
Assertions.assertThat(resultFuture)
@@ -83,7 +84,7 @@ class ShomeiClientTest {
listOf(
mapOf(
"finalizedBlockNumber" to blockNumberAndHash.number.toString(),
- "finalizedBlockHash" to blockNumberAndHash.hash.toHexString()
+ "finalizedBlockHash" to blockNumberAndHash.hash.encodeHex()
)
)
)
@@ -115,7 +116,7 @@ class ShomeiClientTest {
.withBody(jsonRpcErrorResponse.toString())
)
)
- val blockNumberAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockNumberAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = shomeiClient.rollupForkChoiceUpdated(blockNumberAndHash)
val result = resultFuture.get()
Assertions.assertThat(resultFuture)
@@ -133,7 +134,7 @@ class ShomeiClientTest {
listOf(
mapOf(
"finalizedBlockNumber" to blockNumberAndHash.number.toString(),
- "finalizedBlockHash" to blockNumberAndHash.hash.toHexString()
+ "finalizedBlockHash" to blockNumberAndHash.hash.encodeHex()
)
)
)
diff --git a/coordinator/clients/smart-contract-client/build.gradle b/coordinator/clients/smart-contract-client/build.gradle
index ddebc978b..68fd0b30b 100644
--- a/coordinator/clients/smart-contract-client/build.gradle
+++ b/coordinator/clients/smart-contract-client/build.gradle
@@ -7,6 +7,7 @@ dependencies {
implementation project(':jvm-libs:linea:web3j-extensions')
api 'build.linea:l1-rollup-contract-client:0.0.1'
api 'build.linea:l2-message-service-contract-client:0.0.1'
+ implementation(project(':jvm-libs:linea:linea-contracts:l1-rollup'))
api ("org.web3j:core:${libs.versions.web3j.get()}") {
exclude group: 'org.slf4j', module: 'slf4j-nop'
diff --git a/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JFunctionBuildersV6.kt b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JFunctionBuildersV6.kt
new file mode 100644
index 000000000..1a66f4640
--- /dev/null
+++ b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JFunctionBuildersV6.kt
@@ -0,0 +1,153 @@
+package net.consensys.linea.contract.l1
+
+import build.linea.contract.LineaRollupV6
+import net.consensys.toBigInteger
+import net.consensys.zkevm.coordinator.clients.smartcontract.LineaContractVersion
+import net.consensys.zkevm.domain.BlobRecord
+import net.consensys.zkevm.domain.ProofToFinalize
+import org.web3j.abi.TypeReference
+import org.web3j.abi.datatypes.DynamicArray
+import org.web3j.abi.datatypes.DynamicBytes
+import org.web3j.abi.datatypes.Function
+import org.web3j.abi.datatypes.Type
+import org.web3j.abi.datatypes.generated.Bytes32
+import org.web3j.abi.datatypes.generated.Uint256
+import java.math.BigInteger
+import java.util.Arrays
+
+internal fun buildSubmitBlobsFunction(
+ version: LineaContractVersion,
+ blobs: List
+): Function {
+ return when (version) {
+ LineaContractVersion.V5 -> buildSubmitBlobsFunction(blobs)
+ LineaContractVersion.V6 -> buildSubmitBlobsFunctionV6(blobs)
+ }
+}
+
+internal fun buildSubmitBlobsFunctionV6(
+ blobs: List
+): Function {
+ val blobsSubmissionData = blobs.map { blob ->
+ val blobCompressionProof = blob.blobCompressionProof!!
+ // BlobSubmission(BigInteger dataEvaluationClaim, byte[] kzgCommitment, byte[] kzgProof,
+ // byte[] finalStateRootHash, byte[] snarkHash)
+ LineaRollupV6.BlobSubmission(
+ /*dataEvaluationClaim*/ BigInteger(blobCompressionProof.expectedY),
+ /*kzgCommitment*/ blobCompressionProof.commitment,
+ /*kzgProof*/ blobCompressionProof.kzgProofContract,
+ /*finalStateRootHash*/ blobCompressionProof.finalStateRootHash,
+ /*snarkHash*/ blobCompressionProof.snarkHash
+ )
+ }
+
+ /**
+ function submitBlobs(
+ BlobSubmission[] calldata _blobSubmissions,
+ bytes32 _parentShnarf,
+ bytes32 _finalBlobShnarf
+ )
+ */
+ return Function(
+ LineaRollupV6.FUNC_SUBMITBLOBS,
+ Arrays.asList>(
+ DynamicArray(LineaRollupV6.BlobSubmission::class.java, blobsSubmissionData),
+ Bytes32(blobs.first().blobCompressionProof!!.prevShnarf),
+ Bytes32(blobs.last().blobCompressionProof!!.expectedShnarf)
+ ),
+ emptyList>()
+ )
+}
+
+fun buildFinalizeBlocksFunction(
+ version: LineaContractVersion,
+ aggregationProof: ProofToFinalize,
+ aggregationLastBlob: BlobRecord,
+ parentShnarf: ByteArray,
+ parentL1RollingHash: ByteArray,
+ parentL1RollingHashMessageNumber: Long
+): Function {
+ when (version) {
+ LineaContractVersion.V5 -> {
+ return buildFinalizeBlobsFunction(
+ aggregationProof,
+ aggregationLastBlob,
+ parentShnarf,
+ parentL1RollingHash,
+ parentL1RollingHashMessageNumber
+ )
+ }
+
+ LineaContractVersion.V6 -> {
+ return buildFinalizeBlockFunctionV6(
+ aggregationProof,
+ aggregationLastBlob,
+ parentL1RollingHash,
+ parentL1RollingHashMessageNumber
+ )
+ }
+ }
+}
+
+internal fun buildFinalizeBlockFunctionV6(
+ aggregationProof: ProofToFinalize,
+ aggregationLastBlob: BlobRecord,
+ parentL1RollingHash: ByteArray,
+ parentL1RollingHashMessageNumber: Long
+): Function {
+ val aggregationEndBlobInfo = LineaRollupV6.ShnarfData(
+ /*parentShnarf*/ aggregationLastBlob.blobCompressionProof!!.prevShnarf,
+ /*snarkHash*/ aggregationLastBlob.blobCompressionProof!!.snarkHash,
+ /*finalStateRootHash*/ aggregationLastBlob.blobCompressionProof!!.finalStateRootHash,
+ /*dataEvaluationPoint*/ aggregationLastBlob.blobCompressionProof!!.expectedX,
+ /*dataEvaluationClaim*/ aggregationLastBlob.blobCompressionProof!!.expectedY
+ )
+
+// FinalizationDataV3(
+// byte[] parentStateRootHash,
+// BigInteger endBlockNumber,
+// ShnarfData shnarfData,
+// BigInteger lastFinalizedTimestamp,
+// BigInteger finalTimestamp,
+// byte[] lastFinalizedL1RollingHash,
+// byte[] l1RollingHash,
+// BigInteger lastFinalizedL1RollingHashMessageNumber,
+// BigInteger l1RollingHashMessageNumber,
+// BigInteger l2MerkleTreesDepth,
+// List l2MerkleRoots,
+// byte[] l2MessagingBlocksOffsets
+// )
+
+ val finalizationData = LineaRollupV6.FinalizationDataV3(
+ /*parentStateRootHash*/ aggregationProof.parentStateRootHash,
+ /*endBlockNumber*/ aggregationProof.endBlockNumber.toBigInteger(),
+ /*shnarfData*/ aggregationEndBlobInfo,
+ /*lastFinalizedTimestamp*/ aggregationProof.parentAggregationLastBlockTimestamp.epochSeconds.toBigInteger(),
+ /*finalTimestamp*/ aggregationProof.finalTimestamp.epochSeconds.toBigInteger(),
+ /*lastFinalizedL1RollingHash*/ parentL1RollingHash,
+ /*l1RollingHash*/ aggregationProof.l1RollingHash,
+ /*lastFinalizedL1RollingHashMessageNumber*/ parentL1RollingHashMessageNumber.toBigInteger(),
+ /*l1RollingHashMessageNumber*/ aggregationProof.l1RollingHashMessageNumber.toBigInteger(),
+ /*l2MerkleTreesDepth*/ aggregationProof.l2MerkleTreesDepth.toBigInteger(),
+ /*l2MerkleRoots*/ aggregationProof.l2MerkleRoots,
+ /*l2MessagingBlocksOffsets*/ aggregationProof.l2MessagingBlocksOffsets
+ )
+
+ /**
+ * function finalizeBlocks(
+ * bytes calldata _aggregatedProof,
+ * uint256 _proofType,
+ * FinalizationDataV3 calldata _finalizationData
+ * )
+ */
+ val function = Function(
+ LineaRollupV6.FUNC_FINALIZEBLOCKS,
+ Arrays.asList>(
+ DynamicBytes(aggregationProof.aggregatedProof),
+ Uint256(aggregationProof.aggregatedVerifierIndex.toLong()),
+ finalizationData
+ ),
+ emptyList>()
+ )
+ return function
+}
diff --git a/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClient.kt b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClient.kt
index 51a87ab09..47d87ef3c 100644
--- a/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClient.kt
+++ b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClient.kt
@@ -105,53 +105,39 @@ class Web3JLineaRollupSmartContractClient internal constructor(
}
}
- override fun submitBlobs(
- blobs: List,
- gasPriceCaps: GasPriceCaps?
- ): SafeFuture {
- return submitBlobsV5(blobs, gasPriceCaps)
- }
-
/**
* Sends EIP4844 blob carrying transaction to the smart contract.
* Uses SMC `submitBlobs` function that supports multiple blobs per call.
*/
- private fun submitBlobsV5(
+ override fun submitBlobs(
blobs: List,
gasPriceCaps: GasPriceCaps?
): SafeFuture {
- require(blobs.size in 1..6) { "Blobs size=${blobs.size} must be between 1 and 6." }
- val function = buildSubmitBlobsFunction(
- blobs
- )
-
- return helper.sendBlobCarryingTransactionAndGetTxHash(
- function = function,
- blobs = blobs.map { it.blobCompressionProof!!.compressedData },
- gasPriceCaps = gasPriceCaps
- )
+ return getVersion()
+ .thenCompose { version ->
+ val function = buildSubmitBlobsFunction(version, blobs)
+ helper.sendBlobCarryingTransactionAndGetTxHash(
+ function = function,
+ blobs = blobs.map { it.blobCompressionProof!!.compressedData },
+ gasPriceCaps = gasPriceCaps
+ )
+ }
}
override fun submitBlobsEthCall(
blobs: List,
gasPriceCaps: GasPriceCaps?
): SafeFuture {
- return submitBlobsEthCallImpl(blobs, gasPriceCaps)
- }
-
- private fun submitBlobsEthCallImpl(
- blobs: List,
- gasPriceCaps: GasPriceCaps? = null
- ): SafeFuture {
- val function = buildSubmitBlobsFunction(blobs)
-
- val transaction = helper.createEip4844Transaction(
- function,
- blobs.map { it.blobCompressionProof!!.compressedData }.toWeb3JTxBlob(),
- gasPriceCaps
- )
-
- return web3j.informativeEthCall(transaction, smartContractErrors)
+ return getVersion()
+ .thenCompose { version ->
+ val function = buildSubmitBlobsFunction(version, blobs)
+ val transaction = helper.createEip4844Transaction(
+ function,
+ blobs.map { it.blobCompressionProof!!.compressedData }.toWeb3JTxBlob(),
+ gasPriceCaps
+ )
+ web3j.informativeEthCall(transaction, smartContractErrors)
+ }
}
override fun finalizeBlocks(
@@ -162,38 +148,22 @@ class Web3JLineaRollupSmartContractClient internal constructor(
parentL1RollingHashMessageNumber: Long,
gasPriceCaps: GasPriceCaps?
): SafeFuture {
- return finalizeBlocksV5(
- aggregation,
- aggregationLastBlob,
- parentShnarf,
- parentL1RollingHash,
- parentL1RollingHashMessageNumber,
- gasPriceCaps
- )
- }
-
- private fun finalizeBlocksV5(
- aggregation: ProofToFinalize,
- aggregationLastBlob: BlobRecord,
- parentShnarf: ByteArray,
- parentL1RollingHash: ByteArray,
- parentL1RollingHashMessageNumber: Long,
- gasPriceCaps: GasPriceCaps?
- ): SafeFuture {
- val function = buildFinalizeBlobsFunction(
- aggregation,
- aggregationLastBlob,
- parentShnarf,
- parentL1RollingHash,
- parentL1RollingHashMessageNumber
- )
-
- return SafeFuture.of(
- helper.sendTransactionAsync(function, BigInteger.ZERO, gasPriceCaps)
- ).thenApply { result ->
- throwExceptionIfJsonRpcErrorReturned("eth_sendRawTransaction", result)
- result.transactionHash
- }
+ return getVersion()
+ .thenCompose { version ->
+ val function = buildFinalizeBlocksFunction(
+ version,
+ aggregation,
+ aggregationLastBlob,
+ parentShnarf,
+ parentL1RollingHash,
+ parentL1RollingHashMessageNumber
+ )
+ helper.sendTransactionAsync(function, BigInteger.ZERO, gasPriceCaps)
+ .thenApply { result ->
+ throwExceptionIfJsonRpcErrorReturned("eth_sendRawTransaction", result)
+ result.transactionHash
+ }
+ }
}
override fun finalizeBlocksEthCall(
@@ -203,30 +173,17 @@ class Web3JLineaRollupSmartContractClient internal constructor(
parentL1RollingHash: ByteArray,
parentL1RollingHashMessageNumber: Long
): SafeFuture {
- return finalizeBlocksEthCallV5(
- aggregation,
- aggregationLastBlob,
- parentShnarf,
- parentL1RollingHash,
- parentL1RollingHashMessageNumber
- )
- }
-
- private fun finalizeBlocksEthCallV5(
- aggregation: ProofToFinalize,
- aggregationLastBlob: BlobRecord,
- parentShnarf: ByteArray,
- parentL1RollingHash: ByteArray,
- parentL1RollingHashMessageNumber: Long
- ): SafeFuture {
- val function = buildFinalizeBlobsFunction(
- aggregation,
- aggregationLastBlob,
- parentShnarf,
- parentL1RollingHash,
- parentL1RollingHashMessageNumber
- )
-
- return helper.executeEthCall(function)
+ return getVersion()
+ .thenCompose { version ->
+ val function = buildFinalizeBlocksFunction(
+ version,
+ aggregation,
+ aggregationLastBlob,
+ parentShnarf,
+ parentL1RollingHash,
+ parentL1RollingHashMessageNumber
+ )
+ helper.executeEthCall(function)
+ }
}
}
diff --git a/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClientReadOnly.kt b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClientReadOnly.kt
index b13734da4..e68bdc0e3 100644
--- a/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClientReadOnly.kt
+++ b/coordinator/clients/smart-contract-client/src/main/kotlin/net/consensys/linea/contract/l1/Web3JLineaRollupSmartContractClientReadOnly.kt
@@ -1,5 +1,6 @@
package net.consensys.linea.contract.l1
+import build.linea.contract.LineaRollupV6
import net.consensys.encodeHex
import net.consensys.linea.BlockParameter
import net.consensys.linea.async.toSafeFuture
@@ -13,6 +14,8 @@ import org.apache.logging.log4j.Logger
import org.web3j.crypto.Credentials
import org.web3j.protocol.Web3j
import org.web3j.protocol.core.DefaultBlockParameter
+import org.web3j.tx.Contract
+import org.web3j.tx.exceptions.ContractCallException
import org.web3j.tx.gas.StaticGasProvider
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.math.BigInteger
@@ -34,41 +37,76 @@ open class Web3JLineaRollupSmartContractClientReadOnly(
) : LineaRollupSmartContractClientReadOnly {
protected fun contractClientAtBlock(blockParameter: BlockParameter): LineaRollup {
- return LineaRollup.load(
- contractAddress,
- web3j,
- fakeCredentials,
- StaticGasProvider(BigInteger.ZERO, BigInteger.ZERO)
- ).apply {
- this.setDefaultBlockParameter(blockParameter.toWeb3j())
- }
+ return contractClientAtBlock(blockParameter, LineaRollup::class.java)
+ }
+
+ protected fun contractClientAtBlock(blockParameter: BlockParameter, contract: Class): T {
+ @Suppress("UNCHECKED_CAST")
+ return when {
+ LineaRollupV6::class.java.isAssignableFrom(contract) -> LineaRollupV6.load(
+ contractAddress,
+ web3j,
+ fakeCredentials,
+ StaticGasProvider(BigInteger.ZERO, BigInteger.ZERO)
+ ).apply {
+ this.setDefaultBlockParameter(blockParameter.toWeb3j())
+ }
+
+ LineaRollup::class.java.isAssignableFrom(contract) -> LineaRollup.load(
+ contractAddress,
+ web3j,
+ fakeCredentials,
+ StaticGasProvider(BigInteger.ZERO, BigInteger.ZERO)
+ ).apply {
+ this.setDefaultBlockParameter(blockParameter.toWeb3j())
+ }
+
+ else -> throw IllegalArgumentException("Unsupported contract type: ${contract::class.java}")
+ } as T
}
protected val smartContractVersionCache: AtomicReference =
AtomicReference(fetchSmartContractVersion().get())
private fun getSmartContractVersion(): SafeFuture {
- return if (smartContractVersionCache.get() == LineaContractVersion.V5) {
+ return if (smartContractVersionCache.get() == LineaContractVersion.V6) {
// once upgraded, it's not downgraded
- SafeFuture.completedFuture(LineaContractVersion.V5)
+ SafeFuture.completedFuture(LineaContractVersion.V6)
} else {
- fetchSmartContractVersion().thenPeek { contractLatestVersion ->
- if (contractLatestVersion != smartContractVersionCache.get()) {
- log.info(
- "Smart contract upgraded: prevVersion={} upgradedVersion={}",
- smartContractVersionCache.get(),
- contractLatestVersion
- )
+ fetchSmartContractVersion()
+ .thenPeek { contractLatestVersion ->
+ if (contractLatestVersion != smartContractVersionCache.get()) {
+ log.info(
+ "Smart contract upgraded: prevVersion={} upgradedVersion={}",
+ smartContractVersionCache.get(),
+ contractLatestVersion
+ )
+ }
+ smartContractVersionCache.set(contractLatestVersion)
}
- smartContractVersionCache.set(contractLatestVersion)
- }
}
}
private fun fetchSmartContractVersion(): SafeFuture {
- // FIXME: this is a temporary solution to determine the smart contract version.
- // It should rely on events
- return SafeFuture.completedFuture(LineaContractVersion.V5)
+ return contractClientAtBlock(BlockParameter.Tag.LATEST, LineaRollupV6::class.java)
+ .CONTRACT_VERSION()
+ .sendAsync()
+ .toSafeFuture()
+ .thenApply { version ->
+ when {
+ version.startsWith("6") -> LineaContractVersion.V6
+ else -> throw IllegalStateException("Unsupported contract version: $version")
+ }
+ }
+ .exceptionallyCompose { error ->
+ if (error.cause is ContractCallException) {
+ // means that contract does not have CONTRACT_VERSION method available yet
+ // so it is still V5, so defaulting to V5
+ SafeFuture.completedFuture(LineaContractVersion.V5)
+ } else {
+ SafeFuture.failedFuture(error)
+ }
+ }
}
override fun getAddress(): String = contractAddress
@@ -94,11 +132,18 @@ open class Web3JLineaRollupSmartContractClientReadOnly(
return contractClientAtBlock(blockParameter).rollingHashes(messageNumber.toBigInteger()).sendAsync().toSafeFuture()
}
- override fun findBlobFinalBlockNumberByShnarf(blockParameter: BlockParameter, shnarf: ByteArray): SafeFuture {
- return contractClientAtBlock(blockParameter)
- .shnarfFinalBlockNumbers(shnarf).sendAsync()
- .thenApply { if (it == BigInteger.ZERO) null else it.toULong() }
- .toSafeFuture()
+ override fun isBlobShnarfPresent(blockParameter: BlockParameter, shnarf: ByteArray): SafeFuture {
+ return getVersion()
+ .thenCompose { version ->
+ if (version == LineaContractVersion.V5) {
+ contractClientAtBlock(blockParameter, LineaRollup::class.java).shnarfFinalBlockNumbers(shnarf)
+ } else {
+ contractClientAtBlock(blockParameter, LineaRollupV6::class.java).blobShnarfExists(shnarf)
+ }
+ .sendAsync()
+ .thenApply { it != BigInteger.ZERO }
+ .toSafeFuture()
+ }
}
override fun blockStateRootHash(blockParameter: BlockParameter, lineaL2BlockNumber: ULong): SafeFuture {
diff --git a/coordinator/clients/traces-generator-api-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1.kt b/coordinator/clients/traces-generator-api-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1.kt
index 08b16bc72..9f0f85516 100644
--- a/coordinator/clients/traces-generator-api-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1.kt
+++ b/coordinator/clients/traces-generator-api-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1.kt
@@ -4,6 +4,7 @@ import com.github.michaelbull.result.Result
import com.github.michaelbull.result.mapEither
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
+import net.consensys.encodeHex
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.errors.ErrorResponse
@@ -58,7 +59,7 @@ class TracesGeneratorJsonRpcClientV1(
mapOf(
"block" to mapOf(
"blockNumber" to block.number.toString(),
- "blockHash" to block.hash.toHexString()
+ "blockHash" to block.hash.encodeHex()
),
"rawExecutionTracesVersion" to config.rawExecutionTracesVersion,
"expectedTracesApiVersion" to config.expectedTracesApiVersion
@@ -92,7 +93,7 @@ class TracesGeneratorJsonRpcClientV1(
"blockNumber",
block.number.toString(),
"blockHash",
- block.hash.toHexString()
+ block.hash.encodeHex()
)
},
"rawExecutionTracesVersion" to config.rawExecutionTracesVersion,
diff --git a/coordinator/clients/traces-generator-api-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1Test.kt b/coordinator/clients/traces-generator-api-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1Test.kt
index 627f480a1..2575af469 100644
--- a/coordinator/clients/traces-generator-api-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1Test.kt
+++ b/coordinator/clients/traces-generator-api-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/TracesGeneratorJsonRpcClientV1Test.kt
@@ -17,6 +17,8 @@ import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import io.vertx.core.Vertx
import io.vertx.core.json.JsonObject
import io.vertx.junit5.VertxExtension
+import net.consensys.ByteArrayExt
+import net.consensys.encodeHex
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.async.get
import net.consensys.linea.errors.ErrorResponse
@@ -25,7 +27,6 @@ import net.consensys.linea.jsonrpc.client.RequestRetryConfig
import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.TracingModuleV1
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
@@ -137,7 +138,7 @@ class TracesGeneratorJsonRpcClientV1Test {
)
)
- val blockIdAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
resultFuture.get()
@@ -166,7 +167,7 @@ class TracesGeneratorJsonRpcClientV1Test {
mapOf(
"block" to mapOf(
"blockNumber" to "1",
- "blockHash" to blockIdAndHash.hash.toHexString()
+ "blockHash" to blockIdAndHash.hash.encodeHex()
),
"rawExecutionTracesVersion" to rawExecutionTracesVersion,
"expectedTracesApiVersion" to expectedTracesApiVersion
@@ -207,7 +208,7 @@ class TracesGeneratorJsonRpcClientV1Test {
)
)
- val blockIdAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
val exception = assertThrows { resultFuture.get() }
assertThat(exception.message).contains("missing modules: WCP")
@@ -241,7 +242,7 @@ class TracesGeneratorJsonRpcClientV1Test {
)
)
- val blockIdAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
val exception = assertThrows { resultFuture.get() }
assertThat(exception.message).contains("unsupported modules: NEW_EVM_MODULE")
@@ -280,9 +281,9 @@ class TracesGeneratorJsonRpcClientV1Test {
)
val blocks = listOf(
- BlockNumberAndHash(1U, Bytes32.random()),
- BlockNumberAndHash(2U, Bytes32.random()),
- BlockNumberAndHash(3U, Bytes32.random())
+ BlockNumberAndHash(1U, ByteArrayExt.random32()),
+ BlockNumberAndHash(2U, ByteArrayExt.random32()),
+ BlockNumberAndHash(3U, ByteArrayExt.random32())
)
val resultFuture =
@@ -309,7 +310,7 @@ class TracesGeneratorJsonRpcClientV1Test {
"blockNumber",
it.number.toString(),
"blockHash",
- it.hash.toHexString()
+ it.hash.encodeHex()
)
},
"rawExecutionTracesVersion",
@@ -340,7 +341,7 @@ class TracesGeneratorJsonRpcClientV1Test {
)
)
- val blockIdAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGetTracesCounters(blockIdAndHash)
resultFuture.get()
@@ -366,9 +367,9 @@ class TracesGeneratorJsonRpcClientV1Test {
)
val blocks = listOf(
- BlockNumberAndHash(1U, Bytes32.random()),
- BlockNumberAndHash(2U, Bytes32.random()),
- BlockNumberAndHash(3U, Bytes32.random())
+ BlockNumberAndHash(1U, ByteArrayExt.random32()),
+ BlockNumberAndHash(2U, ByteArrayExt.random32()),
+ BlockNumberAndHash(3U, ByteArrayExt.random32())
)
val resultFuture =
tracesGeneratorClient.rollupGenerateConflatedTracesToFile(blocks)
@@ -442,7 +443,7 @@ class TracesGeneratorJsonRpcClientV1Test {
)
)
- val blockIdAndHash = BlockNumberAndHash(1U, Bytes32.random())
+ val blockIdAndHash = BlockNumberAndHash(1U, ByteArrayExt.random32())
val resultFuture = tracesGeneratorClient.rollupGenerateConflatedTracesToFile(listOf(blockIdAndHash))
assertThat(resultFuture.get()).isInstanceOf(Ok::class.java)
diff --git a/coordinator/clients/type2-state-manager-client/build.gradle b/coordinator/clients/type2-state-manager-client/build.gradle
deleted file mode 100644
index cb2f4eeb7..000000000
--- a/coordinator/clients/type2-state-manager-client/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-plugins {
- id 'net.consensys.zkevm.kotlin-library-conventions'
-}
-
-dependencies {
- implementation project(':coordinator:core')
- implementation project(':jvm-libs:generic:json-rpc')
- implementation project(':jvm-libs:linea:metrics:micrometer')
- implementation project(':jvm-libs:generic:extensions:futures')
-
- implementation "tech.pegasys.teku.internal:bytes:${libs.versions.teku.get()}"
- implementation "io.vertx:vertx-core"
- implementation "io.vertx:vertx-web"
- implementation "io.vertx:vertx-lang-kotlin"
- implementation "com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}"
- implementation "com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}"
- implementation "com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}"
-
- testImplementation "io.vertx:vertx-junit5"
- testImplementation "com.github.tomakehurst:wiremock-jre8:${libs.versions.wiremock.get()}"
-}
diff --git a/coordinator/clients/type2-state-manager-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClient.kt b/coordinator/clients/type2-state-manager-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClient.kt
deleted file mode 100644
index 2e9b35903..000000000
--- a/coordinator/clients/type2-state-manager-client/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClient.kt
+++ /dev/null
@@ -1,135 +0,0 @@
-package net.consensys.zkevm.coordinator.clients
-
-import com.fasterxml.jackson.databind.node.ArrayNode
-import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
-import com.github.michaelbull.result.Result
-import com.github.michaelbull.result.mapEither
-import io.vertx.core.Vertx
-import io.vertx.core.json.JsonObject
-import net.consensys.linea.async.toSafeFuture
-import net.consensys.linea.errors.ErrorResponse
-import net.consensys.linea.jsonrpc.JsonRpcErrorResponse
-import net.consensys.linea.jsonrpc.JsonRpcRequestListParams
-import net.consensys.linea.jsonrpc.JsonRpcSuccessResponse
-import net.consensys.linea.jsonrpc.client.JsonRpcClient
-import net.consensys.linea.jsonrpc.client.JsonRpcRequestRetryer
-import net.consensys.linea.jsonrpc.client.RequestRetryConfig
-import net.consensys.zkevm.toULong
-import org.apache.logging.log4j.LogManager
-import org.apache.logging.log4j.Logger
-import org.apache.tuweni.bytes.Bytes32
-import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
-import java.util.concurrent.atomic.AtomicInteger
-
-class Type2StateManagerJsonRpcClient(
- private val rpcClient: JsonRpcClient,
- private val config: Config
-) : Type2StateManagerClient {
- private val log: Logger = LogManager.getLogger(this::class.java)
- private val objectMapper = jacksonObjectMapper()
- private var id = AtomicInteger(0)
-
- data class Config(
- val requestRetry: RequestRetryConfig,
- val zkStateManagerVersion: String
- )
-
- constructor(
- vertx: Vertx,
- rpcClient: JsonRpcClient,
- config: Config,
- retryConfig: RequestRetryConfig,
- log: Logger = LogManager.getLogger(Type2StateManagerJsonRpcClient::class.java)
- ) : this(
- rpcClient = JsonRpcRequestRetryer(
- vertx,
- rpcClient,
- config = JsonRpcRequestRetryer.Config(
- methodsToRetry = retryableMethods,
- requestRetry = retryConfig
- ),
- log = log
- ),
- config = config
- )
-
- override fun rollupGetZkEVMStateMerkleProof(
- startBlockNumber: UInt64,
- endBlockNumber: UInt64
- ): SafeFuture<
- Result>> {
- if (startBlockNumber > endBlockNumber) {
- throw IllegalArgumentException(
- "endBlockNumber must be greater than startBlockNumber: " +
- "startBlockNumber = $startBlockNumber endBlockNumber = $endBlockNumber"
- )
- }
-
- val jsonRequest =
- JsonRpcRequestListParams(
- "2.0",
- id.incrementAndGet(),
- "rollup_getZkEVMStateMerkleProofV0",
- listOf(
- JsonObject.of(
- "startBlockNumber",
- startBlockNumber.toULong().toLong(),
- "endBlockNumber",
- endBlockNumber.toULong().toLong(),
- "zkStateManagerVersion",
- config.zkStateManagerVersion
- )
- )
- )
-
- return rpcClient
- .makeRequest(jsonRequest).toSafeFuture()
- .thenApply { responseResult ->
- responseResult.mapEither(this::parseZkEVMStateMerkleProofResponse, this::mapErrorResponse)
- }
- }
-
- private fun mapErrorResponse(
- jsonRpcErrorResponse: JsonRpcErrorResponse
- ): ErrorResponse {
- val errorType =
- try {
- Type2StateManagerErrorType.valueOf(
- jsonRpcErrorResponse.error.message.substringBefore('-').trim()
- )
- } catch (_: Exception) {
- log.error(
- "State manager found unrecognised JSON-RPC response error: {}",
- jsonRpcErrorResponse.error
- )
- Type2StateManagerErrorType.UNKNOWN
- }
-
- return ErrorResponse(
- errorType,
- listOfNotNull(
- jsonRpcErrorResponse.error.message,
- jsonRpcErrorResponse.error.data?.toString()
- )
- .joinToString(": ")
- )
- }
-
- private fun parseZkEVMStateMerkleProofResponse(
- jsonRpcResponse: JsonRpcSuccessResponse
- ): GetZkEVMStateMerkleProofResponse {
- val json = objectMapper.readTree((jsonRpcResponse.result as JsonObject).toString())
-
- return GetZkEVMStateMerkleProofResponse(
- zkStateManagerVersion = json.get("zkStateManagerVersion").asText(),
- zkStateMerkleProof = json.get("zkStateMerkleProof") as ArrayNode,
- zkParentStateRootHash = Bytes32.fromHexString(json.get("zkParentStateRootHash").asText()),
- zkEndStateRootHash = Bytes32.fromHexString(json.get("zkEndStateRootHash").asText())
- )
- }
-
- companion object {
- internal val retryableMethods = setOf("rollup_getZkEVMStateMerkleProofV0")
- }
-}
diff --git a/coordinator/clients/type2-state-manager-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClientTest.kt b/coordinator/clients/type2-state-manager-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClientTest.kt
deleted file mode 100644
index a907b11cb..000000000
--- a/coordinator/clients/type2-state-manager-client/src/test/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerJsonRpcClientTest.kt
+++ /dev/null
@@ -1,257 +0,0 @@
-package net.consensys.zkevm.coordinator.clients
-
-import com.fasterxml.jackson.databind.node.ArrayNode
-import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
-import com.github.michaelbull.result.Err
-import com.github.michaelbull.result.Ok
-import com.github.tomakehurst.wiremock.WireMockServer
-import com.github.tomakehurst.wiremock.client.WireMock.containing
-import com.github.tomakehurst.wiremock.client.WireMock.ok
-import com.github.tomakehurst.wiremock.client.WireMock.post
-import com.github.tomakehurst.wiremock.core.WireMockConfiguration.options
-import io.micrometer.core.instrument.simple.SimpleMeterRegistry
-import io.vertx.core.Vertx
-import io.vertx.core.json.JsonObject
-import io.vertx.junit5.VertxExtension
-import net.consensys.linea.async.get
-import net.consensys.linea.errors.ErrorResponse
-import net.consensys.linea.jsonrpc.client.RequestRetryConfig
-import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
-import org.apache.tuweni.bytes.Bytes32
-import org.assertj.core.api.Assertions.assertThat
-import org.assertj.core.api.Assertions.assertThatIllegalArgumentException
-import org.junit.jupiter.api.AfterEach
-import org.junit.jupiter.api.BeforeEach
-import org.junit.jupiter.api.Test
-import org.junit.jupiter.api.extension.ExtendWith
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
-import java.net.URI
-import java.nio.file.Path
-import kotlin.time.Duration.Companion.milliseconds
-import kotlin.time.Duration.Companion.seconds
-
-@ExtendWith(VertxExtension::class)
-class Type2StateManagerJsonRpcClientTest {
- private lateinit var wiremock: WireMockServer
- private lateinit var type2StateManagerJsonRpcClient: Type2StateManagerJsonRpcClient
- private lateinit var meterRegistry: SimpleMeterRegistry
-
- private fun wiremockStubForPost(response: JsonObject) {
- wiremock.stubFor(
- post("/")
- .withHeader("Content-Type", containing("application/json"))
- .willReturn(
- ok()
- .withHeader("Content-type", "application/json")
- .withBody(response.toString().toByteArray())
- )
- )
- }
-
- @BeforeEach
- fun setup(vertx: Vertx) {
- wiremock = WireMockServer(options().dynamicPort())
- wiremock.start()
- meterRegistry = SimpleMeterRegistry()
-
- val rpcClientFactory = VertxHttpJsonRpcClientFactory(vertx, meterRegistry)
- val vertxHttpJsonRpcClient = rpcClientFactory.createWithRetries(
- URI("http://127.0.0.1:" + wiremock.port()).toURL(),
- methodsToRetry = Type2StateManagerJsonRpcClient.retryableMethods,
- retryConfig = RequestRetryConfig(
- maxRetries = 2u,
- timeout = 2.seconds,
- 10.milliseconds,
- 1u
- )
- )
- val clientConfig = Type2StateManagerJsonRpcClient.Config(
- requestRetry = RequestRetryConfig(
- maxRetries = 1u,
- backoffDelay = 10.milliseconds
- ),
- zkStateManagerVersion = "0.0.1-dev-3e607237"
- )
- type2StateManagerJsonRpcClient =
- Type2StateManagerJsonRpcClient(
- vertxHttpJsonRpcClient,
- clientConfig
- )
- }
-
- @AfterEach
- fun tearDown(vertx: Vertx) {
- val vertxStopFuture = vertx.close()
- wiremock.stop()
- vertxStopFuture.get()
- }
-
- @Test
- fun getZkEVMStateMerkleProof() {
- val testFilePath = "../../../testdata/type2state-manager/state-proof.json"
- val json = jacksonObjectMapper().readTree(Path.of(testFilePath).toFile())
- val zkStateManagerVersion = json.get("zkStateManagerVersion").asText()
- val zkStateMerkleProof = json.get("zkStateMerkleProof") as ArrayNode
- val zkParentStateRootHash = json.get("zkParentStateRootHash").asText()
- val zkEndStateRootHash = json.get("zkEndStateRootHash").asText()
- val startBlockNumber = 50L
- val endBlockNumber = 100L
-
- val response =
- JsonObject.of(
- "jsonrpc",
- "2.0",
- "id",
- "1",
- "result",
- mapOf(
- "zkParentStateRootHash" to zkParentStateRootHash,
- "zkEndStateRootHash" to zkEndStateRootHash,
- "zkStateMerkleProof" to zkStateMerkleProof,
- "zkStateManagerVersion" to zkStateManagerVersion
- )
- )
-
- wiremockStubForPost(response)
-
- val resultFuture =
- type2StateManagerJsonRpcClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber),
- UInt64.valueOf(endBlockNumber)
- )
- resultFuture.get()
-
- assertThat(resultFuture)
- .isCompletedWithValue(
- Ok(
- GetZkEVMStateMerkleProofResponse(
- zkStateManagerVersion = zkStateManagerVersion,
- zkStateMerkleProof = zkStateMerkleProof,
- zkParentStateRootHash = Bytes32.fromHexString(zkParentStateRootHash),
- zkEndStateRootHash = Bytes32.fromHexString(zkEndStateRootHash)
- )
- )
- )
- }
-
- @Test
- fun error_block_missing_getZkEVMStateMerkleProof() {
- val errorMessage = "BLOCK_MISSING_IN_CHAIN - block 1 is missing"
- val startBlockNumber = 50L
- val endBlockNumber = 100L
-
- val response =
- JsonObject.of(
- "jsonrpc",
- "2.0",
- "id",
- "1",
- "error",
- mapOf("code" to "-32600", "message" to errorMessage)
- )
-
- wiremockStubForPost(response)
-
- val resultFuture =
- type2StateManagerJsonRpcClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber),
- UInt64.valueOf(endBlockNumber)
- )
- resultFuture.get()
-
- assertThat(resultFuture)
- .isCompletedWithValue(
- Err(ErrorResponse(Type2StateManagerErrorType.BLOCK_MISSING_IN_CHAIN, errorMessage))
- )
- }
-
- @Test
- fun error_unsupported_version_getZkEVMStateMerkleProof() {
- val startBlockNumber = 50L
- val endBlockNumber = 100L
- val errorMessage = "UNSUPPORTED_VERSION"
- val errorData =
- mapOf(
- "requestedVersion" to "0.0.1-dev-3e607217",
- "supportedVersion" to "0.0.1-dev-3e607237"
- )
-
- val response =
- JsonObject.of(
- "jsonrpc",
- "2.0",
- "id",
- "1",
- "error",
- mapOf("code" to "-32602", "message" to errorMessage, "data" to errorData)
- )
-
- wiremockStubForPost(response)
-
- val resultFuture =
- type2StateManagerJsonRpcClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber),
- UInt64.valueOf(endBlockNumber)
- )
- resultFuture.get()
-
- assertThat(resultFuture)
- .isCompletedWithValue(
- Err(
- ErrorResponse(
- Type2StateManagerErrorType.UNSUPPORTED_VERSION,
- "$errorMessage: $errorData"
- )
- )
- )
- }
-
- @Test
- fun error_unknown_getZkEVMStateMerkleProof() {
- val startBlockNumber = 50L
- val endBlockNumber = 100L
- val errorMessage = "BRA_BRA_BRA_SOME_UNKNOWN_ERROR"
- val errorData = mapOf("xyz" to "1234", "abc" to 100L)
-
- val response =
- JsonObject.of(
- "jsonrpc",
- "2.0",
- "id",
- "1",
- "error",
- mapOf("code" to "-999", "message" to errorMessage, "data" to errorData)
- )
-
- wiremockStubForPost(response)
-
- val resultFuture =
- type2StateManagerJsonRpcClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber),
- UInt64.valueOf(endBlockNumber)
- )
- resultFuture.get()
-
- assertThat(resultFuture)
- .isCompletedWithValue(
- Err(ErrorResponse(Type2StateManagerErrorType.UNKNOWN, "$errorMessage: $errorData"))
- )
- }
-
- @Test
- fun error_invalid_start_end_block_number_getZkEVMStateMerkleProof() {
- val startBlockNumber = 100L
- val endBlockNumber = 50L
-
- assertThatIllegalArgumentException()
- .isThrownBy {
- val resultFuture =
- type2StateManagerJsonRpcClient.rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber),
- UInt64.valueOf(endBlockNumber)
- )
- resultFuture.get()
- }
- .withMessageContaining("endBlockNumber must be greater than startBlockNumber")
- }
-}
diff --git a/coordinator/core/build.gradle b/coordinator/core/build.gradle
index d4691ded3..12693c881 100644
--- a/coordinator/core/build.gradle
+++ b/coordinator/core/build.gradle
@@ -9,6 +9,7 @@ dependencies {
api project(':jvm-libs:linea:core:domain-models')
api project(':jvm-libs:linea:core:metrics')
api project(':jvm-libs:linea:core:long-running-service')
+ api project(':jvm-libs:linea:clients:linea-state-manager')
api project(':jvm-libs:linea:core:traces')
api project(':jvm-libs:generic:errors')
api project(':jvm-libs:generic:extensions:kotlin')
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BatchExecutionProverRequestResponse.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BatchExecutionProverRequestResponse.kt
index 900827efd..8ddd48320 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BatchExecutionProverRequestResponse.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BatchExecutionProverRequestResponse.kt
@@ -1,6 +1,7 @@
package net.consensys.zkevm.coordinator.clients
-import net.consensys.zkevm.domain.BlockInterval
+import build.linea.clients.GetZkEVMStateMerkleProofResponse
+import build.linea.domain.BlockInterval
import net.consensys.zkevm.toULong
import tech.pegasys.teku.ethereum.executionclient.schema.ExecutionPayloadV1
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BlobCompressionProverRequestResponse.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BlobCompressionProverRequestResponse.kt
index a2dc06ca6..2bf32a247 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BlobCompressionProverRequestResponse.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/BlobCompressionProverRequestResponse.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.coordinator.clients
-import net.consensys.zkevm.domain.BlockInterval
-import net.consensys.zkevm.domain.BlockIntervals
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.ethereum.coordination.blob.ShnarfResult
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerClient.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerClient.kt
deleted file mode 100644
index 37a16477b..000000000
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/Type2StateManagerClient.kt
+++ /dev/null
@@ -1,28 +0,0 @@
-package net.consensys.zkevm.coordinator.clients
-
-import com.fasterxml.jackson.databind.node.ArrayNode
-import com.github.michaelbull.result.Result
-import net.consensys.linea.errors.ErrorResponse
-import org.apache.tuweni.bytes.Bytes32
-import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
-
-enum class Type2StateManagerErrorType {
- UNKNOWN,
- UNSUPPORTED_VERSION,
- BLOCK_MISSING_IN_CHAIN
-}
-
-data class GetZkEVMStateMerkleProofResponse(
- val zkStateMerkleProof: ArrayNode,
- val zkParentStateRootHash: Bytes32,
- val zkEndStateRootHash: Bytes32,
- val zkStateManagerVersion: String
-)
-
-interface Type2StateManagerClient {
- fun rollupGetZkEVMStateMerkleProof(
- startBlockNumber: UInt64,
- endBlockNumber: UInt64
- ): SafeFuture>>
-}
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/smartcontract/LineaRollupSmartContractClient.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/smartcontract/LineaRollupSmartContractClient.kt
index 638029329..a319411b4 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/smartcontract/LineaRollupSmartContractClient.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/coordinator/clients/smartcontract/LineaRollupSmartContractClient.kt
@@ -7,7 +7,8 @@ import net.consensys.zkevm.ethereum.gaspricing.GasPriceCaps
import tech.pegasys.teku.infrastructure.async.SafeFuture
enum class LineaContractVersion : Comparable {
- V5 // "EIP4844 multiple blobs per tx support - version in all networks",
+ V5, // "EIP4844 multiple blobs per tx support - version in all networks"
+ V6 // more efficient data submission and new events for state recovery
}
interface LineaRollupSmartContractClientReadOnly : ContractVersionProvider {
@@ -30,12 +31,14 @@ interface LineaRollupSmartContractClientReadOnly : ContractVersionProvider
/**
- * Get the final block number of a shnarf
+ * Checks if a blob's shnarf is already present in the smart contract
+ * If present, it means the blob was sent to L1 and accepted by the smart contract.
+ * Note: the shnarf may be cleaned up after finalization in the future.
*/
- fun findBlobFinalBlockNumberByShnarf(
+ fun isBlobShnarfPresent(
blockParameter: BlockParameter = BlockParameter.Tag.LATEST,
shnarf: ByteArray
- ): SafeFuture
+ ): SafeFuture
/**
* Gets Type 2 StateRootHash for Linea Block
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Aggregation.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Aggregation.kt
index 90f765de5..417ff331a 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Aggregation.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Aggregation.kt
@@ -1,8 +1,10 @@
package net.consensys.zkevm.domain
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
import kotlinx.datetime.Instant
-typealias BlobsToAggregate = BlockIntervalData
+typealias BlobsToAggregate = BlockInterval
/**
* Represents an Aggregation request to the Prover
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Blob.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Blob.kt
index aee335bf6..7bd83f835 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Blob.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Blob.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.domain
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
import kotlinx.datetime.Instant
import net.consensys.linea.CommonDomainFunctions
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Conflation.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Conflation.kt
index 70bf2dd47..2a3f6fd51 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Conflation.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/Conflation.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.domain
+import build.linea.domain.BlockInterval
import kotlinx.datetime.Instant
import net.consensys.isSortedBy
import net.consensys.linea.CommonDomainFunctions
@@ -7,50 +8,6 @@ import net.consensys.linea.traces.TracesCounters
import net.consensys.zkevm.toULong
import tech.pegasys.teku.ethereum.executionclient.schema.ExecutionPayloadV1
-/**
- * Represents a block interval, with inclusive start and end block numbers
- * @property startBlockNumber start block number, inclusive
- * @property endBlockNumber end block number, inclusive
- */
-interface BlockInterval {
- val startBlockNumber: ULong
- val endBlockNumber: ULong
- val blocksRange: ULongRange
- get() = startBlockNumber..endBlockNumber
- fun intervalString(): String = CommonDomainFunctions.blockIntervalString(startBlockNumber, endBlockNumber)
-
- companion object {
- fun between(
- startBlockNumber: ULong,
- endBlockNumber: ULong
- ): BlockInterval {
- return BlockIntervalData(startBlockNumber, endBlockNumber)
- }
- }
-}
-
-fun List.toBlockIntervalsString(): String {
- return this.joinToString(
- separator = ", ",
- prefix = "[",
- postfix = "]$size",
- transform = BlockInterval::intervalString
- )
-}
-
-fun List.filterOutWithEndBlockNumberBefore(
- endBlockNumberInclusive: ULong
-): List {
- return this.filter { int -> int.endBlockNumber > endBlockNumberInclusive }
-}
-
-fun assertConsecutiveIntervals(intervals: List) {
- require(intervals.isSortedBy { it.startBlockNumber }) { "Intervals must be sorted by startBlockNumber" }
- require(intervals.zipWithNext().all { (a, b) -> a.endBlockNumber + 1u == b.startBlockNumber }) {
- "Intervals must be consecutive: intervals=${intervals.toBlockIntervalsString()}"
- }
-}
-
data class BlocksConflation(
val blocks: List,
val conflationResult: ConflationCalculationResult
@@ -148,52 +105,3 @@ data class BlockCounters(
"blockRLPEncoded=${blockRLPEncoded.size}bytes)"
}
}
-
-/**
- * Represents a block interval
- * @property startBlockNumber starting block number inclusive
- * @property endBlockNumber ending block number inclusive
- */
-data class BlockIntervalData(
- override val startBlockNumber: ULong,
- override val endBlockNumber: ULong
-) : BlockInterval
-
-/**
- * Data class that represents sequential blocks intervals for either Conflations, Blobs or Aggregations.
- * Example:
- * conflations: [100..110], [111..120], [121..130] --> BlockIntervals(100, [110, 120, 130])
- * Blobs with
- * Blob1 2 conflations above: [100..110], [111..120]
- * Blob2 1 conflations: [121..130]
- * --> BlockIntervals(100, [120, 130])
- */
-data class BlockIntervals(
- val startingBlockNumber: ULong,
- val upperBoundaries: List
-) {
- // This default constructor is to avoid the parse error when deserializing
- constructor() : this(0UL, listOf())
-
- fun toIntervalList(): List {
- var previousBlockNumber = startingBlockNumber
- val intervals = mutableListOf()
- upperBoundaries.forEach {
- intervals.add(BlockIntervalData(previousBlockNumber, it))
- previousBlockNumber = it + 1u
- }
- return intervals
- }
-
- fun toBlockInterval(): BlockInterval {
- return BlockIntervalData(startingBlockNumber, upperBoundaries.last())
- }
-}
-
-fun List.toBlockIntervals(): BlockIntervals {
- require(isNotEmpty()) { "BlockIntervals list must not be empty" }
- return BlockIntervals(
- startingBlockNumber = first().startBlockNumber,
- upperBoundaries = map { it.endBlockNumber }
- )
-}
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/ProofIndex.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/ProofIndex.kt
index e6130f88c..bdd1b408b 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/ProofIndex.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/domain/ProofIndex.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.domain
+import build.linea.domain.BlockInterval
+
data class ProofIndex(
override val startBlockNumber: ULong,
override val endBlockNumber: ULong,
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByTargetBlockNumbers.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByTargetBlockNumbers.kt
index 0c711fde6..865f60b62 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByTargetBlockNumbers.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByTargetBlockNumbers.kt
@@ -1,8 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.aggregation
+import build.linea.domain.BlockInterval
import net.consensys.zkevm.domain.BlobCounters
import net.consensys.zkevm.domain.BlobsToAggregate
-import net.consensys.zkevm.domain.BlockInterval
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorService.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorService.kt
index f093b6b37..f12a90350 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorService.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorService.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.aggregation
+import build.linea.domain.BlockIntervals
+import build.linea.domain.toBlockIntervalsString
import io.vertx.core.Vertx
import kotlinx.datetime.Clock
import net.consensys.linea.metrics.MetricsFacade
@@ -10,10 +12,8 @@ import net.consensys.zkevm.coordinator.clients.ProofAggregationProverClientV2
import net.consensys.zkevm.domain.Aggregation
import net.consensys.zkevm.domain.BlobAndBatchCounters
import net.consensys.zkevm.domain.BlobsToAggregate
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.domain.ProofsToAggregate
-import net.consensys.zkevm.domain.toBlockIntervalsString
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
import net.consensys.zkevm.persistence.AggregationsRepository
import org.apache.logging.log4j.LogManager
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinator.kt
index bda75ba7a..ae05a29cb 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinator.kt
@@ -1,5 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
+import build.linea.domain.toBlockIntervalsString
import io.vertx.core.Handler
import io.vertx.core.Vertx
import kotlinx.datetime.Instant
@@ -11,10 +14,7 @@ import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
import net.consensys.zkevm.domain.Blob
import net.consensys.zkevm.domain.BlobRecord
import net.consensys.zkevm.domain.BlobStatus
-import net.consensys.zkevm.domain.BlockInterval
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ConflationCalculationResult
-import net.consensys.zkevm.domain.toBlockIntervalsString
import net.consensys.zkevm.ethereum.coordination.conflation.BlobCreationHandler
import net.consensys.zkevm.persistence.BlobsRepository
import org.apache.logging.log4j.LogManager
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofHandler.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofHandler.kt
index 62c7426e2..9b95f5de9 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofHandler.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofHandler.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.domain.BlockInterval
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
-import net.consensys.zkevm.domain.BlockInterval
import tech.pegasys.teku.infrastructure.async.SafeFuture
data class BlobCompressionProofUpdate(
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobShnarfCalulator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobShnarfCalulator.kt
index 27344a3eb..9350bea78 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobShnarfCalulator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobShnarfCalulator.kt
@@ -1,11 +1,11 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.domain.BlockIntervals
import net.consensys.decodeHex
import net.consensys.encodeHex
import net.consensys.linea.blob.GoNativeBlobShnarfCalculator
import net.consensys.linea.blob.GoNativeShnarfCalculatorFactory
import net.consensys.linea.blob.ShnarfCalculatorVersion
-import net.consensys.zkevm.domain.BlockIntervals
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import java.util.Base64
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculator.kt
index 62d44bd55..954743e0f 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculator.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
import com.github.michaelbull.result.getOrThrow
import com.github.michaelbull.result.map
import com.github.michaelbull.result.onSuccess
@@ -7,8 +9,6 @@ import com.github.michaelbull.result.recover
import com.github.michaelbull.result.runCatching
import net.consensys.encodeHex
import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.domain.BlockInterval
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.persistence.BlobsRepository
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/SafeBlockProvider.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/SafeBlockProvider.kt
index 86a5390dc..88f0ed6ba 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/SafeBlockProvider.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/blockcreation/SafeBlockProvider.kt
@@ -2,15 +2,38 @@ package net.consensys.zkevm.ethereum.coordination.blockcreation
import kotlinx.datetime.Instant
import net.consensys.zkevm.toULong
-import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.ethereum.executionclient.schema.ExecutionPayloadV1
import tech.pegasys.teku.infrastructure.async.SafeFuture
data class BlockHeaderSummary(
val number: ULong,
- val hash: Bytes32,
+ val hash: ByteArray,
val timestamp: Instant
-)
+) {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as BlockHeaderSummary
+
+ if (number != other.number) return false
+ if (!hash.contentEquals(other.hash)) return false
+ if (timestamp != other.timestamp) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = number.hashCode()
+ result = 31 * result + hash.contentHashCode()
+ result = 31 * result + timestamp.hashCode()
+ return result
+ }
+
+ override fun toString(): String {
+ return "BlockHeaderSummary(number=$number, hash=${hash.contentToString()}, timestamp=$timestamp)"
+ }
+}
interface SafeBlockProvider {
fun getLatestSafeBlock(): SafeFuture
@@ -18,7 +41,7 @@ interface SafeBlockProvider {
return getLatestSafeBlock().thenApply {
BlockHeaderSummary(
it.blockNumber.toULong(),
- it.blockHash,
+ it.blockHash.toArray(),
Instant.fromEpochSeconds(it.timestamp.longValue())
)
}
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinator.kt
index c390c7f8e..8f990dd5e 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinator.kt
@@ -38,7 +38,7 @@ class BlockToBatchSubmissionCoordinator(
.rollupGetTracesCounters(
BlockNumberAndHash(
blockEvent.executionPayload.blockNumber.toULong(),
- blockEvent.executionPayload.blockHash
+ blockEvent.executionPayload.blockHash.toArray()
)
)
.thenCompose { result ->
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculator.kt
index faf5f3318..912ee571d 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculator.kt
@@ -1,11 +1,11 @@
package net.consensys.zkevm.ethereum.coordination.conflation
+import build.linea.domain.toBlockIntervalsString
import net.consensys.linea.CommonDomainFunctions.blockIntervalString
import net.consensys.zkevm.domain.Blob
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
-import net.consensys.zkevm.domain.toBlockIntervalsString
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ProofGeneratingConflationHandlerImpl.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ProofGeneratingConflationHandlerImpl.kt
index a5bb3dd56..01539ca61 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ProofGeneratingConflationHandlerImpl.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ProofGeneratingConflationHandlerImpl.kt
@@ -58,7 +58,7 @@ class ProofGeneratingConflationHandlerImpl(
private fun conflationToProofCreation(conflation: BlocksConflation): SafeFuture<*> {
val blockNumbersAndHash = conflation.blocks.map {
- BlockNumberAndHash(it.blockNumber.toULong(), it.blockHash)
+ BlockNumberAndHash(it.blockNumber.toULong(), it.blockHash.toArray())
}
val blockIntervalString = conflation.conflationResult.intervalString()
return tracesProductionCoordinator
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinator.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinator.kt
index 8a70fd554..2d37ad07d 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinator.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinator.kt
@@ -1,8 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.conflation
+import build.linea.clients.GetZkEVMStateMerkleProofResponse
import net.consensys.linea.BlockNumberAndHash
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
import tech.pegasys.teku.infrastructure.async.SafeFuture
data class BlocksTracesConflated(
diff --git a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImpl.kt b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImpl.kt
index 4e45ccef9..5ac363dea 100644
--- a/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImpl.kt
+++ b/coordinator/core/src/main/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImpl.kt
@@ -1,5 +1,9 @@
package net.consensys.zkevm.ethereum.coordination.conflation
+import build.linea.clients.GetStateMerkleProofRequest
+import build.linea.clients.GetZkEVMStateMerkleProofResponse
+import build.linea.clients.StateManagerClientV1
+import build.linea.domain.BlockInterval
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.Result
@@ -9,19 +13,15 @@ import com.github.michaelbull.result.mapBoth
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.errors.ErrorResponse
import net.consensys.zkevm.coordinator.clients.GenerateTracesResponse
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
import net.consensys.zkevm.coordinator.clients.TracesConflationClientV1
import net.consensys.zkevm.coordinator.clients.TracesServiceErrorType
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerClient
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerErrorType
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
import tech.pegasys.teku.infrastructure.async.SafeFuture
-import tech.pegasys.teku.infrastructure.unsigned.UInt64
class TracesConflationCoordinatorImpl(
private val tracesConflationClient: TracesConflationClientV1,
- private val zkStateClient: Type2StateManagerClient
+ private val zkStateClient: StateManagerClientV1
) : TracesConflationCoordinator {
private val log: Logger = LogManager.getLogger(this::class.java)
private fun requestConflatedTraces(
@@ -45,22 +45,7 @@ class TracesConflationCoordinatorImpl(
startBlockNumber: ULong,
endBlockNumber: ULong
): SafeFuture {
- return zkStateClient
- .rollupGetZkEVMStateMerkleProof(
- UInt64.valueOf(startBlockNumber.toLong()),
- UInt64.valueOf(endBlockNumber.toLong())
- )
- .thenCompose { result:
- Result>
- ->
- result.mapBoth(
- { SafeFuture.completedFuture(it) },
- {
- log.error("Type2State manager returned error={}", it)
- SafeFuture.failedFuture(it.asException("State manager error: ${it.message}"))
- }
- )
- }
+ return zkStateClient.makeRequest(GetStateMerkleProofRequest(BlockInterval(startBlockNumber, endBlockNumber)))
}
override fun conflateExecutionTraces(
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/coordination/blob/GoBackedCalculateShnarfCalculatorTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/coordination/blob/GoBackedCalculateShnarfCalculatorTest.kt
index b7eb3890e..b1db17fc0 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/coordination/blob/GoBackedCalculateShnarfCalculatorTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/coordination/blob/GoBackedCalculateShnarfCalculatorTest.kt
@@ -1,10 +1,10 @@
package net.consensys.zkevm.coordination.blob
+import build.linea.domain.BlockIntervals
import net.consensys.decodeHex
import net.consensys.encodeHex
import net.consensys.linea.blob.CalculateShnarfResult
import net.consensys.linea.blob.GoNativeBlobShnarfCalculator
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.ethereum.coordination.blob.GoBackedBlobShnarfCalculator
import net.consensys.zkevm.ethereum.coordination.blob.ShnarfResult
import org.apache.tuweni.bytes.Bytes32
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByDeadlineTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByDeadlineTest.kt
index c867d5bcf..7bc2b05a0 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByDeadlineTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/AggregationTriggerCalculatorByDeadlineTest.kt
@@ -2,11 +2,11 @@ package net.consensys.zkevm.ethereum.coordination.aggregation
import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.zkevm.domain.BlobCounters
import net.consensys.zkevm.domain.BlobsToAggregate
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockHeaderSummary
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.mockito.kotlin.mock
@@ -36,7 +36,7 @@ class AggregationTriggerCalculatorByDeadlineTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = latestBlockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = latestBlockTimestamp
)
)
@@ -184,7 +184,7 @@ class AggregationTriggerCalculatorByDeadlineTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = 15u,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = firstBlobEndBlockTimeStamp
)
)
@@ -217,7 +217,7 @@ class AggregationTriggerCalculatorByDeadlineTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = latestBlockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = latestBlockTimestamp
)
)
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/GlobalAggregationCalculatorTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/GlobalAggregationCalculatorTest.kt
index b4d8efa51..b75be3890 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/GlobalAggregationCalculatorTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/GlobalAggregationCalculatorTest.kt
@@ -2,6 +2,7 @@ package net.consensys.zkevm.ethereum.coordination.aggregation
import io.micrometer.core.instrument.simple.SimpleMeterRegistry
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.FakeFixedClock
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.metrics.micrometer.MicrometerMetricsFacade
@@ -9,7 +10,6 @@ import net.consensys.zkevm.domain.BlobCounters
import net.consensys.zkevm.domain.BlobsToAggregate
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockHeaderSummary
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
@@ -112,7 +112,7 @@ class GlobalAggregationCalculatorTest {
BlockHeaderSummary(
number = blockNumber,
timestamp = timestamp,
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
)
@@ -319,7 +319,7 @@ class GlobalAggregationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = blob2.endBlockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = blob2.endBlockTimestamp
)
)
@@ -429,7 +429,7 @@ class GlobalAggregationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = 30u,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = Instant.fromEpochMilliseconds(250)
)
)
@@ -497,7 +497,7 @@ class GlobalAggregationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = firstBlobEndBlockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = firstBlobEndBlockTimeStamp
)
)
@@ -521,7 +521,7 @@ class GlobalAggregationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = secondBlobEndBlockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = secondBlobEndTimestamp
)
)
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorServiceTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorServiceTest.kt
index 7782b7ba7..4698897bd 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorServiceTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/aggregation/ProofAggregationCoordinatorServiceTest.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.ethereum.coordination.aggregation
+import build.linea.domain.BlockIntervals
import io.vertx.core.Vertx
import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
@@ -9,7 +10,6 @@ import net.consensys.zkevm.domain.Aggregation
import net.consensys.zkevm.domain.BlobAndBatchCounters
import net.consensys.zkevm.domain.BlobCounters
import net.consensys.zkevm.domain.BlobsToAggregate
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ProofIndex
import net.consensys.zkevm.domain.ProofToFinalize
import net.consensys.zkevm.domain.ProofsToAggregate
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculatorTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculatorTest.kt
index 29db3eed4..cb7faf0a0 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculatorTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/blob/RollingBlobShnarfCalculatorTest.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.domain.BlockIntervals
import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.persistence.BlobsRepository
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinatorTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinatorTest.kt
index e2fe654d3..17451a100 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinatorTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/BlockToBatchSubmissionCoordinatorTest.kt
@@ -24,7 +24,7 @@ import org.mockito.kotlin.verify
import org.mockito.kotlin.whenever
import tech.pegasys.teku.ethereum.executionclient.schema.randomExecutionPayload
import tech.pegasys.teku.infrastructure.async.SafeFuture
-import kotlin.time.Duration.Companion.milliseconds
+import kotlin.time.Duration.Companion.seconds
import kotlin.time.toJavaDuration
@ExtendWith(VertxExtension::class)
@@ -49,7 +49,7 @@ class BlockToBatchSubmissionCoordinatorTest {
it.rollupGetTracesCounters(
BlockNumberAndHash(
randomExecutionPayload.blockNumber.toULong(),
- randomExecutionPayload.blockHash
+ randomExecutionPayload.blockHash.toArray()
)
)
).thenReturn(
@@ -79,7 +79,7 @@ class BlockToBatchSubmissionCoordinatorTest {
val captor = argumentCaptor()
Assertions.assertThat(blockToBatchSubmissionCoordinator.acceptBlock(baseBlock)).isCompleted
- Awaitility.await().atMost(200.milliseconds.toJavaDuration())
+ Awaitility.await().atMost(1.seconds.toJavaDuration())
.untilAsserted {
verify(testLogger, times(1)).error(
eq("Failed to conflate block={} errorMessage={}"),
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationCalculatorByTimeDeadlineTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationCalculatorByTimeDeadlineTest.kt
index 598fef472..abd7c6476 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationCalculatorByTimeDeadlineTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationCalculatorByTimeDeadlineTest.kt
@@ -2,12 +2,12 @@ package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.linea.traces.fakeTracesCountersV1
import net.consensys.zkevm.domain.BlockCounters
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockHeaderSummary
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
import org.apache.logging.log4j.Logger
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.awaitility.Awaitility.await
import org.junit.jupiter.api.BeforeEach
@@ -39,7 +39,7 @@ class ConflationCalculatorByTimeDeadlineTest {
BlockHeaderSummary(
number = 1u,
timestamp = blockTimestamp,
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
}
@@ -96,7 +96,7 @@ class ConflationCalculatorByTimeDeadlineTest {
BlockHeaderSummary(
number = 3u,
timestamp = block2Timestamp.plus(config.conflationDeadline).plus(5.seconds),
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
)
@@ -117,7 +117,7 @@ class ConflationCalculatorByTimeDeadlineTest {
BlockHeaderSummary(
number = 2u,
timestamp = block2Timestamp,
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
)
@@ -148,7 +148,7 @@ class ConflationCalculatorByTimeDeadlineTest {
BlockHeaderSummary(
number = 2u,
timestamp = block2Timestamp,
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
)
@@ -171,7 +171,7 @@ class ConflationCalculatorByTimeDeadlineTest {
BlockHeaderSummary(
number = 1u,
timestamp = blockTimestamp,
- hash = Bytes32.random()
+ hash = ByteArrayExt.random32()
)
)
)
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationServiceImplTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationServiceImplTest.kt
index 48b440e9e..13e6428d7 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationServiceImplTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/ConflationServiceImplTest.kt
@@ -43,9 +43,9 @@ class ConflationServiceImplTest {
@Test
fun `emits event with blocks when calculator emits conflation`() {
- val payload1 = executionPayloadV1(blockNumber = 1)
- val payload2 = executionPayloadV1(blockNumber = 2)
- val payload3 = executionPayloadV1(blockNumber = 3)
+ val payload1 = executionPayloadV1(blockNumber = 1, gasLimit = 20_000_000UL)
+ val payload2 = executionPayloadV1(blockNumber = 2, gasLimit = 20_000_000UL)
+ val payload3 = executionPayloadV1(blockNumber = 3, gasLimit = 20_000_000UL)
val payload1Time = Instant.parse("2021-01-01T00:00:00Z")
val payloadCounters1 = BlockCounters(
blockNumber = 1UL,
@@ -100,7 +100,7 @@ class ConflationServiceImplTest {
val moduleTracesCounter = 10u
assertThat(numberOfBlocks % numberOfThreads).isEqualTo(0)
val expectedConflations = numberOfBlocks / conflationBlockLimit.toInt() - 1
- val blocks = (1..numberOfBlocks).map { executionPayloadV1(blockNumber = it.toLong()) }
+ val blocks = (1..numberOfBlocks).map { executionPayloadV1(blockNumber = it.toLong(), gasLimit = 20_000_000UL) }
val fixedTracesCounters = fakeTracesCountersV1(moduleTracesCounter)
val blockTime = Instant.parse("2021-01-01T00:00:00Z")
val conflationEvents = mutableListOf()
@@ -152,7 +152,7 @@ class ConflationServiceImplTest {
val failingConflationCalculator: TracesConflationCalculator = mock()
whenever(failingConflationCalculator.newBlock(any())).thenThrow(expectedException)
conflationService = ConflationServiceImpl(failingConflationCalculator, mock(defaultAnswer = RETURNS_DEEP_STUBS))
- val block = executionPayloadV1(blockNumber = 1)
+ val block = executionPayloadV1(blockNumber = 1, gasLimit = 20_000_000UL)
assertThatThrownBy {
conflationService.newBlock(
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculatorTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculatorTest.kt
index 18cd80b62..8d9fd5adc 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculatorTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlobAwareConflationCalculatorTest.kt
@@ -1,6 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.FakeFixedClock
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.linea.traces.fakeTracesCountersV1
@@ -12,7 +13,6 @@ import net.consensys.zkevm.ethereum.coordination.blob.BlobCompressor
import net.consensys.zkevm.ethereum.coordination.blob.FakeBlobCompressor
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockHeaderSummary
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
@@ -357,7 +357,7 @@ class GlobalBlobAwareConflationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = block5Counters.blockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = block5Counters.blockTimestamp
)
)
@@ -903,7 +903,7 @@ class GlobalBlobAwareConflationCalculatorTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = block5Counters.blockNumber,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = block5Counters.blockTimestamp
)
)
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlockConflationCalculatorIntTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlockConflationCalculatorIntTest.kt
index db9825f5c..63cd0c0ce 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlockConflationCalculatorIntTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/GlobalBlockConflationCalculatorIntTest.kt
@@ -1,6 +1,7 @@
package net.consensys.zkevm.ethereum.coordination.conflation
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.FakeFixedClock
import net.consensys.linea.metrics.MetricsFacade
import net.consensys.linea.traces.TracesCountersV1
@@ -11,7 +12,6 @@ import net.consensys.zkevm.domain.ConflationTrigger
import net.consensys.zkevm.ethereum.coordination.blob.FakeBlobCompressor
import net.consensys.zkevm.ethereum.coordination.blockcreation.BlockHeaderSummary
import net.consensys.zkevm.ethereum.coordination.blockcreation.SafeBlockProvider
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
@@ -277,7 +277,7 @@ class GlobalBlockConflationCalculatorIntTest {
SafeFuture.completedFuture(
BlockHeaderSummary(
number = 7uL,
- hash = Bytes32.random(),
+ hash = ByteArrayExt.random32(),
timestamp = block7Counters.blockTimestamp
)
)
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImplTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImplTest.kt
index 6634fcd26..2efc77cde 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImplTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/TracesConflationCoordinatorImplTest.kt
@@ -3,8 +3,8 @@ package net.consensys.zkevm.ethereum.coordination.conflation
import com.github.michaelbull.result.Err
import com.github.michaelbull.result.Ok
import com.github.michaelbull.result.getError
+import net.consensys.ByteArrayExt
import net.consensys.linea.BlockNumberAndHash
-import org.apache.tuweni.bytes.Bytes32
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
@@ -20,12 +20,12 @@ class TracesConflationCoordinatorImplTest {
@Test fun `assertBlocksList return error when there is gap in block numbers`() {
val blocks = listOf(
- BlockNumberAndHash(15u, Bytes32.random()),
- BlockNumberAndHash(14u, Bytes32.random()),
+ BlockNumberAndHash(15u, ByteArrayExt.random32()),
+ BlockNumberAndHash(14u, ByteArrayExt.random32()),
// gap on 13
- BlockNumberAndHash(12u, Bytes32.random()),
- BlockNumberAndHash(11u, Bytes32.random()),
- BlockNumberAndHash(10u, Bytes32.random())
+ BlockNumberAndHash(12u, ByteArrayExt.random32()),
+ BlockNumberAndHash(11u, ByteArrayExt.random32()),
+ BlockNumberAndHash(10u, ByteArrayExt.random32())
)
assertBlocksList(blocks).let { result ->
assertThat(result).isInstanceOf(Err::class.java)
@@ -36,10 +36,10 @@ class TracesConflationCoordinatorImplTest {
@Test fun `assertBlocksList returns sorted list when all blocks are consecutive`() {
val blocks = listOf(
- BlockNumberAndHash(13u, Bytes32.random()),
- BlockNumberAndHash(12u, Bytes32.random()),
- BlockNumberAndHash(11u, Bytes32.random()),
- BlockNumberAndHash(10u, Bytes32.random())
+ BlockNumberAndHash(13u, ByteArrayExt.random32()),
+ BlockNumberAndHash(12u, ByteArrayExt.random32()),
+ BlockNumberAndHash(11u, ByteArrayExt.random32()),
+ BlockNumberAndHash(10u, ByteArrayExt.random32())
)
assertThat(assertBlocksList(blocks)).isEqualTo(Ok(blocks.sortedBy { it.number }))
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/upgrade/SwitchAwareConflationHandlerTest.kt b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/upgrade/SwitchAwareConflationHandlerTest.kt
index d3f32cc29..fe15ed83a 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/upgrade/SwitchAwareConflationHandlerTest.kt
+++ b/coordinator/core/src/test/kotlin/net/consensys/zkevm/ethereum/coordination/conflation/upgrade/SwitchAwareConflationHandlerTest.kt
@@ -29,7 +29,7 @@ class SwitchAwareConflationHandlerTest {
private fun generateArbitraryConflation(startBlockNumber: ULong, blocksLong: UInt): BlocksConflation {
val executionPayloads = (startBlockNumber..startBlockNumber + blocksLong).map {
- executionPayloadV1(blockNumber = it.toLong())
+ executionPayloadV1(blockNumber = it.toLong(), gasLimit = 20_000_000UL)
}
val conflationCalculationResult = ConflationCalculationResult(
diff --git a/coordinator/core/src/testFixtures/kotlin/net/consensys/zkevm/domain/BlobRecord.kt b/coordinator/core/src/testFixtures/kotlin/net/consensys/zkevm/domain/BlobRecord.kt
index 97c7c4b8d..226309458 100644
--- a/coordinator/core/src/testFixtures/kotlin/net/consensys/zkevm/domain/BlobRecord.kt
+++ b/coordinator/core/src/testFixtures/kotlin/net/consensys/zkevm/domain/BlobRecord.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.domain
+import build.linea.domain.BlockIntervals
import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
import net.consensys.linea.blob.ShnarfCalculatorVersion
diff --git a/coordinator/ethereum/blob-submitter/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/finalization/BlobAndAggregationFinalizationIntTest.kt b/coordinator/ethereum/blob-submitter/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/finalization/BlobAndAggregationFinalizationIntTest.kt
index 305e06cb6..32e93d859 100644
--- a/coordinator/ethereum/blob-submitter/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/finalization/BlobAndAggregationFinalizationIntTest.kt
+++ b/coordinator/ethereum/blob-submitter/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/finalization/BlobAndAggregationFinalizationIntTest.kt
@@ -61,12 +61,12 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
vertx: Vertx,
smartContractVersion: LineaContractVersion
) {
- if (smartContractVersion != LineaContractVersion.V5) {
+ if (listOf(LineaContractVersion.V5, LineaContractVersion.V6).contains(smartContractVersion).not()) {
// V6 with prover V3 is soon comming, so we will need to update/extend this test setup
- throw IllegalArgumentException("Only V5 contract version is supported")
+ throw IllegalArgumentException("unsupported contract version=$smartContractVersion!")
}
val rollupDeploymentFuture = ContractsManager.get()
- .deployLineaRollup(numberOfOperators = 2, contractVersion = LineaContractVersion.V5)
+ .deployLineaRollup(numberOfOperators = 2, contractVersion = smartContractVersion)
// load files from FS while smc deploy
loadBlobsAndAggregations(
blobsResponsesDir = "$testDataDir/compression/responses",
@@ -90,10 +90,10 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
)
aggregationsRepository = AggregationsRepositoryImpl(PostgresAggregationsDao(sqlClient, fakeClock))
- val lineaRollupContractForDataSubmissionV4 = rollupDeploymentResult.rollupOperatorClient
+ val lineaRollupContractForDataSubmissionV5 = rollupDeploymentResult.rollupOperatorClient
@Suppress("DEPRECATION")
- val alreadySubmittedBlobFilter = L1ShnarfBasedAlreadySubmittedBlobsFilter(lineaRollupContractForDataSubmissionV4)
+ val alreadySubmittedBlobFilter = L1ShnarfBasedAlreadySubmittedBlobsFilter(lineaRollupContractForDataSubmissionV5)
blobSubmissionCoordinator = run {
BlobSubmissionCoordinator.create(
@@ -105,7 +105,7 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
),
blobsRepository = blobsRepository,
aggregationsRepository = aggregationsRepository,
- lineaSmartContractClient = lineaRollupContractForDataSubmissionV4,
+ lineaSmartContractClient = lineaRollupContractForDataSubmissionV5,
alreadySubmittedBlobsFilter = alreadySubmittedBlobFilter,
gasPriceCapProvider = FakeGasPriceCapProvider(),
vertx = vertx,
@@ -115,9 +115,10 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
aggregationFinalizationCoordinator = run {
lineaRollupContractForAggregationSubmission = MakeFileDelegatedContractsManager
- .connectToLineaRollupContractV5(
+ .connectToLineaRollupContract(
rollupDeploymentResult.contractAddress,
rollupDeploymentResult.rollupOperators[1].txManager
+
)
val aggregationSubmitter = AggregationSubmitterImpl(
@@ -141,15 +142,6 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
}
}
- @Test
- @Timeout(3, timeUnit = TimeUnit.MINUTES)
- fun `submission works with contract V5`(
- vertx: Vertx,
- testContext: VertxTestContext
- ) {
- testSubmission(vertx, testContext, LineaContractVersion.V5)
- }
-
private fun testSubmission(
vertx: Vertx,
testContext: VertxTestContext,
@@ -180,4 +172,22 @@ class BlobAndAggregationFinalizationIntTest : CleanDbTestSuiteParallel() {
testContext.completeNow()
}.whenException(testContext::failNow)
}
+
+ @Test
+ @Timeout(3, timeUnit = TimeUnit.MINUTES)
+ fun `submission works with contract V5`(
+ vertx: Vertx,
+ testContext: VertxTestContext
+ ) {
+ testSubmission(vertx, testContext, LineaContractVersion.V5)
+ }
+
+ @Test
+ @Timeout(3, timeUnit = TimeUnit.MINUTES)
+ fun `submission works with contract V6`(
+ vertx: Vertx,
+ testContext: VertxTestContext
+ ) {
+ testSubmission(vertx, testContext, LineaContractVersion.V6)
+ }
}
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinator.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinator.kt
index 8ea0e8c3b..29d63f93d 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinator.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinator.kt
@@ -1,5 +1,8 @@
package net.consensys.zkevm.ethereum.submission
+import build.linea.domain.filterOutWithEndBlockNumberBefore
+import build.linea.domain.toBlockIntervals
+import build.linea.domain.toBlockIntervalsString
import io.vertx.core.Vertx
import kotlinx.datetime.Clock
import net.consensys.linea.async.AsyncFilter
@@ -9,9 +12,6 @@ import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartCon
import net.consensys.zkevm.domain.Aggregation
import net.consensys.zkevm.domain.BlobRecord
import net.consensys.zkevm.domain.ProofToFinalize
-import net.consensys.zkevm.domain.filterOutWithEndBlockNumberBefore
-import net.consensys.zkevm.domain.toBlockIntervals
-import net.consensys.zkevm.domain.toBlockIntervalsString
import net.consensys.zkevm.ethereum.gaspricing.GasPriceCapProvider
import net.consensys.zkevm.persistence.AggregationsRepository
import net.consensys.zkevm.persistence.BlobsRepository
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelper.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelper.kt
index a3b42c042..792fea54e 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelper.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelper.kt
@@ -1,9 +1,9 @@
package net.consensys.zkevm.ethereum.submission
-import net.consensys.zkevm.domain.BlockInterval
-import net.consensys.zkevm.domain.BlockIntervals
-import net.consensys.zkevm.domain.assertConsecutiveIntervals
-import net.consensys.zkevm.domain.toBlockIntervalsString
+import build.linea.domain.BlockInterval
+import build.linea.domain.BlockIntervals
+import build.linea.domain.assertConsecutiveIntervals
+import build.linea.domain.toBlockIntervalsString
import org.apache.logging.log4j.Logger
import kotlin.math.min
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmitterAsEIP4844MultipleBlobsPerTx.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmitterAsEIP4844MultipleBlobsPerTx.kt
index bc46ae570..2c8a920bf 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmitterAsEIP4844MultipleBlobsPerTx.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmitterAsEIP4844MultipleBlobsPerTx.kt
@@ -1,8 +1,8 @@
package net.consensys.zkevm.ethereum.submission
+import build.linea.domain.toBlockIntervalsString
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.domain.toBlockIntervalsString
import net.consensys.zkevm.ethereum.gaspricing.GasPriceCapProvider
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobsGrouperForSubmission.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobsGrouperForSubmission.kt
index 59132767c..1fe37f605 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobsGrouperForSubmission.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/BlobsGrouperForSubmission.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.submission
+import build.linea.domain.BlockIntervals
import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.domain.BlockIntervals
fun interface BlobsGrouperForSubmission {
fun chunkBlobs(
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/ContractUpgradeSubmissionLatchFilter.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/ContractUpgradeSubmissionLatchFilter.kt
index 61fb6ba26..a3710ea08 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/ContractUpgradeSubmissionLatchFilter.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/ContractUpgradeSubmissionLatchFilter.kt
@@ -1,9 +1,9 @@
package net.consensys.zkevm.ethereum.submission
+import build.linea.domain.BlockInterval
import net.consensys.linea.async.AsyncFilter
import net.consensys.zkevm.coordinator.clients.smartcontract.ContractVersionProvider
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaContractVersion
-import net.consensys.zkevm.domain.BlockInterval
import org.apache.logging.log4j.LogManager
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.util.concurrent.atomic.AtomicBoolean
diff --git a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilter.kt b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilter.kt
index e96216d17..d0915147c 100644
--- a/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilter.kt
+++ b/coordinator/ethereum/blob-submitter/src/main/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilter.kt
@@ -20,7 +20,15 @@ class L1ShnarfBasedAlreadySubmittedBlobsFilter(
blobRecords: List
): SafeFuture> {
val blockByShnarfQueryFutures = blobRecords.map { blobRecord ->
- lineaRollup.findBlobFinalBlockNumberByShnarf(shnarf = blobRecord.expectedShnarf)
+ lineaRollup
+ .isBlobShnarfPresent(shnarf = blobRecord.expectedShnarf)
+ .thenApply { isShnarfPresent ->
+ if (isShnarfPresent) {
+ blobRecord.endBlockNumber
+ } else {
+ null
+ }
+ }
}
return SafeFuture.collectAll(blockByShnarfQueryFutures.stream())
diff --git a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinatorTest.kt b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinatorTest.kt
index ae36c1c0a..bdb155097 100644
--- a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinatorTest.kt
+++ b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionCoordinatorTest.kt
@@ -1,15 +1,15 @@
package net.consensys.zkevm.ethereum.submission
+import build.linea.domain.BlockIntervals
+import build.linea.domain.toBlockIntervals
import io.vertx.core.Vertx
import net.consensys.FakeFixedClock
import net.consensys.linea.async.AsyncFilter
import net.consensys.zkevm.coordinator.clients.smartcontract.BlockAndNonce
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
import net.consensys.zkevm.domain.BlobRecord
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.createAggregation
import net.consensys.zkevm.domain.createBlobRecords
-import net.consensys.zkevm.domain.toBlockIntervals
import net.consensys.zkevm.persistence.AggregationsRepository
import net.consensys.zkevm.persistence.BlobsRepository
import org.apache.logging.log4j.LogManager
diff --git a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelperKtTest.kt b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelperKtTest.kt
index 055c543b8..6b9b5d602 100644
--- a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelperKtTest.kt
+++ b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/BlobSubmissionHelperKtTest.kt
@@ -1,7 +1,7 @@
package net.consensys.zkevm.ethereum.submission
-import net.consensys.zkevm.domain.BlockIntervalData
-import net.consensys.zkevm.domain.BlockIntervals
+import build.linea.domain.BlockIntervalData
+import build.linea.domain.BlockIntervals
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
diff --git a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilterTest.kt b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilterTest.kt
index 992c52b8a..d4eaf1efe 100644
--- a/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilterTest.kt
+++ b/coordinator/ethereum/blob-submitter/src/test/kotlin/net/consensys/zkevm/ethereum/submission/L1ShnarfBasedAlreadySubmittedBlobsFilterTest.kt
@@ -26,13 +26,13 @@ class L1ShnarfBasedAlreadySubmittedBlobsFilterTest {
val blobs = listOf(blob1, blob2, blob3, blob4, blob5, blob6, blob7)
val l1SmcClient = mock()
- whenever(l1SmcClient.findBlobFinalBlockNumberByShnarf(any(), any()))
+ whenever(l1SmcClient.isBlobShnarfPresent(any(), any()))
.thenAnswer { invocation ->
val shnarfQueried = invocation.getArgument(1)
val endBlockNumber = when {
- shnarfQueried.contentEquals(blob3.expectedShnarf) -> blob3.endBlockNumber
- shnarfQueried.contentEquals(blob5.expectedShnarf) -> blob5.endBlockNumber
- else -> null
+ shnarfQueried.contentEquals(blob3.expectedShnarf) -> true
+ shnarfQueried.contentEquals(blob5.expectedShnarf) -> true
+ else -> false
}
SafeFuture.completedFuture(endBlockNumber)
}
diff --git a/coordinator/ethereum/finalization-monitor/src/test/kotlin/net/consensys/zkevm/ethereum/finalization/FinalizationMonitorImplTest.kt b/coordinator/ethereum/finalization-monitor/src/test/kotlin/net/consensys/zkevm/ethereum/finalization/FinalizationMonitorImplTest.kt
index 7b5ec28fe..eadb01924 100644
--- a/coordinator/ethereum/finalization-monitor/src/test/kotlin/net/consensys/zkevm/ethereum/finalization/FinalizationMonitorImplTest.kt
+++ b/coordinator/ethereum/finalization-monitor/src/test/kotlin/net/consensys/zkevm/ethereum/finalization/FinalizationMonitorImplTest.kt
@@ -3,6 +3,8 @@ package net.consensys.zkevm.ethereum.finalization
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import io.vertx.junit5.VertxTestContext
+import net.consensys.ByteArrayExt
+import net.consensys.encodeHex
import net.consensys.linea.BlockParameter
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClientReadOnly
import org.apache.tuweni.bytes.Bytes
@@ -52,15 +54,15 @@ class FinalizationMonitorImplTest {
fun start_startsPollingProcess(vertx: Vertx, testContext: VertxTestContext) {
whenever(contractMock.finalizedL2BlockNumber(eq(BlockParameter.Tag.FINALIZED)))
.thenReturn(SafeFuture.completedFuture(expectedBlockNumber))
- val expectedStateRootHash = Bytes32.random()
+ val expectedStateRootHash = ByteArrayExt.random32()
whenever(contractMock.blockStateRootHash(any(), any()))
- .thenReturn(SafeFuture.completedFuture(expectedStateRootHash.toArray()))
+ .thenReturn(SafeFuture.completedFuture(expectedStateRootHash))
- val expectedBlockHash = Bytes32.random()
+ val expectedBlockHash = ByteArrayExt.random32()
val mockBlockByNumberReturn = mock()
val mockBlock = mock()
whenever(mockBlockByNumberReturn.block).thenReturn(mockBlock)
- whenever(mockBlock.hash).thenReturn(expectedBlockHash.toHexString())
+ whenever(mockBlock.hash).thenReturn(expectedBlockHash.encodeHex())
whenever(mockL2Client.ethGetBlockByNumber(any(), any()).sendAsync()).thenAnswer {
SafeFuture.completedFuture(mockBlockByNumberReturn)
}
diff --git a/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/L1EventQuerierIntegrationTest.kt b/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/L1EventQuerierIntegrationTest.kt
index 2898d4df2..40dc15dc0 100644
--- a/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/L1EventQuerierIntegrationTest.kt
+++ b/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/L1EventQuerierIntegrationTest.kt
@@ -8,6 +8,7 @@ import net.consensys.linea.contract.LineaRollup
import net.consensys.linea.contract.LineaRollupAsyncFriendly
import net.consensys.toBigInteger
import net.consensys.toULong
+import net.consensys.zkevm.coordinator.clients.smartcontract.LineaContractVersion
import net.consensys.zkevm.ethereum.ContractsManager
import net.consensys.zkevm.ethereum.Web3jClientManager
import org.apache.tuweni.bytes.Bytes32
@@ -35,7 +36,9 @@ class L1EventQuerierIntegrationTest {
@BeforeEach
fun beforeEach() {
- val deploymentResult = ContractsManager.get().deployLineaRollup().get()
+ val deploymentResult = ContractsManager.get()
+ .deployLineaRollup(contractVersion = LineaContractVersion.V5)
+ .get()
testLineaRollupContractAddress = deploymentResult.contractAddress
web3Client = Web3jClientManager.l1Client
@Suppress("DEPRECATION")
diff --git a/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/MessageServiceIntegrationTest.kt b/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/MessageServiceIntegrationTest.kt
index e99a2e9a3..949dcfb35 100644
--- a/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/MessageServiceIntegrationTest.kt
+++ b/coordinator/ethereum/message-anchoring/src/integrationTest/kotlin/net.consensys.zkevm.ethereum.coordination.messageanchoring/MessageServiceIntegrationTest.kt
@@ -10,6 +10,7 @@ import net.consensys.linea.contract.LineaRollup
import net.consensys.linea.contract.LineaRollupAsyncFriendly
import net.consensys.toBigInteger
import net.consensys.toULong
+import net.consensys.zkevm.coordinator.clients.smartcontract.LineaContractVersion
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
import net.consensys.zkevm.ethereum.ContractsManager
import net.consensys.zkevm.ethereum.Web3jClientManager
@@ -47,7 +48,9 @@ class MessageServiceIntegrationTest {
private lateinit var l2Contract: L2MessageService
private fun deployContracts() {
- val l1RollupDeploymentResult = ContractsManager.get().deployLineaRollup().get()
+ val l1RollupDeploymentResult = ContractsManager.get()
+ .deployLineaRollup(contractVersion = LineaContractVersion.V5)
+ .get()
@Suppress("DEPRECATION")
l1ContractLegacyClient = l1RollupDeploymentResult.rollupOperatorClientLegacy
l1ContractClient = l1RollupDeploymentResult.rollupOperatorClient
diff --git a/coordinator/ethereum/models-helper/build.gradle b/coordinator/ethereum/models-helper/build.gradle
index db0af2225..10cd8f109 100644
--- a/coordinator/ethereum/models-helper/build.gradle
+++ b/coordinator/ethereum/models-helper/build.gradle
@@ -5,6 +5,7 @@ plugins {
dependencies {
api (project(":coordinator:core"))
api project(":jvm-libs:linea:teku-execution-client")
+ implementation "tech.pegasys.teku.internal:bytes:${libs.versions.teku.get()}"
implementation "org.hyperledger.besu:besu-datatypes:${libs.versions.besu.get()}"
implementation "org.hyperledger.besu:evm:${libs.versions.besu.get()}"
implementation "org.hyperledger.besu.internal:rlp:${libs.versions.besu.get()}"
diff --git a/coordinator/ethereum/test-utils/build.gradle b/coordinator/ethereum/test-utils/build.gradle
index 4f0b4e040..7a6848fff 100644
--- a/coordinator/ethereum/test-utils/build.gradle
+++ b/coordinator/ethereum/test-utils/build.gradle
@@ -14,6 +14,8 @@ dependencies {
implementation("org.web3j:core:${libs.versions.web3j.get()}") {
exclude group: 'org.slf4j', module: 'slf4j-nop'
}
+ implementation "com.sksamuel.hoplite:hoplite-core:${libs.versions.hoplite.get()}"
+ implementation "com.sksamuel.hoplite:hoplite-toml:${libs.versions.hoplite.get()}"
implementation "com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}"
implementation "com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}"
implementation "com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}"
diff --git a/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/ContractsManager.kt b/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/ContractsManager.kt
index 171aa9ff7..5f7925db6 100644
--- a/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/ContractsManager.kt
+++ b/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/ContractsManager.kt
@@ -1,11 +1,14 @@
package net.consensys.zkevm.ethereum
+import com.sksamuel.hoplite.ConfigLoaderBuilder
+import com.sksamuel.hoplite.addFileSource
import net.consensys.linea.contract.AsyncFriendlyTransactionManager
import net.consensys.linea.contract.EIP1559GasProvider
import net.consensys.linea.contract.LineaRollupAsyncFriendly
import net.consensys.linea.contract.StaticGasProvider
import net.consensys.linea.contract.l1.Web3JLineaRollupSmartContractClient
import net.consensys.linea.contract.l2.L2MessageServiceGasLimitEstimate
+import net.consensys.linea.testing.filesystem.findPathTo
import net.consensys.linea.web3j.SmartContractErrors
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaContractVersion
import net.consensys.zkevm.coordinator.clients.smartcontract.LineaRollupSmartContractClient
@@ -45,17 +48,17 @@ interface ContractsManager {
*/
fun deployLineaRollup(
numberOfOperators: Int = 1,
- contractVersion: LineaContractVersion = LineaContractVersion.V5
+ contractVersion: LineaContractVersion
): SafeFuture
fun deployL2MessageService(): SafeFuture
fun deployRollupAndL2MessageService(
dataCompressionAndProofAggregationMigrationBlock: ULong = 1000UL,
- numberOfOperators: Int = 1
+ numberOfOperators: Int = 1,
+ l1ContractVersion: LineaContractVersion = LineaContractVersion.V5
): SafeFuture
- @Deprecated("Use connectToLineaRollupContractV5 instead")
fun connectToLineaRollupContract(
contractAddress: String,
transactionManager: AsyncFriendlyTransactionManager,
@@ -64,18 +67,8 @@ interface ContractsManager {
maxFeePerGas = 11_000uL,
maxPriorityFeePerGas = 10_000uL,
gasLimit = 1_000_000uL
- )
- ): LineaRollupAsyncFriendly
-
- fun connectToLineaRollupContractV5(
- contractAddress: String,
- transactionManager: AsyncFriendlyTransactionManager,
- gasProvider: ContractEIP1559GasProvider = StaticGasProvider(
- L1AccountManager.chainId,
- maxFeePerGas = 11_000uL,
- maxPriorityFeePerGas = 10_000uL,
- gasLimit = 1_000_000uL
- )
+ ),
+ smartContractErrors: SmartContractErrors? = null
): LineaRollupSmartContractClient
fun connectL2MessageService(
@@ -94,14 +87,35 @@ interface ContractsManager {
smartContractErrors: SmartContractErrors = emptyMap()
): L2MessageServiceGasLimitEstimate
+ @Deprecated("Use connectToLineaRollupContract instead")
+ fun connectToLineaRollupContractLegacy(
+ contractAddress: String,
+ transactionManager: AsyncFriendlyTransactionManager,
+ gasProvider: ContractEIP1559GasProvider = StaticGasProvider(
+ L1AccountManager.chainId,
+ maxFeePerGas = 11_000uL,
+ maxPriorityFeePerGas = 10_000uL,
+ gasLimit = 1_000_000uL
+ )
+ ): LineaRollupAsyncFriendly
+
companion object {
- // TODO: think of better get the Instance
fun get(): ContractsManager = MakeFileDelegatedContractsManager
}
}
object MakeFileDelegatedContractsManager : ContractsManager {
val log = LoggerFactory.getLogger(MakeFileDelegatedContractsManager::class.java)
+ val lineaRollupContractErrors = findPathTo("config")!!
+ .resolve("common/smart-contract-errors.toml")
+ .let { filePath ->
+ data class ErrorsFile(val smartContractErrors: Map)
+ ConfigLoaderBuilder.default()
+ .addFileSource(filePath.toAbsolutePath().toString())
+ .build()
+ .loadConfigOrThrow()
+ .smartContractErrors
+ }
override fun deployLineaRollup(
numberOfOperators: Int,
@@ -133,12 +147,14 @@ object MakeFileDelegatedContractsManager : ContractsManager {
AccountTransactionManager(it, L1AccountManager.getTransactionManager(it))
}
- @Suppress("DEPRECATION")
val rollupOperatorClient = connectToLineaRollupContract(
deploymentResult.address,
- accountsTxManagers.first().txManager
+ accountsTxManagers.first().txManager,
+ smartContractErrors = lineaRollupContractErrors
)
- val rollupOperatorClientV4 = connectToLineaRollupContractV5(
+
+ @Suppress("DEPRECATION")
+ val rollupOperatorClientLegacy = connectToLineaRollupContractLegacy(
deploymentResult.address,
accountsTxManagers.first().txManager
)
@@ -147,8 +163,8 @@ object MakeFileDelegatedContractsManager : ContractsManager {
contractDeploymentAccount = contractDeploymentAccount,
contractDeploymentBlockNumber = deploymentResult.blockNumber.toULong(),
rollupOperators = accountsTxManagers,
- rollupOperatorClientLegacy = rollupOperatorClient,
- rollupOperatorClient = rollupOperatorClientV4
+ rollupOperatorClientLegacy = rollupOperatorClientLegacy,
+ rollupOperatorClient = rollupOperatorClient
)
}
return future
@@ -174,9 +190,10 @@ object MakeFileDelegatedContractsManager : ContractsManager {
override fun deployRollupAndL2MessageService(
dataCompressionAndProofAggregationMigrationBlock: ULong,
- numberOfOperators: Int
+ numberOfOperators: Int,
+ l1ContractVersion: LineaContractVersion
): SafeFuture {
- return deployLineaRollup(numberOfOperators)
+ return deployLineaRollup(numberOfOperators, l1ContractVersion)
.thenCombine(deployL2MessageService()) { lineaRollupDeploymentResult, l2MessageServiceDeploymentResult ->
ContactsDeploymentResult(
lineaRollup = lineaRollupDeploymentResult,
@@ -185,32 +202,18 @@ object MakeFileDelegatedContractsManager : ContractsManager {
}
}
- @Deprecated("Use connectToLineaRollupContractV5 instead")
override fun connectToLineaRollupContract(
contractAddress: String,
transactionManager: AsyncFriendlyTransactionManager,
- gasProvider: ContractEIP1559GasProvider
- ): LineaRollupAsyncFriendly {
- return LineaRollupAsyncFriendly.load(
- contractAddress,
- Web3jClientManager.l1Client,
- transactionManager,
- gasProvider,
- emptyMap()
- )
- }
-
- override fun connectToLineaRollupContractV5(
- contractAddress: String,
- transactionManager: AsyncFriendlyTransactionManager,
- gasProvider: ContractEIP1559GasProvider
+ gasProvider: ContractEIP1559GasProvider,
+ smartContractErrors: SmartContractErrors?
): LineaRollupSmartContractClient {
return Web3JLineaRollupSmartContractClient.load(
contractAddress,
Web3jClientManager.l1Client,
transactionManager,
gasProvider,
- emptyMap()
+ smartContractErrors ?: lineaRollupContractErrors
)
}
@@ -229,4 +232,33 @@ object MakeFileDelegatedContractsManager : ContractsManager {
smartContractErrors
)
}
+
+ @Deprecated("Use connectToLineaRollupContract instead")
+ override fun connectToLineaRollupContractLegacy(
+ contractAddress: String,
+ transactionManager: AsyncFriendlyTransactionManager,
+ gasProvider: ContractEIP1559GasProvider
+ ): LineaRollupAsyncFriendly {
+ return LineaRollupAsyncFriendly.load(
+ contractAddress,
+ Web3jClientManager.l1Client,
+ transactionManager,
+ gasProvider,
+ emptyMap()
+ )
+ }
+}
+
+fun main() {
+ data class SmartContractErrors(val smartContractErrors: Map)
+
+ val lineaRollupContractErrors = findPathTo("config")!!
+ .resolve("common/smart-contract-errors.toml")
+ .let { filePath ->
+ ConfigLoaderBuilder.default()
+ .addFileSource(filePath.toAbsolutePath().toString())
+ .build()
+ .loadConfigOrThrow()
+ }
+ println(lineaRollupContractErrors)
}
diff --git a/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/MakefileContractDeploymentHelper.kt b/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/MakefileContractDeploymentHelper.kt
index c616b0ea9..56607668f 100644
--- a/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/MakefileContractDeploymentHelper.kt
+++ b/coordinator/ethereum/test-utils/src/main/kotlin/net/consensys/zkevm/ethereum/MakefileContractDeploymentHelper.kt
@@ -143,10 +143,10 @@ fun makeDeployLineaRollup(
// "HARDHAT_DISABLE_CACHE" to "true"
)
deploymentPrivateKey?.let { env["DEPLOYMENT_PRIVATE_KEY"] = it }
- val command = if (contractVersion == LineaContractVersion.V5) {
- "make deploy-linea-rollup"
- } else {
- throw IllegalArgumentException("Unsupported contract version: $contractVersion")
+ val command = when (contractVersion) {
+ LineaContractVersion.V5 -> "make deploy-linea-rollup"
+ LineaContractVersion.V6 -> "make deploy-linea-rollup-v6"
+ else -> throw IllegalArgumentException("Unsupported contract version: $contractVersion")
}
return deployContract(
diff --git a/coordinator/persistence/aggregation/build.gradle b/coordinator/persistence/aggregation/build.gradle
index bbe37e23d..e56724529 100644
--- a/coordinator/persistence/aggregation/build.gradle
+++ b/coordinator/persistence/aggregation/build.gradle
@@ -11,6 +11,7 @@ dependencies {
testImplementation(project(":coordinator:persistence:batch"))
testImplementation(project(":coordinator:persistence:blob"))
testImplementation(project(":coordinator:persistence:db-common"))
+ testImplementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
testImplementation(testFixtures(project(":coordinator:core")))
testImplementation(testFixtures(project(":coordinator:persistence:db-common")))
testImplementation(testFixtures(project(":jvm-libs:generic:persistence:db")))
diff --git a/coordinator/persistence/aggregation/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/aggregation/AggregationsPostgresDaoTest.kt b/coordinator/persistence/aggregation/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/aggregation/AggregationsPostgresDaoTest.kt
index e0e439292..4fe0878e6 100644
--- a/coordinator/persistence/aggregation/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/aggregation/AggregationsPostgresDaoTest.kt
+++ b/coordinator/persistence/aggregation/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/aggregation/AggregationsPostgresDaoTest.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.persistence.dao.aggregation
+import build.linea.domain.BlockIntervals
import io.vertx.junit5.VertxExtension
import io.vertx.sqlclient.Row
import io.vertx.sqlclient.RowSet
@@ -15,7 +16,6 @@ import net.consensys.zkevm.domain.BlobAndBatchCounters
import net.consensys.zkevm.domain.BlobCounters
import net.consensys.zkevm.domain.BlobRecord
import net.consensys.zkevm.domain.BlobStatus
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ProofToFinalize
import net.consensys.zkevm.domain.createAggregation
import net.consensys.zkevm.domain.createBatch
diff --git a/coordinator/persistence/aggregation/src/main/kotlin/net/consensys/zkevm/persistence/dao/aggregation/PostgresAggregationsDao.kt b/coordinator/persistence/aggregation/src/main/kotlin/net/consensys/zkevm/persistence/dao/aggregation/PostgresAggregationsDao.kt
index c900f2753..6b5ccba9a 100644
--- a/coordinator/persistence/aggregation/src/main/kotlin/net/consensys/zkevm/persistence/dao/aggregation/PostgresAggregationsDao.kt
+++ b/coordinator/persistence/aggregation/src/main/kotlin/net/consensys/zkevm/persistence/dao/aggregation/PostgresAggregationsDao.kt
@@ -1,5 +1,7 @@
package net.consensys.zkevm.persistence.dao.aggregation
+import build.linea.domain.BlockIntervals
+import build.linea.domain.toBlockIntervalsString
import io.vertx.core.Future
import io.vertx.pgclient.PgException
import io.vertx.sqlclient.Row
@@ -13,9 +15,7 @@ import net.consensys.zkevm.coordinator.clients.prover.serialization.ProofToFinal
import net.consensys.zkevm.domain.Aggregation
import net.consensys.zkevm.domain.BlobAndBatchCounters
import net.consensys.zkevm.domain.BlobCounters
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ProofToFinalize
-import net.consensys.zkevm.domain.toBlockIntervalsString
import net.consensys.zkevm.persistence.db.DuplicatedRecordException
import net.consensys.zkevm.persistence.db.SQLQueryLogger
import org.apache.logging.log4j.Level
diff --git a/coordinator/persistence/aggregation/src/test/kotlin/net/consensys/zkevm/persistence/dao/aggregation/RetryingPostgresAggregationsDaoTest.kt b/coordinator/persistence/aggregation/src/test/kotlin/net/consensys/zkevm/persistence/dao/aggregation/RetryingPostgresAggregationsDaoTest.kt
index 3a706901e..7fc455d11 100644
--- a/coordinator/persistence/aggregation/src/test/kotlin/net/consensys/zkevm/persistence/dao/aggregation/RetryingPostgresAggregationsDaoTest.kt
+++ b/coordinator/persistence/aggregation/src/test/kotlin/net/consensys/zkevm/persistence/dao/aggregation/RetryingPostgresAggregationsDaoTest.kt
@@ -1,12 +1,12 @@
package net.consensys.zkevm.persistence.dao.aggregation
+import build.linea.domain.BlockIntervals
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import kotlinx.datetime.Instant
import net.consensys.FakeFixedClock
import net.consensys.zkevm.domain.Aggregation
import net.consensys.zkevm.domain.BlobAndBatchCounters
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.blobCounters
import net.consensys.zkevm.domain.createAggregation
import net.consensys.zkevm.domain.createProofToFinalize
diff --git a/coordinator/persistence/blob/build.gradle b/coordinator/persistence/blob/build.gradle
index d199412e7..9124e8f1f 100644
--- a/coordinator/persistence/blob/build.gradle
+++ b/coordinator/persistence/blob/build.gradle
@@ -13,6 +13,7 @@ dependencies {
testImplementation("com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}")
testImplementation("com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}")
testImplementation("com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}")
+ testImplementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
testImplementation("tech.pegasys.teku.internal:executionclient:${libs.versions.teku.get()}")
testImplementation(project(":coordinator:persistence:db-common"))
testImplementation(testFixtures(project(":coordinator:core")))
diff --git a/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinatorIntTest.kt b/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinatorIntTest.kt
index 390c04d10..43b29f7fe 100644
--- a/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinatorIntTest.kt
+++ b/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/ethereum/coordination/blob/BlobCompressionProofCoordinatorIntTest.kt
@@ -1,5 +1,8 @@
package net.consensys.zkevm.ethereum.coordination.blob
+import build.linea.clients.GetZkEVMStateMerkleProofResponse
+import build.linea.clients.StateManagerClientV1
+import build.linea.domain.BlockIntervals
import com.fasterxml.jackson.databind.node.ArrayNode
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.michaelbull.result.Ok
@@ -8,14 +11,12 @@ import io.vertx.junit5.VertxExtension
import io.vertx.junit5.VertxTestContext
import kotlinx.datetime.Clock
import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
import net.consensys.linea.traces.TracesCountersV1
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.coordinator.clients.BlobCompressionProofRequest
import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
-import net.consensys.zkevm.coordinator.clients.GetZkEVMStateMerkleProofResponse
-import net.consensys.zkevm.coordinator.clients.Type2StateManagerClient
import net.consensys.zkevm.domain.Blob
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
import net.consensys.zkevm.domain.createBlobRecord
@@ -74,7 +75,7 @@ class BlobCompressionProofCoordinatorIntTest : CleanDbTestSuiteParallel() {
)
private var expectedBlobCompressionProofResponse: BlobCompressionProof? = null
- private val zkStateClientMock = mock()
+ private val zkStateClientMock = mock()
private val blobCompressionProverClientMock = mock()
private val blobZkStateProvider = mock()
private lateinit var mockShnarfCalculator: BlobShnarfCalculator
@@ -95,15 +96,15 @@ class BlobCompressionProofCoordinatorIntTest : CleanDbTestSuiteParallel() {
connection = sqlClient,
clock = fixedClock
)
- whenever(zkStateClientMock.rollupGetZkEVMStateMerkleProof(any(), any()))
+ whenever(zkStateClientMock.rollupGetStateMerkleProof(any()))
.thenAnswer {
SafeFuture.completedFuture(
Ok(
GetZkEVMStateMerkleProofResponse(
zkStateManagerVersion = zkStateManagerVersion,
zkStateMerkleProof = zkStateMerkleProof,
- zkParentStateRootHash = Bytes32.random(),
- zkEndStateRootHash = Bytes32.random()
+ zkParentStateRootHash = ByteArrayExt.random32(),
+ zkEndStateRootHash = ByteArrayExt.random32()
)
)
)
diff --git a/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/blob/BlobsPostgresDaoTest.kt b/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/blob/BlobsPostgresDaoTest.kt
index b65fe6966..c5e20c028 100644
--- a/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/blob/BlobsPostgresDaoTest.kt
+++ b/coordinator/persistence/blob/src/integrationTest/kotlin/net/consensys/zkevm/persistence/dao/blob/BlobsPostgresDaoTest.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.persistence.dao.blob
+import build.linea.domain.BlockIntervals
import io.vertx.junit5.VertxExtension
import io.vertx.sqlclient.PreparedQuery
import io.vertx.sqlclient.Row
@@ -14,7 +15,6 @@ import net.consensys.trimToSecondPrecision
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.domain.BlobRecord
import net.consensys.zkevm.domain.BlobStatus
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.createBlobRecord
import net.consensys.zkevm.persistence.db.DbHelper
import net.consensys.zkevm.persistence.db.DuplicatedRecordException
diff --git a/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/ethereum/coordinator/blob/BlobCompressionProofCoordinatorTest.kt b/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/ethereum/coordinator/blob/BlobCompressionProofCoordinatorTest.kt
index cdcd2e0ac..416356d30 100644
--- a/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/ethereum/coordinator/blob/BlobCompressionProofCoordinatorTest.kt
+++ b/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/ethereum/coordinator/blob/BlobCompressionProofCoordinatorTest.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.ethereum.coordinator.blob
+import build.linea.domain.BlockIntervals
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import net.consensys.FakeFixedClock
@@ -8,7 +9,6 @@ import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.coordinator.clients.BlobCompressionProofRequest
import net.consensys.zkevm.coordinator.clients.BlobCompressionProverClientV2
import net.consensys.zkevm.domain.Blob
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.ConflationCalculationResult
import net.consensys.zkevm.domain.ConflationTrigger
import net.consensys.zkevm.ethereum.coordination.blob.BlobCompressionProofCoordinator
diff --git a/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/persistence/dao/blob/RetryingBlobsPostgresDaoTest.kt b/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/persistence/dao/blob/RetryingBlobsPostgresDaoTest.kt
index 2e663249d..1339ff8ad 100644
--- a/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/persistence/dao/blob/RetryingBlobsPostgresDaoTest.kt
+++ b/coordinator/persistence/blob/src/test/kotlin/net/consensys/zkevm/persistence/dao/blob/RetryingBlobsPostgresDaoTest.kt
@@ -1,5 +1,6 @@
package net.consensys.zkevm.persistence.dao.blob
+import build.linea.domain.BlockIntervals
import io.vertx.core.Vertx
import io.vertx.junit5.VertxExtension
import net.consensys.FakeFixedClock
@@ -7,7 +8,6 @@ import net.consensys.setFirstByteToZero
import net.consensys.trimToSecondPrecision
import net.consensys.zkevm.coordinator.clients.BlobCompressionProof
import net.consensys.zkevm.domain.BlobStatus
-import net.consensys.zkevm.domain.BlockIntervals
import net.consensys.zkevm.domain.createBlobRecord
import net.consensys.zkevm.persistence.db.PersistenceRetryer
import org.junit.jupiter.api.BeforeEach
diff --git a/docker/compose-local-dev-traces-v2.overrides.yml b/docker/compose-local-dev-traces-v2.overrides.yml
index e4397f965..1670c503a 100644
--- a/docker/compose-local-dev-traces-v2.overrides.yml
+++ b/docker/compose-local-dev-traces-v2.overrides.yml
@@ -7,7 +7,7 @@ services:
JAVA_OPTS: -XX:+UnlockExperimentalVMOptions -XX:-UseG1GC -XX:+UseZGC
volumes:
- ../config/common/traces-limits-besu-v2.toml:/var/lib/besu/traces-limits.toml:ro
-
+
l2-node-besu:
environment:
JAVA_OPTS: -XX:+UnlockExperimentalVMOptions -XX:-UseG1GC -XX:+UseZGC
@@ -27,7 +27,7 @@ services:
hostname: traces-node-v2
container_name: traces-node-v2
image: consensys/linea-besu:24.9-delivery32
- profiles: [ "l2", "l2-bc", "debug" ]
+ profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
traces-node-v2-plugin-downloader:
condition: service_completed_successfully
@@ -52,7 +52,7 @@ services:
- /bin/bash
- -c
- |
- /opt/besu/bin/besu \
+ /opt/besu/bin/besu-untuned \
--config-file=/var/lib/besu/traces-node-v2.config.toml \
--genesis-file=/var/lib/besu/genesis.json \
--bootnodes=enode://14408801a444dafc44afbccce2eb755f902aed3b5743fed787b3c790e021fef28b8c827ed896aa4e8fb46e22bd67c39f994a73768b4b382f8597b0d44370e15d@11.11.11.101:30303
diff --git a/docker/compose.yml b/docker/compose.yml
index fa8bccecc..77b85e0aa 100644
--- a/docker/compose.yml
+++ b/docker/compose.yml
@@ -24,7 +24,7 @@ services:
hostname: sequencer
container_name: sequencer
image: consensys/linea-besu:${SEQUENCER_TAG:-24.10-delivery34}
- profiles: [ "l2", "l2-bc", "debug" ]
+ profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
linea-besu-sequencer-plugin-downloader:
condition: service_completed_successfully
@@ -49,7 +49,7 @@ services:
- /bin/bash
- -c
- |
- /opt/besu/bin/besu \
+ /opt/besu/bin/besu-untuned \
--config-file=/var/lib/besu/sequencer.config.toml \
--node-private-key-file="/var/lib/besu/key" \
--plugin-linea-l1-polling-interval="PT12S" \
@@ -90,7 +90,7 @@ services:
hostname: l2-node
image: consensys/linea-geth:${ZKGETH_TAG:-0588665}
platform: linux/amd64
- profiles: [ "l2", "l2-bc", "debug" ]
+ profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
sequencer:
condition: service_healthy
@@ -118,12 +118,12 @@ services:
networks:
linea:
ipv4_address: 11.11.11.209
-
+
l2-node-besu:
hostname: l2-node-besu
container_name: l2-node-besu
image: consensys/linea-besu:24.10-delivery34
- profiles: [ "l2", "l2-bc", "debug" ]
+ profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
sequencer:
condition: service_healthy
@@ -145,7 +145,7 @@ services:
- /bin/bash
- -c
- |
- /opt/besu/bin/besu \
+ /opt/besu/bin/besu-untuned \
--config-file=/var/lib/besu/l2-node-besu.config.toml \
--genesis-file=/var/lib/besu/genesis.json \
--plugin-linea-l1-polling-interval="PT12S" \
@@ -172,7 +172,7 @@ services:
hostname: traces-node
image: consensys/linea-geth:${ZKGETH_TAG:-0588665}
platform: linux/amd64
- profiles: [ "l2", "l2-bc", "debug" ]
+ profiles: [ "l2", "l2-bc", "debug", "external-to-monorepo" ]
depends_on:
sequencer:
condition: service_healthy
@@ -308,7 +308,7 @@ services:
transaction-exclusion-api:
hostname: transaction-exclusion-api
container_name: transaction-exclusion-api
- image: consensys/linea-transaction-exclusion-api:${TRANSACTION_EXCLUSION_API_TAG:-d227ddf}
+ image: consensys/linea-transaction-exclusion-api:${TRANSACTION_EXCLUSION_API_TAG:-cd58916}
profiles: [ "l2", "debug" ]
restart: on-failure
depends_on:
@@ -367,7 +367,7 @@ services:
hostname: web3signer
container_name: web3signer
image: consensys/web3signer:23.3-jdk17
- profiles: [ "l2", "debug" ]
+ profiles: [ "l2", "debug", "external-to-monorepo" ]
ports:
- "9000:9000"
command:
@@ -396,7 +396,7 @@ services:
image: postgres:14.5
hostname: postgres
container_name: postgres
- profiles: [ "l2", "debug" ]
+ profiles: [ "l2", "debug", "external-to-monorepo" ]
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
@@ -425,8 +425,8 @@ services:
l1-el-node:
container_name: l1-el-node
hostname: l1-el-node
- image: hyperledger/besu:24.6.0
- profiles: [ "l1", "debug" ]
+ image: hyperledger/besu:24.10.0
+ profiles: [ "l1", "debug", "external-to-monorepo" ]
depends_on:
l1-node-genesis-generator:
condition: service_completed_successfully
@@ -437,7 +437,11 @@ services:
retries: 120
environment:
LOG4J_CONFIGURATION_FILE: /var/lib/besu/log4j.xml
- command: [ "--config-file=/config/config.toml" ]
+ entrypoint:
+ - /bin/bash
+ - -c
+ - |
+ /opt/besu/bin/besu-untuned --config-file=/config/config.toml
volumes:
- ./config/l1-node/el/besu.key:/config/keys/besu.key:ro
- ./config/l1-node/el/config.toml:/config/config.toml:ro
@@ -458,7 +462,7 @@ services:
container_name: l1-cl-node
hostname: l1-cl-node
image: consensys/teku:24.2.0
- profiles: [ "l1", "debug" ]
+ profiles: [ "l1", "debug", "external-to-monorepo" ]
depends_on:
l1-el-node:
condition: service_healthy
@@ -484,7 +488,7 @@ services:
l1-node-genesis-generator:
build:
context: ./config/l1-node/
- profiles: [ "l1", "debug" ]
+ profiles: [ "l1", "debug", "external-to-monorepo" ]
entrypoint:
- /bin/sh
- -c
@@ -532,7 +536,7 @@ services:
image: consensys/linea-besu:linea-delivery-27
hostname: zkbesu-shomei
container_name: zkbesu-shomei
- profiles: [ "l2", "l2-bc" ]
+ profiles: [ "l2", "l2-bc", "external-to-monorepo" ]
depends_on:
shomei-plugin-downloader:
condition: service_completed_successfully
@@ -560,7 +564,7 @@ services:
- /bin/bash
- -c
- |
- /opt/besu/bin/besu \
+ /opt/besu/bin/besu-untuned \
--config-file=/var/lib/besu/zkbesu-config.toml \
--genesis-file=/var/lib/besu/genesis.json \
--plugin-shomei-http-host="11.11.11.114" \
@@ -578,7 +582,7 @@ services:
image: consensys/linea-shomei:2.2.0
hostname: shomei
container_name: shomei
- profiles: [ "l2", "l2-bc" ]
+ profiles: [ "l2", "l2-bc", "external-to-monorepo" ]
depends_on:
zkbesu-shomei:
condition: service_started
@@ -614,7 +618,7 @@ services:
image: consensys/linea-shomei:2.2.0
hostname: shomei-frontend
container_name: shomei-frontend
- profiles: [ "l2", "l2-bc" ]
+ profiles: [ "l2", "l2-bc", "external-to-monorepo" ]
depends_on:
zkbesu-shomei:
condition: service_started
@@ -655,7 +659,7 @@ services:
shomei-plugin-downloader:
container_name: shomei-plugin-downloader
- profiles: ["l2", "l2-bc"]
+ profiles: [ "l2", "l2-bc", "external-to-monorepo" ]
image: busybox:1.36.1
command: [ "sh", "/file-downloader.sh", "https://github.com/Consensys/besu-shomei-plugin/releases/download/v0.3.1/besu-shomei-plugin-v0.3.1.jar", "/shomei-plugin" ]
volumes:
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 146354e45..130696aa9 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -19,7 +19,8 @@ netty = "4.1.92.Final"
picoli = "4.7.1"
restassured = "5.3.0"
teku = "23.1.1"
-tuweni = "2.3.1"
+tuweni = "2.4.2"
+kotlinResult = "1.1.16"
vertx = "4.5.0"
web3j = "4.12.0"
wiremock = "3.0.1"
diff --git a/jvm-libs/generic/extensions/kotlin/build.gradle b/jvm-libs/generic/extensions/kotlin/build.gradle
index 8ca54d2c3..6ebb46c25 100644
--- a/jvm-libs/generic/extensions/kotlin/build.gradle
+++ b/jvm-libs/generic/extensions/kotlin/build.gradle
@@ -5,6 +5,7 @@ plugins {
dependencies {
api "org.jetbrains.kotlinx:kotlinx-datetime:${libs.versions.kotlinxDatetime.get()}"
+ testImplementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
}
jar {
diff --git a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/URIExtensions.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/URIExtensions.kt
new file mode 100644
index 000000000..a4453ea7b
--- /dev/null
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/URIExtensions.kt
@@ -0,0 +1,16 @@
+package build.linea
+
+import java.net.URI
+
+fun URI.getPortWithSchemeDefaults(): Int {
+ return if (port != -1) {
+ port
+ } else {
+ when (scheme.lowercase()) {
+ "http" -> 80
+ "https" -> 443
+ // Focus on HTTP as it is what we need for now
+ else -> throw IllegalArgumentException("Unsupported scheme: $scheme")
+ }
+ }
+}
diff --git a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/UrlExtensions.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/UrlExtensions.kt
new file mode 100644
index 000000000..30a5e0f8e
--- /dev/null
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/UrlExtensions.kt
@@ -0,0 +1,16 @@
+package build.linea
+
+import java.net.URI
+
+fun URI.getPortWithSchemaDefaults(): Int {
+ return if (port != -1) {
+ port
+ } else {
+ when (scheme.lowercase()) {
+ "http" -> 80
+ "https" -> 443
+ // Focus on HTTP as it is what we need for now
+ else -> throw IllegalArgumentException("Unsupported scheme: $scheme")
+ }
+ }
+}
diff --git a/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/jvm/ResourcesUtil.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/jvm/ResourcesUtil.kt
similarity index 95%
rename from jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/jvm/ResourcesUtil.kt
rename to jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/jvm/ResourcesUtil.kt
index 232c8d5c2..6d7a2f461 100644
--- a/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/jvm/ResourcesUtil.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/build/linea/jvm/ResourcesUtil.kt
@@ -1,4 +1,4 @@
-package net.consensys.jvm
+package build.linea.jvm
import java.io.File
import java.nio.file.Files
diff --git a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/ByteArrayExtensions.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/ByteArrayExtensions.kt
index 9e9fd6ec2..343224c7d 100644
--- a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/ByteArrayExtensions.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/ByteArrayExtensions.kt
@@ -1,5 +1,8 @@
package net.consensys
+import java.math.BigInteger
+import java.util.HexFormat
+
fun ByteArray.assertSize(expectedSize: UInt, fieldName: String = ""): ByteArray = apply {
require(size == expectedSize.toInt()) { "$fieldName expected to have $expectedSize bytes, but got $size" }
}
@@ -48,3 +51,34 @@ fun ByteArray.sliceOf(
fun ByteArray.sliceOf32(sliceNumber: Int): ByteArray {
return this.sliceOf(sliceSize = 32, sliceNumber)
}
+
+fun ByteArray.encodeHex(prefix: Boolean = true): String {
+ val hexStr = HexFormat.of().formatHex(this)
+ if (prefix) {
+ return "0x$hexStr"
+ } else {
+ return hexStr
+ }
+}
+
+fun ByteArray.toULongFromLast8Bytes(lenient: Boolean = false): ULong {
+ if (!lenient && size < 8) {
+ throw IllegalArgumentException("ByteArray size should be >= 8 to convert to ULong")
+ }
+ val significantBytes = this.sliceArray((this.size - 8).coerceAtLeast(0) until this.size)
+ return BigInteger(1, significantBytes).toULong()
+}
+
+/**
+ * This a temporary extension to ByteArray.
+ * We expect Kotlin to add Companion to ByteArray in the future, like it did for Int and Byte.
+ * This extension object ByteArrayE will be removed once that happens
+ * and it's function's migrated to ByteArray.Companion.
+ */
+object ByteArrayExt {
+ fun random(size: Int): ByteArray {
+ return kotlin.random.Random.nextBytes(size)
+ }
+
+ fun random32(): ByteArray = random(32)
+}
diff --git a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/StringExtensions.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/StringExtensions.kt
new file mode 100644
index 000000000..68f846f90
--- /dev/null
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/StringExtensions.kt
@@ -0,0 +1,12 @@
+package net.consensys
+
+import java.util.HexFormat
+
+fun String.decodeHex(): ByteArray {
+ check(length % 2 == 0) { "Must have an even length" }
+ return HexFormat.of().parseHex(removePrefix("0x"))
+}
+
+fun String.containsAny(strings: List, ignoreCase: Boolean): Boolean {
+ return strings.any { this.contains(it, ignoreCase) }
+}
diff --git a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/TypingsExtensions.kt b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/TypingsExtensions.kt
index 4a1d2505d..71f87c3b2 100644
--- a/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/TypingsExtensions.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/main/kotlin/net/consensys/TypingsExtensions.kt
@@ -50,21 +50,6 @@ fun ULong.toGWei(): Double = this.toDouble().toGWei()
*/
fun ULong.Companion.fromHexString(value: String): ULong = value.replace("0x", "").toULong(16)
-// ULong extensions
-fun ByteArray.encodeHex(prefix: Boolean = true): String =
- "${if (prefix) "0x" else ""}${joinToString(separator = "") { eachByte -> "%02x".format(eachByte) }}"
-
-fun String.decodeHex(): ByteArray {
- check(length % 2 == 0) { "Must have an even length" }
- return removePrefix("0x").chunked(2)
- .map { it.toInt(16).toByte() }
- .toByteArray()
-}
-
-fun String.containsAny(strings: List, ignoreCase: Boolean): Boolean {
- return strings.any { this.contains(it, ignoreCase) }
-}
-
fun > ClosedRange.toIntervalString(): String {
val size = if (start <= endInclusive) {
this.endInclusive.toString().toBigDecimal() - this.start.toString().toBigDecimal() + 1.toBigDecimal()
diff --git a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/URIExtensionsTest.kt b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/URIExtensionsTest.kt
new file mode 100644
index 000000000..18701f6e2
--- /dev/null
+++ b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/URIExtensionsTest.kt
@@ -0,0 +1,20 @@
+package build.linea
+
+import org.assertj.core.api.Assertions.assertThat
+import org.assertj.core.api.Assertions.assertThatThrownBy
+import org.junit.jupiter.api.Test
+import java.net.URI
+
+class URIExtensionsTest {
+ @Test
+ fun `getPortWithSchemaDefaults`() {
+ assertThat(URI.create("http://example.com").getPortWithSchemeDefaults()).isEqualTo(80)
+ assertThat(URI.create("https://example.com").getPortWithSchemeDefaults()).isEqualTo(443)
+ assertThat(URI.create("http://example.com:8080").getPortWithSchemeDefaults()).isEqualTo(8080)
+ assertThat(URI.create("https://example.com:8080").getPortWithSchemeDefaults()).isEqualTo(8080)
+ assertThat(URI.create("myschema://example.com:8080").getPortWithSchemeDefaults()).isEqualTo(8080)
+ assertThatThrownBy { (URI.create("mySchema://example.com").getPortWithSchemeDefaults()) }
+ .isInstanceOf(IllegalArgumentException::class.java)
+ .hasMessage("Unsupported scheme: mySchema")
+ }
+}
diff --git a/jvm-libs/linea/blob-compressor/src/test/kotlin/net/consensys/jvm/ResourcesUtilTest.kt b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/jvm/ResourcesUtilTest.kt
similarity index 92%
rename from jvm-libs/linea/blob-compressor/src/test/kotlin/net/consensys/jvm/ResourcesUtilTest.kt
rename to jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/jvm/ResourcesUtilTest.kt
index 8f732e861..a2afd0cc9 100644
--- a/jvm-libs/linea/blob-compressor/src/test/kotlin/net/consensys/jvm/ResourcesUtilTest.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/build/linea/jvm/ResourcesUtilTest.kt
@@ -1,6 +1,6 @@
-package net.consensys.jvm
+package build.linea.jvm
-import net.consensys.jvm.ResourcesUtil.copyResourceToTmpDir
+import build.linea.jvm.ResourcesUtil.copyResourceToTmpDir
import org.assertj.core.api.AssertionsForClassTypes.assertThat
import org.junit.jupiter.api.Test
import java.nio.file.Files
diff --git a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/ByteArrayExtensionsTest.kt b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/ByteArrayExtensionsTest.kt
index 91baea7be..7e468d7db 100644
--- a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/ByteArrayExtensionsTest.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/ByteArrayExtensionsTest.kt
@@ -9,9 +9,11 @@ class ByteArrayExtensionsTest {
@Test
fun `ByteArray#encodeHex`() {
assertThat(byteArrayOf().encodeHex()).isEqualTo("0x")
+ assertThat(byteArrayOf().encodeHex(false)).isEqualTo("")
assertThat(byteArrayOf(0).encodeHex()).isEqualTo("0x00")
assertThat(byteArrayOf(1).encodeHex()).isEqualTo("0x01")
assertThat(byteArrayOf(0x12, 0x34, 0x56).encodeHex()).isEqualTo("0x123456")
+ assertThat(byteArrayOf(0x12, 0x34, 0x56).encodeHex(false)).isEqualTo("123456")
}
@Test
@@ -109,4 +111,13 @@ class ByteArrayExtensionsTest {
.isInstanceOf(AssertionError::class.java)
.hasMessage("slice 64..95 is out of array size=80")
}
+
+ @Test
+ fun toULongFromLast8Bytes() {
+ assertThat(byteArrayOf(0x00).toULongFromLast8Bytes(lenient = true)).isEqualTo(0uL)
+ assertThat(byteArrayOf(0x01).toULongFromLast8Bytes(lenient = true)).isEqualTo(1uL)
+ val max = ByteArray(32) { 0xff.toByte() }
+ assertThat(max.toULongFromLast8Bytes()).isEqualTo(ULong.MAX_VALUE)
+ assertThat(max.apply { set(31, 0xfe.toByte()) }.toULongFromLast8Bytes()).isEqualTo(ULong.MAX_VALUE - 1UL)
+ }
}
diff --git a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/StringExtensionsTest.kt b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/StringExtensionsTest.kt
new file mode 100644
index 000000000..28549f425
--- /dev/null
+++ b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/StringExtensionsTest.kt
@@ -0,0 +1,30 @@
+package net.consensys
+
+import org.assertj.core.api.Assertions.assertThat
+import org.junit.jupiter.api.Test
+
+class StringExtensionsTest {
+ @Test
+ fun `String#decodeHex`() {
+ assertThat("0x".decodeHex()).isEmpty()
+ assertThat("".decodeHex()).isEmpty()
+ assertThat("0x".decodeHex()).isEmpty()
+ assertThat("0x00".decodeHex()).isEqualTo(byteArrayOf(0))
+ assertThat("0x01".decodeHex()).isEqualTo(byteArrayOf(1))
+ assertThat("0x123456".decodeHex()).isEqualTo(byteArrayOf(0x12, 0x34, 0x56))
+ assertThat("123456".decodeHex()).isEqualTo(byteArrayOf(0x12, 0x34, 0x56))
+ }
+
+ @Test
+ fun `String#containsAny`() {
+ val stringList = listOf(
+ "This is a TEST",
+ "lorem ipsum"
+ )
+
+ assertThat("this is a test string ignoring cases".containsAny(stringList, ignoreCase = true)).isTrue()
+ assertThat("this is a test string without matching cases".containsAny(stringList, ignoreCase = false)).isFalse()
+ assertThat("this includes lorem ipsum".containsAny(stringList, ignoreCase = true)).isTrue()
+ assertThat("this string won't match".containsAny(stringList, ignoreCase = true)).isFalse()
+ }
+}
diff --git a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/TypingsExtensionsTest.kt b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/TypingsExtensionsTest.kt
index 8cfc36ce9..ab0314374 100644
--- a/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/TypingsExtensionsTest.kt
+++ b/jvm-libs/generic/extensions/kotlin/src/test/kotlin/net/consensys/TypingsExtensionsTest.kt
@@ -58,25 +58,4 @@ class TypingsExtensionsTest {
assertThat((0..-1).toIntervalString()).isEqualTo("[0..-1]2")
assertThat((0..-10).toIntervalString()).isEqualTo("[0..-10]11")
}
-
- @Test
- fun `String#decodeHex`() {
- assertThat("0x".decodeHex()).isEmpty()
- assertThat("0x00".decodeHex()).isEqualTo(byteArrayOf(0))
- assertThat("0x01".decodeHex()).isEqualTo(byteArrayOf(1))
- assertThat("0x123456".decodeHex()).isEqualTo(byteArrayOf(0x12, 0x34, 0x56))
- }
-
- @Test
- fun `String#containsAny`() {
- val stringList = listOf(
- "This is a TEST",
- "lorem ipsum"
- )
-
- assertThat("this is a test string ignoring cases".containsAny(stringList, ignoreCase = true)).isTrue()
- assertThat("this is a test string without matching cases".containsAny(stringList, ignoreCase = false)).isFalse()
- assertThat("this includes lorem ipsum".containsAny(stringList, ignoreCase = true)).isTrue()
- assertThat("this string won't match".containsAny(stringList, ignoreCase = true)).isFalse()
- }
}
diff --git a/jvm-libs/linea/blob-compressor/src/test/resources/root-resource.txt b/jvm-libs/generic/extensions/kotlin/src/test/resources/root-resource.txt
similarity index 100%
rename from jvm-libs/linea/blob-compressor/src/test/resources/root-resource.txt
rename to jvm-libs/generic/extensions/kotlin/src/test/resources/root-resource.txt
diff --git a/jvm-libs/linea/blob-compressor/src/test/resources/test/folder/nested-resource.txt b/jvm-libs/generic/extensions/kotlin/src/test/resources/test/folder/nested-resource.txt
similarity index 100%
rename from jvm-libs/linea/blob-compressor/src/test/resources/test/folder/nested-resource.txt
rename to jvm-libs/generic/extensions/kotlin/src/test/resources/test/folder/nested-resource.txt
diff --git a/jvm-libs/linea/blob-compressor/src/test/resources/test/folder2/nested-resource.txt b/jvm-libs/generic/extensions/kotlin/src/test/resources/test/folder2/nested-resource.txt
similarity index 100%
rename from jvm-libs/linea/blob-compressor/src/test/resources/test/folder2/nested-resource.txt
rename to jvm-libs/generic/extensions/kotlin/src/test/resources/test/folder2/nested-resource.txt
diff --git a/jvm-libs/generic/extensions/tuweni/build.gradle b/jvm-libs/generic/extensions/tuweni/build.gradle
new file mode 100644
index 000000000..abbc14918
--- /dev/null
+++ b/jvm-libs/generic/extensions/tuweni/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+dependencies {
+ api "io.tmio:tuweni-bytes:${libs.versions.tuweni.get()}"
+ implementation(project(':jvm-libs:generic:extensions:kotlin'))
+ testImplementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
+}
diff --git a/jvm-libs/generic/extensions/tuweni/src/main/kotlin/build/linea/tuweni/Bytes32.kt b/jvm-libs/generic/extensions/tuweni/src/main/kotlin/build/linea/tuweni/Bytes32.kt
new file mode 100644
index 000000000..2c9336aa3
--- /dev/null
+++ b/jvm-libs/generic/extensions/tuweni/src/main/kotlin/build/linea/tuweni/Bytes32.kt
@@ -0,0 +1,9 @@
+package build.linea.tuweni
+
+import net.consensys.toULong
+import org.apache.tuweni.bytes.Bytes32
+import java.math.BigInteger
+
+fun ByteArray.toBytes32(): Bytes32 = Bytes32.wrap(this)
+fun ByteArray.sliceAsBytes32(sliceIndex: Int): Bytes32 = Bytes32.wrap(this, /*offset*/sliceIndex * 32)
+fun Bytes32.toULong(): ULong = BigInteger(this.toArray()).toULong()
diff --git a/jvm-libs/generic/extensions/tuweni/src/test/kotlin/build/linea/tuweni/Bytes32Test.kt b/jvm-libs/generic/extensions/tuweni/src/test/kotlin/build/linea/tuweni/Bytes32Test.kt
new file mode 100644
index 000000000..87d176137
--- /dev/null
+++ b/jvm-libs/generic/extensions/tuweni/src/test/kotlin/build/linea/tuweni/Bytes32Test.kt
@@ -0,0 +1,40 @@
+package build.linea.tuweni
+
+import net.consensys.toBigInteger
+import org.apache.tuweni.bytes.Bytes32
+import org.apache.tuweni.units.bigints.UInt256
+import org.assertj.core.api.Assertions.assertThat
+import org.assertj.core.api.Assertions.assertThatThrownBy
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import kotlin.random.Random
+
+class Bytes32Test {
+ @BeforeEach
+ fun setUp() {
+ // workaround: need this to load the functions otherwise JUnit gets stuck ¯\_(ツ)_/¯
+ Random.Default.nextBytes(32).sliceAsBytes32(0)
+ UInt256.ZERO.toBytes().toULong()
+ }
+
+ @Test
+ fun testSliceAsBytes32() {
+ val bytes = Random.Default.nextBytes(3 * 32 - 1)
+ assertThat(bytes.sliceAsBytes32(0)).isEqualTo(Bytes32.wrap(bytes, 0))
+ assertThat(bytes.sliceAsBytes32(1)).isEqualTo(Bytes32.wrap(bytes, 32))
+ assertThatThrownBy { bytes.sliceAsBytes32(2) }
+ .isInstanceOf(IllegalArgumentException::class.java)
+ }
+
+ @Test
+ fun testToULong() {
+ UInt256.ZERO.toBytes()
+ .also { bytes -> assertThat(bytes.toULong()).isEqualTo(0uL) }
+ UInt256.valueOf(Long.MAX_VALUE)
+ .also { bytes -> assertThat(bytes.toULong()).isEqualTo(Long.MAX_VALUE.toULong()) }
+ UInt256.valueOf(Long.MAX_VALUE).add(UInt256.ONE)
+ .also { bytes -> assertThat(bytes.toULong()).isEqualTo(Long.MAX_VALUE.toULong() + 1UL) }
+ UInt256.valueOf(ULong.MAX_VALUE.toBigInteger())
+ .also { bytes -> assertThat(bytes.toULong()).isEqualTo(ULong.MAX_VALUE) }
+ }
+}
diff --git a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcRequestRetryerV2.kt b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcRequestRetryerV2.kt
index 0182324b0..1ceb0f42c 100644
--- a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcRequestRetryerV2.kt
+++ b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcRequestRetryerV2.kt
@@ -53,7 +53,6 @@ class JsonRpcRequestRetryerV2(
val lastException = AtomicReference()
val retriesCount = AtomicInteger(0)
val requestPredicate = Predicate> { result ->
- log.info("result: {}", result)
shallRetryRequestsClientBasePredicate.test(result) || shallRetryRequestPredicate.test(result)
}
diff --git a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/LoadBalancingJsonRpcClient.kt b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/LoadBalancingJsonRpcClient.kt
index 9820f8a46..10c48de85 100644
--- a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/LoadBalancingJsonRpcClient.kt
+++ b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/LoadBalancingJsonRpcClient.kt
@@ -48,12 +48,14 @@ private constructor(
}
}
}
+
private val log: Logger = LogManager.getLogger(this.javaClass)
private data class RpcClientContext(val rpcClient: JsonRpcClient, var inflightRequests: UInt)
private data class RpcRequestContext(
val request: JsonRpcRequest,
- val promise: Promise>
+ val promise: Promise>,
+ val resultMapper: (Any?) -> Any?
)
private val clientsPool: List = rpcClients.map { RpcClientContext(it, 0u) }
@@ -93,11 +95,12 @@ private constructor(
}
private fun enqueueRequest(
- request: JsonRpcRequest
+ request: JsonRpcRequest,
+ resultMapper: (Any?) -> Any?
): Future> {
val resultPromise: Promise> =
Promise.promise()
- waitingQueue.add(RpcRequestContext(request, resultPromise))
+ waitingQueue.add(RpcRequestContext(request, resultPromise, resultMapper))
return resultPromise.future()
}
@@ -105,7 +108,7 @@ private constructor(
request: JsonRpcRequest,
resultMapper: (Any?) -> Any?
): Future> {
- val result = enqueueRequest(request)
+ val result = enqueueRequest(request, resultMapper)
serveNextWaitingInTheQueue()
return result
}
@@ -114,21 +117,23 @@ private constructor(
rpcClientContext: RpcClientContext,
queuedRequest: RpcRequestContext
) {
- rpcClientContext.rpcClient.makeRequest(queuedRequest.request).onComplete { asyncResult ->
- try {
- writeLock.lock()
- rpcClientContext.inflightRequests--
- } finally {
- writeLock.unlock()
- }
- try {
- queuedRequest.promise.handle(asyncResult)
- } catch (e: Exception) {
- log.error("Response handler threw error:", e)
- } finally {
- serveNextWaitingInTheQueue()
+ rpcClientContext.rpcClient
+ .makeRequest(queuedRequest.request, queuedRequest.resultMapper)
+ .onComplete { asyncResult ->
+ try {
+ writeLock.lock()
+ rpcClientContext.inflightRequests--
+ } finally {
+ writeLock.unlock()
+ }
+ try {
+ queuedRequest.promise.handle(asyncResult)
+ } catch (e: Exception) {
+ log.error("Response handler threw error:", e)
+ } finally {
+ serveNextWaitingInTheQueue()
+ }
}
- }
}
fun close() {
diff --git a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/SequentialIdSupplier.kt b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/SequentialIdSupplier.kt
index 338d0b302..01e9bf426 100644
--- a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/SequentialIdSupplier.kt
+++ b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/SequentialIdSupplier.kt
@@ -2,9 +2,7 @@ package net.consensys.linea.jsonrpc.client
import java.util.concurrent.atomic.AtomicLong
import java.util.function.Supplier
-import javax.annotation.concurrent.ThreadSafe
-@ThreadSafe
class SequentialIdSupplier : Supplier {
private var id = AtomicLong(0)
diff --git a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientFactory.kt b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientFactory.kt
index 21687a62d..577174cc1 100644
--- a/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientFactory.kt
+++ b/jvm-libs/generic/json-rpc/src/main/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientFactory.kt
@@ -10,17 +10,37 @@ import io.vertx.core.http.HttpVersion
import org.apache.logging.log4j.Level
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
+import java.net.URI
import java.net.URL
import java.util.function.Predicate
import java.util.function.Supplier
+interface JsonRpcClientFactory {
+ /**
+ * Creates a JSON-RPC V2 Spec client.
+ * If multiple endpoints are provided, a load balancing client will be created with round-robin strategy.
+ */
+ fun createJsonRpcV2Client(
+ endpoints: List,
+ maxInflightRequestsPerClient: UInt? = null,
+ retryConfig: RequestRetryConfig,
+ httpVersion: HttpVersion? = null,
+ requestObjectMapper: ObjectMapper = objectMapper,
+ responseObjectMapper: ObjectMapper = objectMapper,
+ shallRetryRequestsClientBasePredicate: Predicate> = Predicate { it is Err },
+ log: Logger = LogManager.getLogger(VertxHttpJsonRpcClient::class.java),
+ requestResponseLogLevel: Level = Level.TRACE,
+ failuresLogLevel: Level = Level.DEBUG
+ ): JsonRpcV2Client
+}
+
class VertxHttpJsonRpcClientFactory(
private val vertx: Vertx,
private val meterRegistry: MeterRegistry,
private val requestResponseLogLevel: Level = Level.TRACE,
private val failuresLogLevel: Level = Level.DEBUG,
private val requestIdSupplier: Supplier = SequentialIdSupplier.singleton
-) {
+) : JsonRpcClientFactory {
fun create(
endpoint: URL,
maxPoolSize: Int? = null,
@@ -149,24 +169,27 @@ class VertxHttpJsonRpcClientFactory(
)
}
- fun createV2(
- vertx: Vertx,
- endpoints: Set,
- maxInflightRequestsPerClient: UInt? = null,
+ override fun createJsonRpcV2Client(
+ endpoints: List,
+ maxInflightRequestsPerClient: UInt?,
retryConfig: RequestRetryConfig,
- httpVersion: HttpVersion? = null,
- requestObjectMapper: ObjectMapper = objectMapper,
- responseObjectMapper: ObjectMapper = objectMapper,
- shallRetryRequestsClientBasePredicate: Predicate> = Predicate { it is Err },
- log: Logger = LogManager.getLogger(VertxHttpJsonRpcClient::class.java),
- requestResponseLogLevel: Level = this.requestResponseLogLevel,
- failuresLogLevel: Level = this.failuresLogLevel
+ httpVersion: HttpVersion?,
+ requestObjectMapper: ObjectMapper,
+ responseObjectMapper: ObjectMapper,
+ shallRetryRequestsClientBasePredicate: Predicate>,
+ log: Logger,
+ requestResponseLogLevel: Level,
+ failuresLogLevel: Level
): JsonRpcV2Client {
assert(endpoints.isNotEmpty()) { "endpoints set is empty " }
+ assert(endpoints.size == endpoints.toSet().size) {
+ "endpoints set contains duplicates: $endpoints"
+ }
+
// create base client
return if (maxInflightRequestsPerClient != null || endpoints.size > 1) {
createWithLoadBalancing(
- endpoints = endpoints,
+ endpoints = endpoints.map { it.toURL() }.toSet(),
maxInflightRequestsPerClient = maxInflightRequestsPerClient!!,
httpVersion = httpVersion,
requestObjectMapper = requestObjectMapper,
@@ -177,7 +200,7 @@ class VertxHttpJsonRpcClientFactory(
)
} else {
create(
- endpoint = endpoints.first(),
+ endpoint = endpoints.first().toURL(),
httpVersion = httpVersion,
requestObjectMapper = requestObjectMapper,
responseObjectMapper = responseObjectMapper,
diff --git a/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcV2ClientImplTest.kt b/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcV2ClientImplTest.kt
index c6a008993..cc9afa145 100644
--- a/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcV2ClientImplTest.kt
+++ b/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/JsonRpcV2ClientImplTest.kt
@@ -37,7 +37,6 @@ import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.math.BigInteger
import java.net.ConnectException
import java.net.URI
-import java.net.URL
import java.util.concurrent.ExecutionException
import java.util.function.Predicate
import kotlin.time.Duration
@@ -54,7 +53,6 @@ class JsonRpcV2ClientImplTest {
private lateinit var wiremock: WireMockServer
private val path = "/api/v1?appKey=1234"
private lateinit var meterRegistry: SimpleMeterRegistry
- private lateinit var endpoint: URL
private val defaultRetryConfig = retryConfig(maxRetries = 2u, timeout = 8.seconds, backoffDelay = 5.milliseconds)
private val defaultObjectMapper = jacksonObjectMapper()
@@ -78,7 +76,6 @@ class JsonRpcV2ClientImplTest {
)
private fun createClientAndSetupWireMockServer(
- vertx: Vertx,
responseObjectMapper: ObjectMapper = defaultObjectMapper,
requestObjectMapper: ObjectMapper = defaultObjectMapper,
retryConfig: RequestRetryConfig = defaultRetryConfig,
@@ -86,11 +83,9 @@ class JsonRpcV2ClientImplTest {
): JsonRpcV2Client {
wiremock = WireMockServer(WireMockConfiguration.options().dynamicPort())
wiremock.start()
- endpoint = URI(wiremock.baseUrl() + path).toURL()
- return factory.createV2(
- vertx = vertx,
- endpoints = setOf(endpoint),
+ return factory.createJsonRpcV2Client(
+ endpoints = listOf(URI(wiremock.baseUrl() + path)),
retryConfig = retryConfig,
requestObjectMapper = requestObjectMapper,
responseObjectMapper = responseObjectMapper,
@@ -103,7 +98,7 @@ class JsonRpcV2ClientImplTest {
this.vertx = vertx
this.meterRegistry = SimpleMeterRegistry()
this.factory = VertxHttpJsonRpcClientFactory(vertx, meterRegistry)
- this.client = createClientAndSetupWireMockServer(vertx)
+ this.client = createClientAndSetupWireMockServer()
}
@AfterEach
@@ -192,7 +187,7 @@ class JsonRpcV2ClientImplTest {
fun `request params shall use defined objectMapper and not affect json-rpc envelope`() {
val obj = User(name = "John", email = "email@example.com", address = "0x01ffbb".decodeHex(), value = 987UL)
- createClientAndSetupWireMockServer(vertx, requestObjectMapper = defaultObjectMapper).also { client ->
+ createClientAndSetupWireMockServer(requestObjectMapper = defaultObjectMapper).also { client ->
replyRequestWith(200, jsonRpcResultOk)
client.makeRequest(
method = "someMethod",
@@ -223,7 +218,7 @@ class JsonRpcV2ClientImplTest {
}
)
- createClientAndSetupWireMockServer(vertx, requestObjectMapper = objMapperWithNumbersAsHex).also { client ->
+ createClientAndSetupWireMockServer(requestObjectMapper = objMapperWithNumbersAsHex).also { client ->
replyRequestWith(200, jsonRpcResultOk)
client.makeRequest(
method = "someMethod",
@@ -456,7 +451,6 @@ class JsonRpcV2ClientImplTest {
@Test
fun `when it gets an error propagates to shallRetryRequestPredicate and retries while is true`() {
createClientAndSetupWireMockServer(
- vertx,
retryConfig = retryConfig(maxRetries = 10u)
).also { client ->
val responses = listOf(
@@ -507,7 +501,6 @@ class JsonRpcV2ClientImplTest {
@Test
fun `when it has connection error propagates to shallRetryRequestPredicate and retries while is true`() {
createClientAndSetupWireMockServer(
- vertx,
retryConfig = retryConfig(maxRetries = 10u)
).also { client ->
// stop the server to simulate connection error
@@ -543,7 +536,6 @@ class JsonRpcV2ClientImplTest {
@Test
fun `when it has connection error propagates to shallRetryRequestPredicate and retries until retry config elapses`() {
createClientAndSetupWireMockServer(
- vertx,
retryConfig = retryConfig(maxRetries = 2u, timeout = 8.seconds, backoffDelay = 5.milliseconds)
).also { client ->
// stop the server to simulate connection error
@@ -580,7 +572,6 @@ class JsonRpcV2ClientImplTest {
(it.value as String).startsWith("retry_a")
}
createClientAndSetupWireMockServer(
- vertx,
retryConfig = RequestRetryConfig(
maxRetries = 10u,
timeout = 5.minutes,
diff --git a/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientTest.kt b/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientTest.kt
index b7506bfbd..645ba823b 100644
--- a/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientTest.kt
+++ b/jvm-libs/generic/json-rpc/src/test/kotlin/net/consensys/linea/jsonrpc/client/VertxHttpJsonRpcClientTest.kt
@@ -18,6 +18,7 @@ import io.vertx.core.http.HttpClientOptions
import io.vertx.core.http.HttpVersion
import io.vertx.core.json.JsonArray
import io.vertx.core.json.JsonObject
+import net.consensys.decodeHex
import net.consensys.linea.async.get
import net.consensys.linea.async.toSafeFuture
import net.consensys.linea.jsonrpc.JsonRpcError
@@ -27,7 +28,6 @@ import net.consensys.linea.jsonrpc.JsonRpcSuccessResponse
import org.apache.logging.log4j.Level
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
-import org.apache.tuweni.bytes.Bytes
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.AfterEach
import org.junit.jupiter.api.BeforeEach
@@ -81,7 +81,7 @@ class VertxHttpJsonRpcClientTest {
JsonObject()
.put("name", "Alice")
.put("email", "alice@wonderland.io")
- .put("address", Bytes.fromHexString("0xaabbccdd").toArray())
+ .put("address", "0xaabbccdd".decodeHex())
)
client.makeRequest(JsonRpcRequestListParams("2.0", 1, "addUser", params)).get()
diff --git a/jvm-libs/generic/vertx-helper/build.gradle b/jvm-libs/generic/vertx-helper/build.gradle
index d2cf40bb7..7e77bfca7 100644
--- a/jvm-libs/generic/vertx-helper/build.gradle
+++ b/jvm-libs/generic/vertx-helper/build.gradle
@@ -3,6 +3,7 @@ plugins {
}
dependencies {
+ implementation project(':jvm-libs:generic:extensions:kotlin')
implementation project(':jvm-libs:generic:extensions:futures')
implementation "io.vertx:vertx-core"
implementation "io.vertx:vertx-web"
diff --git a/jvm-libs/generic/vertx-helper/src/main/kotlin/net/consensys/linea/vertx/ClientOptions.kt b/jvm-libs/generic/vertx-helper/src/main/kotlin/net/consensys/linea/vertx/ClientOptions.kt
new file mode 100644
index 000000000..3f46023c9
--- /dev/null
+++ b/jvm-libs/generic/vertx-helper/src/main/kotlin/net/consensys/linea/vertx/ClientOptions.kt
@@ -0,0 +1,13 @@
+package net.consensys.linea.vertx
+
+import build.linea.getPortWithSchemeDefaults
+import io.vertx.core.http.HttpClientOptions
+import java.net.URI
+
+fun T.setDefaultsFrom(uri: URI): T {
+ isSsl = uri.scheme.lowercase() == "https"
+ defaultHost = uri.host
+ defaultPort = uri.getPortWithSchemeDefaults()
+
+ return this
+}
diff --git a/jvm-libs/linea/blob-compressor/build.gradle b/jvm-libs/linea/blob-compressor/build.gradle
index c2281e817..0c9cb129f 100644
--- a/jvm-libs/linea/blob-compressor/build.gradle
+++ b/jvm-libs/linea/blob-compressor/build.gradle
@@ -10,7 +10,7 @@ apply from: rootProject.file("gradle/publishing.gradle")
dependencies {
implementation "net.java.dev.jna:jna:${libs.versions.jna.get()}"
- testImplementation project(":jvm-libs:generic:extensions:kotlin")
+ implementation project(":jvm-libs:generic:extensions:kotlin")
testImplementation project(":jvm-libs:linea:blob-shnarf-calculator")
}
diff --git a/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobCompressor.kt b/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobCompressor.kt
index bb21ba126..99a37939a 100644
--- a/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobCompressor.kt
+++ b/jvm-libs/linea/blob-compressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobCompressor.kt
@@ -1,8 +1,8 @@
package net.consensys.linea.blob
+import build.linea.jvm.ResourcesUtil.copyResourceToTmpDir
import com.sun.jna.Library
import com.sun.jna.Native
-import net.consensys.jvm.ResourcesUtil.copyResourceToTmpDir
interface GoNativeBlobCompressor {
diff --git a/jvm-libs/linea/blob-compressor/src/test/resources/net/consensys/linea/nativecompressor/rlp_blocks.bin b/jvm-libs/linea/blob-compressor/src/test/resources/net/consensys/linea/nativecompressor/rlp_blocks.bin
deleted file mode 100644
index 82c04c3ff..000000000
Binary files a/jvm-libs/linea/blob-compressor/src/test/resources/net/consensys/linea/nativecompressor/rlp_blocks.bin and /dev/null differ
diff --git a/jvm-libs/linea/blob-decompressor/build.gradle b/jvm-libs/linea/blob-decompressor/build.gradle
new file mode 100644
index 000000000..2a085ddb9
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/build.gradle
@@ -0,0 +1,58 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+ id 'net.consensys.zkevm.linea-native-libs-helper'
+ alias(libs.plugins.jreleaser)
+ id 'java-test-fixtures'
+}
+
+description = 'Java JNA wrapper for Linea Blob Decompressor Library implemented in GO Lang'
+apply from: rootProject.file("gradle/publishing.gradle")
+
+dependencies {
+ implementation "net.java.dev.jna:jna:${libs.versions.jna.get()}"
+ implementation project(":jvm-libs:generic:extensions:kotlin")
+
+ testImplementation project(":jvm-libs:linea:blob-compressor")
+ testImplementation(testFixtures(project(":jvm-libs:linea:blob-compressor")))
+ testImplementation(project(":jvm-libs:linea:testing:file-system"))
+ testImplementation("io.tmio:tuweni-bytes:${libs.versions.tuweni.get()}")
+ testImplementation("org.hyperledger.besu:besu-datatypes:${libs.versions.besu.get()}")
+ testImplementation "org.hyperledger.besu:evm:${libs.versions.besu.get()}"
+ testImplementation("org.hyperledger.besu.internal:core:${libs.versions.besu.get()}")
+ testImplementation("org.hyperledger.besu:plugin-api:${libs.versions.besu.get()}")
+ testImplementation("org.hyperledger.besu.internal:rlp:${libs.versions.besu.get()}")
+}
+
+jar {
+ dependsOn configurations.runtimeClasspath
+}
+
+test {
+ // we cannot have more 1 compressor per JVM, hence we disable parallel execution
+ // because multiple threads would cause issues with the native library
+ systemProperties["junit.jupiter.execution.parallel.enabled"] = false
+ maxParallelForks = 1
+}
+
+def libsZipDownloadOutputDir = project.parent.layout.buildDirectory.asFile.get().absolutePath
+
+task downloadNativeLibs {
+ doLast {
+ fetchLibFromZip("https://github.com/Consensys/linea-monorepo/releases/download/blob-libs-v1.1.0-test8/linea-blob-libs-v1.1.0-test8.zip", "blob_decompressor", libsZipDownloadOutputDir)
+ }
+}
+
+compileKotlin {
+ dependsOn tasks.downloadNativeLibs
+}
+
+task cleanResources(type: Delete) {
+ fileTree(project.layout.projectDirectory.dir('src/main/resources'))
+ .filter {
+ it.name.endsWith(".so") || it.name.endsWith(".dll") || it.name.endsWith(".dylib")
+ }.each {
+ delete it
+ }
+}
+
+clean.dependsOn cleanResources
diff --git a/jvm-libs/linea/blob-decompressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressor.kt b/jvm-libs/linea/blob-decompressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressor.kt
new file mode 100644
index 000000000..fa78dca2c
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/src/main/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressor.kt
@@ -0,0 +1,105 @@
+package net.consensys.linea.blob
+
+import build.linea.jvm.ResourcesUtil.copyResourceToTmpDir
+import com.sun.jna.Library
+import com.sun.jna.Native
+import java.nio.file.Path
+
+class DecompressionException(message: String) : RuntimeException(message)
+
+interface BlobDecompressor {
+ fun decompress(blob: ByteArray): ByteArray
+}
+
+internal class Adapter(
+ private val delegate: GoNativeBlobDecompressorJnaBinding,
+ private val maxExpectedCompressionRatio: Int = 10,
+ dictionaries: List
+) : BlobDecompressor {
+ init {
+ delegate.Init()
+
+ val paths = dictionaries.joinToString(separator = ":") { path -> path.toString() }
+
+ if (delegate.LoadDictionaries(paths) != dictionaries.size) {
+ throw DecompressionException("Failed to load dictionaries '$paths', error='${delegate.Error()}'")
+ }
+ }
+
+ override fun decompress(blob: ByteArray): ByteArray {
+ val decompressionBuffer = ByteArray(blob.size * maxExpectedCompressionRatio)
+ val decompressedSize = delegate.Decompress(blob, blob.size, decompressionBuffer, decompressionBuffer.size)
+ if (decompressedSize < 0) {
+ throw DecompressionException("Decompression failed, error='${delegate.Error()}'")
+ }
+ return decompressionBuffer.copyOf(decompressedSize)
+ }
+}
+
+internal interface GoNativeBlobDecompressorJnaBinding {
+
+ /**
+ * Init initializes the Decompressor. Must be run before anything else.
+ */
+ fun Init()
+
+ /**
+ * LoadDictionaries attempts to cache dictionaries from given paths, separated by colons,
+ * e.g. "../compressor_dict.bin:./other_dict"
+ * Returns the number of dictionaries successfully loaded, and -1 in case of failure, in which case Error() will
+ * return a description of the error.
+ *
+ * @param dictPaths a colon-separated list of paths to dictionaries, to be loaded into the decompressor
+ * @return the number of dictionaries loaded if successful, -1 if not.
+ */
+ fun LoadDictionaries(dictPaths: String): Int
+
+ /**
+ *
+ * Decompress processes a Linea blob and outputs an RLP encoded list of blocks.
+ * Due to information loss during pre-compression encoding, two pieces of information are represented "hackily":
+ * The block hash is in the ParentHash field.
+ * The transaction from address is in the signature.R field.
+ *
+ * Returns the number of bytes in out, or -1 in case of failure
+ * If -1 is returned, the Error() method will return a string describing the error.
+ *
+ * @param blob to be decompressed
+ * @param blob_len length of the blob
+ * @param out buffer to write the decompressed data
+ * @param out_max_len maximum length of the out buffer
+ * @return number of bytes in out, or -1 in case of failure
+ */
+ fun Decompress(blob: ByteArray, blob_len: Int, out: ByteArray, out_max_len: Int): Int
+
+ /**
+ * Error returns the last error message. Should be checked if Decompress returns -1.
+ */
+ fun Error(): String?
+}
+
+internal interface GoNativeBlobDecompressorJnaLib : GoNativeBlobDecompressorJnaBinding, Library
+
+enum class BlobDecompressorVersion(val version: String) {
+ V1_1_0("v1.1.0")
+}
+
+class GoNativeBlobDecompressorFactory {
+ companion object {
+ private const val DICTIONARY_NAME = "compressor_dict.bin"
+ private val dictionaryPath = copyResourceToTmpDir(DICTIONARY_NAME)
+
+ private fun getLibFileName(version: String) = "blob_decompressor_jna_$version"
+
+ fun getInstance(
+ version: BlobDecompressorVersion
+ ): BlobDecompressor {
+ return Native.load(
+ Native.extractFromResourcePath(getLibFileName(version.version)).toString(),
+ GoNativeBlobDecompressorJnaLib::class.java
+ ).let {
+ Adapter(delegate = it, dictionaries = listOf(dictionaryPath))
+ }
+ }
+ }
+}
diff --git a/jvm-libs/linea/blob-decompressor/src/main/resources/.gitignore b/jvm-libs/linea/blob-decompressor/src/main/resources/.gitignore
new file mode 100644
index 000000000..272f436b7
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/src/main/resources/.gitignore
@@ -0,0 +1,4 @@
+linux-aarch64/*
+linux-x86-64/*
+darwin-aarch64/*
+darwin-x86-64/*
diff --git a/jvm-libs/linea/blob-decompressor/src/main/resources/compressor_dict.bin b/jvm-libs/linea/blob-decompressor/src/main/resources/compressor_dict.bin
new file mode 100644
index 000000000..62296498c
Binary files /dev/null and b/jvm-libs/linea/blob-decompressor/src/main/resources/compressor_dict.bin differ
diff --git a/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/BlobDecompressorDataDecodingTest.kt b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/BlobDecompressorDataDecodingTest.kt
new file mode 100644
index 000000000..38c514ca9
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/BlobDecompressorDataDecodingTest.kt
@@ -0,0 +1,50 @@
+package net.consensys.linea.blob
+
+import net.consensys.linea.testing.filesystem.findPathTo
+import org.apache.tuweni.bytes.Bytes
+import org.assertj.core.api.Assertions.assertThat
+import org.hyperledger.besu.ethereum.core.Block
+import org.hyperledger.besu.ethereum.mainnet.MainnetBlockHeaderFunctions
+import org.hyperledger.besu.ethereum.rlp.RLP
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Disabled
+import org.junit.jupiter.api.Test
+import kotlin.io.path.readBytes
+
+class BlobDecompressorDataDecodingTest {
+ private lateinit var decompressor: BlobDecompressor
+
+ @BeforeEach
+ fun beforeEach() {
+ decompressor = GoNativeBlobDecompressorFactory.getInstance(BlobDecompressorVersion.V1_1_0)
+ }
+
+ @Test
+ fun `can deserialize native lib testdata blobs`() {
+ val blob = findPathTo("prover")!!
+ .resolve("lib/compressor/blob/testdata/v0/sample-blob-0151eda71505187b5.bin")
+ .readBytes()
+ val decompressedBlob = decompressor.decompress(blob)
+ val blocksRlpEncoded = rlpDecodeAsListOfBytes(decompressedBlob)
+ assertThat(blocksRlpEncoded).hasSize(254)
+ // TODO: enable after Besu supports deserializing transactions without signatures validation
+ //
+ // blocksRlpEncoded.forEachIndexed { index, blockRlp ->
+ // val rlpInput = RLP.input(Bytes.wrap(blockRlp))
+ // val decodedBlock = Block.readFrom(rlpInput, MainnetBlockHeaderFunctions())
+ // println("$index: $decodedBlock")
+ // }
+ }
+
+ @Disabled("for local dev validation")
+ fun `can decode RLP`() {
+ val blockBytes = Bytes.wrap(
+ // INSERT HERE THE RLP ENCODED BLOCK
+ // 0x01ff.decodeHex()
+ )
+ RLP.validate(blockBytes)
+ val rlpInput = RLP.input(blockBytes)
+ val decodedBlock = Block.readFrom(rlpInput, MainnetBlockHeaderFunctions())
+ println(decodedBlock)
+ }
+}
diff --git a/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressorTest.kt b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressorTest.kt
new file mode 100644
index 000000000..0d1454d8e
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/GoNativeBlobDecompressorTest.kt
@@ -0,0 +1,42 @@
+package net.consensys.linea.blob
+
+import net.consensys.linea.nativecompressor.CompressorTestData
+import org.assertj.core.api.Assertions.assertThat
+import org.junit.jupiter.api.Assertions.assertTrue
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+
+class GoNativeBlobDecompressorTest {
+ private val blobCompressedLimit = 10 * 1024
+ private lateinit var compressor: GoNativeBlobCompressor
+ private lateinit var decompressor: BlobDecompressor
+
+ @BeforeEach
+ fun beforeEach() {
+ compressor = GoNativeBlobCompressorFactory
+ .getInstance(BlobCompressorVersion.V1_0_1)
+ .apply {
+ Init(
+ dataLimit = blobCompressedLimit,
+ dictPath = GoNativeBlobCompressorFactory.dictionaryPath.toAbsolutePath().toString()
+ )
+ Reset()
+ }
+ decompressor = GoNativeBlobDecompressorFactory.getInstance(BlobDecompressorVersion.V1_1_0)
+ }
+
+ @Test
+ fun `when blocks are compressed with compressor shall decompress them back`() {
+ val blocks = CompressorTestData.blocksRlpEncoded
+ assertTrue(compressor.Write(blocks[0], blocks[0].size))
+ assertTrue(compressor.Write(blocks[1], blocks[1].size))
+
+ val compressedData = ByteArray(compressor.Len())
+ compressor.Bytes(compressedData)
+
+ val decompressedBlob = decompressor.decompress(compressedData)
+ assertThat(decompressedBlob.size).isGreaterThan(compressedData.size)
+ val decompressedBlocks: List = rlpDecodeAsListOfBytes(decompressedBlob)
+ assertThat(decompressedBlocks).hasSize(2)
+ }
+}
diff --git a/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/RLPHelper.kt b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/RLPHelper.kt
new file mode 100644
index 000000000..ba8554bf5
--- /dev/null
+++ b/jvm-libs/linea/blob-decompressor/src/test/kotlin/net/consensys/linea/blob/RLPHelper.kt
@@ -0,0 +1,26 @@
+package net.consensys.linea.blob
+
+import org.apache.tuweni.bytes.Bytes
+import org.hyperledger.besu.ethereum.rlp.RLP
+
+internal fun rlpEncode(list: List): ByteArray {
+ return RLP.encode { rlpWriter ->
+ rlpWriter.startList()
+ list.forEach { bytes ->
+ rlpWriter.writeBytes(Bytes.wrap(bytes))
+ }
+ rlpWriter.endList()
+ }.toArray()
+}
+
+internal fun rlpDecodeAsListOfBytes(rlpEncoded: ByteArray): List {
+ val decodedBytes = mutableListOf()
+ RLP.input(Bytes.wrap(rlpEncoded), true).also { rlpInput ->
+ rlpInput.enterList()
+ while (!rlpInput.isEndOfCurrentList) {
+ decodedBytes.add(rlpInput.readBytes().toArray())
+ }
+ rlpInput.leaveList()
+ }
+ return decodedBytes
+}
diff --git a/jvm-libs/linea/clients/linea-state-manager/build.gradle b/jvm-libs/linea/clients/linea-state-manager/build.gradle
new file mode 100644
index 000000000..55a647453
--- /dev/null
+++ b/jvm-libs/linea/clients/linea-state-manager/build.gradle
@@ -0,0 +1,22 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+dependencies {
+ api project(':jvm-libs:linea:core:domain-models')
+ api project(':jvm-libs:linea:core:metrics')
+ api project(':jvm-libs:linea:core:client-interface')
+ api project(':jvm-libs:generic:json-rpc')
+ api project(':jvm-libs:generic:errors')
+ api project(':jvm-libs:generic:extensions:futures')
+ api project(':jvm-libs:generic:extensions:kotlin')
+ api "io.tmio:tuweni-bytes:${libs.versions.tuweni.get()}"
+
+ implementation "com.fasterxml.jackson.core:jackson-annotations:${libs.versions.jackson.get()}"
+ implementation "com.fasterxml.jackson.core:jackson-databind:${libs.versions.jackson.get()}"
+ implementation "com.fasterxml.jackson.module:jackson-module-kotlin:${libs.versions.jackson.get()}"
+
+ testImplementation(project(":jvm-libs:linea:testing:file-system"))
+ testImplementation "io.vertx:vertx-junit5"
+ testImplementation "com.github.tomakehurst:wiremock-jre8:${libs.versions.wiremock.get()}"
+}
diff --git a/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerClientV1.kt b/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerClientV1.kt
new file mode 100644
index 000000000..f0ca4a0e8
--- /dev/null
+++ b/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerClientV1.kt
@@ -0,0 +1,94 @@
+package build.linea.clients
+
+import build.linea.domain.BlockInterval
+import com.fasterxml.jackson.databind.node.ArrayNode
+import com.github.michaelbull.result.Result
+import net.consensys.encodeHex
+import net.consensys.linea.errors.ErrorResponse
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+
+enum class StateManagerErrorType : ClientError {
+ UNKNOWN,
+ UNSUPPORTED_VERSION,
+ BLOCK_MISSING_IN_CHAIN
+}
+
+sealed interface StateManagerRequest : ClientRequest
+sealed class GetChainHeadRequest() : StateManagerRequest
+
+data class GetStateMerkleProofRequest(val blockInterval: BlockInterval) :
+ StateManagerRequest,
+ BlockInterval by blockInterval
+
+sealed interface StateManagerResponse
+
+data class GetZkEVMStateMerkleProofResponse(
+ val zkStateMerkleProof: ArrayNode,
+ val zkParentStateRootHash: ByteArray,
+ val zkEndStateRootHash: ByteArray,
+ val zkStateManagerVersion: String
+) : StateManagerResponse {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as GetZkEVMStateMerkleProofResponse
+
+ if (zkStateMerkleProof != other.zkStateMerkleProof) return false
+ if (!zkParentStateRootHash.contentEquals(other.zkParentStateRootHash)) return false
+ if (!zkEndStateRootHash.contentEquals(other.zkEndStateRootHash)) return false
+ if (zkStateManagerVersion != other.zkStateManagerVersion) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = zkStateMerkleProof.hashCode()
+ result = 31 * result + zkParentStateRootHash.contentHashCode()
+ result = 31 * result + zkEndStateRootHash.contentHashCode()
+ result = 31 * result + zkStateManagerVersion.hashCode()
+ return result
+ }
+
+ override fun toString(): String {
+ return "GetZkEVMStateMerkleProofResponse(" +
+ "zkStateMerkleProof=$zkStateMerkleProof, zkParentStateRootHash=${zkParentStateRootHash.encodeHex()}, " +
+ "zkEndStateRootHash=${zkEndStateRootHash.encodeHex()}, " +
+ "zkStateManagerVersion='$zkStateManagerVersion')"
+ }
+}
+
+data class GetChainHeadResponse(val headBlockNumber: ULong) : StateManagerResponse
+
+interface StateManagerClientV1 : AsyncClient> {
+ /**
+ * Get the state Merkle proof for the given block interval.
+ * @return GetZkEVMStateMerkleProofResponse
+ * @throws ClientException with errorType StateManagerErrorType when a known error occurs
+ */
+ fun rollupGetStateMerkleProof(
+ blockInterval: BlockInterval
+ ): SafeFuture = rollupGetStateMerkleProofWithTypedError(blockInterval)
+ .unwrapResultMonad()
+
+ /**
+ * This is for backward compatibility with the old version in the coordinator side.
+ * This error typing is not really useful anymore
+ */
+ fun rollupGetStateMerkleProofWithTypedError(
+ blockInterval: BlockInterval
+ ): SafeFuture>>
+
+ fun rollupGetHeadBlockNumber(): SafeFuture
+
+ override fun makeRequest(request: ClientRequest): SafeFuture {
+ @Suppress("UNCHECKED_CAST")
+ return when (request) {
+ is GetStateMerkleProofRequest -> rollupGetStateMerkleProof(request.blockInterval) as SafeFuture
+ is GetChainHeadRequest -> rollupGetHeadBlockNumber()
+ .thenApply { GetChainHeadResponse(it) } as SafeFuture
+
+ else -> throw IllegalArgumentException("Unknown request type: $request")
+ }
+ }
+}
diff --git a/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerV1JsonRpcClient.kt b/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerV1JsonRpcClient.kt
new file mode 100644
index 000000000..a00ef9781
--- /dev/null
+++ b/jvm-libs/linea/clients/linea-state-manager/src/main/kotlin/build/linea/clients/StateManagerV1JsonRpcClient.kt
@@ -0,0 +1,133 @@
+package build.linea.clients
+
+import build.linea.domain.BlockInterval
+import com.fasterxml.jackson.databind.JsonNode
+import com.fasterxml.jackson.databind.node.ArrayNode
+import com.github.michaelbull.result.Err
+import com.github.michaelbull.result.Ok
+import com.github.michaelbull.result.Result
+import io.vertx.core.json.JsonObject
+import net.consensys.decodeHex
+import net.consensys.fromHexString
+import net.consensys.linea.errors.ErrorResponse
+import net.consensys.linea.jsonrpc.JsonRpcErrorResponseException
+import net.consensys.linea.jsonrpc.client.JsonRpcClientFactory
+import net.consensys.linea.jsonrpc.client.JsonRpcV2Client
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import org.apache.logging.log4j.LogManager
+import org.apache.logging.log4j.Logger
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+import java.net.URI
+
+class StateManagerV1JsonRpcClient(
+ private val rpcClient: JsonRpcV2Client,
+ private val zkStateManagerVersion: String,
+ private val log: Logger = LogManager.getLogger(StateManagerV1JsonRpcClient::class.java)
+) : StateManagerClientV1 {
+
+ companion object {
+ fun create(
+ rpcClientFactory: JsonRpcClientFactory,
+ endpoints: List,
+ maxInflightRequestsPerClient: UInt,
+ requestRetry: RequestRetryConfig,
+ zkStateManagerVersion: String,
+ logger: Logger = LogManager.getLogger(StateManagerV1JsonRpcClient::class.java)
+ ): StateManagerV1JsonRpcClient {
+ return StateManagerV1JsonRpcClient(
+ rpcClient = rpcClientFactory.createJsonRpcV2Client(
+ endpoints = endpoints,
+ maxInflightRequestsPerClient = maxInflightRequestsPerClient,
+ retryConfig = requestRetry,
+ log = logger,
+ shallRetryRequestsClientBasePredicate = { it is Err }
+ ),
+ zkStateManagerVersion = zkStateManagerVersion
+ )
+ }
+ }
+
+ override fun rollupGetHeadBlockNumber(): SafeFuture {
+ return rpcClient
+ .makeRequest(
+ method = "rollup_getZkEVMBlockNumber",
+ params = emptyList(),
+ resultMapper = { ULong.fromHexString(it as String) }
+ )
+ }
+
+ override fun rollupGetStateMerkleProof(blockInterval: BlockInterval): SafeFuture {
+ val params = listOf(
+ JsonObject.of(
+ "startBlockNumber",
+ blockInterval.startBlockNumber.toLong(),
+ "endBlockNumber",
+ blockInterval.endBlockNumber.toLong(),
+ "zkStateManagerVersion",
+ zkStateManagerVersion
+ )
+ )
+
+ return rpcClient
+ .makeRequest(
+ method = "rollup_getZkEVMStateMerkleProofV0",
+ params = params,
+ resultMapper = ::parseZkEVMStateMerkleProofResponse
+ )
+ }
+
+ override fun rollupGetStateMerkleProofWithTypedError(
+ blockInterval: BlockInterval
+ ): SafeFuture>> {
+ return rollupGetStateMerkleProof(blockInterval)
+ .handleComposed { result, th ->
+ if (th != null) {
+ if (th is JsonRpcErrorResponseException) {
+ SafeFuture.completedFuture(Err(mapErrorResponse(th)))
+ } else {
+ SafeFuture.failedFuture(th)
+ }
+ } else {
+ SafeFuture.completedFuture(Ok(result))
+ }
+ }
+ }
+
+ private fun mapErrorResponse(
+ jsonRpcErrorResponse: JsonRpcErrorResponseException
+ ): ErrorResponse {
+ val errorType =
+ try {
+ StateManagerErrorType.valueOf(
+ jsonRpcErrorResponse.rpcErrorMessage.substringBefore('-').trim()
+ )
+ } catch (_: Exception) {
+ log.error(
+ "State manager found unrecognised JSON-RPC response error: {}",
+ jsonRpcErrorResponse.rpcErrorMessage
+ )
+ StateManagerErrorType.UNKNOWN
+ }
+
+ return ErrorResponse(
+ errorType,
+ listOfNotNull(
+ jsonRpcErrorResponse.rpcErrorMessage,
+ jsonRpcErrorResponse.rpcErrorData?.toString()
+ )
+ .joinToString(": ")
+ )
+ }
+
+ private fun parseZkEVMStateMerkleProofResponse(
+ result: Any?
+ ): GetZkEVMStateMerkleProofResponse {
+ result as JsonNode
+ return GetZkEVMStateMerkleProofResponse(
+ zkStateManagerVersion = result.get("zkStateManagerVersion").asText(),
+ zkStateMerkleProof = result.get("zkStateMerkleProof") as ArrayNode,
+ zkParentStateRootHash = result.get("zkParentStateRootHash").asText().decodeHex(),
+ zkEndStateRootHash = result.get("zkEndStateRootHash").asText().decodeHex()
+ )
+ }
+}
diff --git a/jvm-libs/linea/clients/linea-state-manager/src/test/kotlin/build/linea/clients/StateManagerV1JsonRpcClientTest.kt b/jvm-libs/linea/clients/linea-state-manager/src/test/kotlin/build/linea/clients/StateManagerV1JsonRpcClientTest.kt
new file mode 100644
index 000000000..e47fff9ab
--- /dev/null
+++ b/jvm-libs/linea/clients/linea-state-manager/src/test/kotlin/build/linea/clients/StateManagerV1JsonRpcClientTest.kt
@@ -0,0 +1,211 @@
+package build.linea.clients
+
+import build.linea.domain.BlockInterval
+import com.fasterxml.jackson.databind.node.ArrayNode
+import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
+import com.github.michaelbull.result.Err
+import com.github.michaelbull.result.Ok
+import com.github.tomakehurst.wiremock.WireMockServer
+import com.github.tomakehurst.wiremock.client.WireMock.containing
+import com.github.tomakehurst.wiremock.client.WireMock.ok
+import com.github.tomakehurst.wiremock.client.WireMock.post
+import com.github.tomakehurst.wiremock.core.WireMockConfiguration.options
+import io.micrometer.core.instrument.simple.SimpleMeterRegistry
+import io.vertx.core.Vertx
+import io.vertx.junit5.VertxExtension
+import net.consensys.decodeHex
+import net.consensys.fromHexString
+import net.consensys.linea.async.get
+import net.consensys.linea.errors.ErrorResponse
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
+import net.consensys.linea.testing.filesystem.findPathTo
+import org.assertj.core.api.Assertions.assertThat
+import org.assertj.core.api.Assertions.assertThatThrownBy
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.extension.ExtendWith
+import java.net.URI
+import kotlin.time.Duration.Companion.milliseconds
+import kotlin.time.Duration.Companion.seconds
+import kotlin.time.toJavaDuration
+
+@ExtendWith(VertxExtension::class)
+class StateManagerV1JsonRpcClientTest {
+ private lateinit var wiremock: WireMockServer
+ private lateinit var stateManagerClient: StateManagerV1JsonRpcClient
+ private lateinit var meterRegistry: SimpleMeterRegistry
+
+ private fun wiremockStubForPost(response: String) {
+ wiremock.stubFor(
+ post("/")
+ .withHeader("Content-Type", containing("application/json"))
+ .willReturn(
+ ok()
+ .withHeader("Content-type", "application/json")
+ .withBody(response.toByteArray())
+ )
+ )
+ }
+
+ @BeforeEach
+ fun setup(vertx: Vertx) {
+ wiremock = WireMockServer(options().dynamicPort())
+ wiremock.start()
+ meterRegistry = SimpleMeterRegistry()
+ stateManagerClient = StateManagerV1JsonRpcClient.create(
+ rpcClientFactory = VertxHttpJsonRpcClientFactory(vertx, meterRegistry),
+ endpoints = listOf(URI(wiremock.baseUrl())),
+ maxInflightRequestsPerClient = 1u,
+ requestRetry = RequestRetryConfig(
+ maxRetries = 2u,
+ timeout = 2.seconds,
+ 10.milliseconds,
+ 1u
+ ),
+ zkStateManagerVersion = "0.1.2"
+ )
+ }
+
+ @AfterEach
+ fun tearDown(vertx: Vertx) {
+ val vertxStopFuture = vertx.close()
+ wiremock.stop()
+ vertxStopFuture.get()
+ }
+
+ @Test
+ fun getZkEVMStateMerkleProof_success() {
+ val testFilePath = findPathTo("testdata")!!.resolve("type2state-manager/state-proof.json")
+ val json = jacksonObjectMapper().readTree(testFilePath.toFile())
+ val zkStateManagerVersion = json.get("zkStateManagerVersion").asText()
+ val zkStateMerkleProof = json.get("zkStateMerkleProof") as ArrayNode
+ val zkParentStateRootHash = json.get("zkParentStateRootHash").asText()
+ val zkEndStateRootHash = json.get("zkEndStateRootHash").asText()
+
+ wiremockStubForPost(
+ """
+ {
+ "jsonrpc":"2.0",
+ "id":"1",
+ "result": {
+ "zkParentStateRootHash": "$zkParentStateRootHash",
+ "zkEndStateRootHash": "$zkEndStateRootHash",
+ "zkStateMerkleProof": $zkStateMerkleProof,
+ "zkStateManagerVersion": "$zkStateManagerVersion"
+ }
+ }
+ """
+ )
+
+ assertThat(stateManagerClient.rollupGetStateMerkleProofWithTypedError(BlockInterval(50UL, 100UL)))
+ .succeedsWithin(5.seconds.toJavaDuration())
+ .isEqualTo(
+ Ok(
+ GetZkEVMStateMerkleProofResponse(
+ zkStateManagerVersion = zkStateManagerVersion,
+ zkStateMerkleProof = zkStateMerkleProof,
+ zkParentStateRootHash = zkParentStateRootHash.decodeHex(),
+ zkEndStateRootHash = zkEndStateRootHash.decodeHex()
+ )
+ )
+ )
+ }
+
+ @Test
+ fun getZkEVMStateMerkleProof_error_block_missing() {
+ wiremockStubForPost(
+ """
+ {
+ "jsonrpc":"2.0",
+ "id":"1",
+ "error":{
+ "code":"-32600",
+ "message":"BLOCK_MISSING_IN_CHAIN - block 1 is missing"
+ }
+ }"""
+ )
+
+ assertThat(stateManagerClient.rollupGetStateMerkleProofWithTypedError(BlockInterval(50UL, 100UL)))
+ .succeedsWithin(5.seconds.toJavaDuration())
+ .isEqualTo(
+ Err(
+ ErrorResponse(
+ StateManagerErrorType.BLOCK_MISSING_IN_CHAIN,
+ "BLOCK_MISSING_IN_CHAIN - block 1 is missing"
+ )
+ )
+ )
+ }
+
+ @Test
+ fun getZkEVMStateMerkleProof_error_unsupported_version() {
+ val response = """
+ {
+ "jsonrpc":"2.0",
+ "id":"1",
+ "error":{
+ "code":"-32602",
+ "message":"UNSUPPORTED_VERSION",
+ "data": {
+ "requestedVersion": "0.1.2",
+ "supportedVersion": "0.0.1-dev-3e607237"
+ }
+ }
+ }"""
+
+ wiremockStubForPost(response)
+
+ assertThat(stateManagerClient.rollupGetStateMerkleProofWithTypedError(BlockInterval(50UL, 100UL)))
+ .succeedsWithin(5.seconds.toJavaDuration())
+ .isEqualTo(
+ Err(
+ ErrorResponse(
+ StateManagerErrorType.UNSUPPORTED_VERSION,
+ "UNSUPPORTED_VERSION: {requestedVersion=0.1.2, supportedVersion=0.0.1-dev-3e607237}"
+ )
+ )
+ )
+ }
+
+ @Test
+ fun getZkEVMStateMerkleProof_error_unknown() {
+ wiremockStubForPost(
+ """
+ {
+ "jsonrpc":"2.0",
+ "id":"1",
+ "error":{
+ "code":-999,
+ "message":"BRA_BRA_BRA_SOME_UNKNOWN_ERROR",
+ "data": {"xyz": "1234", "abc": 100}
+ }
+ }"""
+ )
+
+ assertThat(stateManagerClient.rollupGetStateMerkleProofWithTypedError(BlockInterval(50L, 100L)))
+ .succeedsWithin(5.seconds.toJavaDuration())
+ .isEqualTo(
+ Err(ErrorResponse(StateManagerErrorType.UNKNOWN, """BRA_BRA_BRA_SOME_UNKNOWN_ERROR: {xyz=1234, abc=100}"""))
+ )
+ }
+
+ @Test
+ fun rollupGetHeadBlockNumber_success_response() {
+ wiremockStubForPost("""{"jsonrpc":"2.0","id":1,"result":"0xf1"}""")
+
+ assertThat(stateManagerClient.rollupGetHeadBlockNumber().get())
+ .isEqualTo(ULong.fromHexString("0xf1"))
+ }
+
+ @Test
+ fun rollupGetHeadBlockNumber_error_response() {
+ val response = """{"jsonrpc":"2.0","id":1,"error":{"code": -32603, "message": "Internal error"}}"""
+
+ wiremockStubForPost(response)
+
+ assertThatThrownBy { stateManagerClient.rollupGetHeadBlockNumber().get() }
+ .hasMessageContaining("Internal error")
+ }
+}
diff --git a/jvm-libs/linea/core/client-interface/build.gradle b/jvm-libs/linea/core/client-interface/build.gradle
new file mode 100644
index 000000000..1b9412c59
--- /dev/null
+++ b/jvm-libs/linea/core/client-interface/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+dependencies {
+ implementation(project(':jvm-libs:generic:extensions:kotlin'))
+ implementation(project(':jvm-libs:linea:core:domain-models'))
+ implementation(project(':jvm-libs:generic:errors'))
+}
diff --git a/jvm-libs/linea/core/client-interface/src/main/kotlin/build/linea/clients/Client.kt b/jvm-libs/linea/core/client-interface/src/main/kotlin/build/linea/clients/Client.kt
new file mode 100644
index 000000000..6b0c3d157
--- /dev/null
+++ b/jvm-libs/linea/core/client-interface/src/main/kotlin/build/linea/clients/Client.kt
@@ -0,0 +1,37 @@
+package build.linea.clients
+
+import com.github.michaelbull.result.Err
+import com.github.michaelbull.result.Ok
+import com.github.michaelbull.result.Result
+import net.consensys.linea.errors.ErrorResponse
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+
+/**
+ * Marker interface for error types.
+ * Allow concrete clients to extend this interface to define their own error types.
+ */
+interface ClientError
+
+class ClientException(
+ override val message: String,
+ val errorType: ClientError?
+) :
+ RuntimeException(errorType?.let { "errorType=$it $message" } ?: message)
+
+interface Client {
+ fun makeRequest(request: Request): Response
+}
+
+interface ClientRequest
+interface AsyncClient {
+ fun makeRequest(request: ClientRequest): SafeFuture
+}
+
+fun SafeFuture>>.unwrapResultMonad(): SafeFuture {
+ return this.thenCompose {
+ when (it) {
+ is Ok -> SafeFuture.completedFuture(it.value)
+ is Err -> SafeFuture.failedFuture(ClientException(it.error.message, it.error.type))
+ }
+ }
+}
diff --git a/jvm-libs/linea/core/domain-models/build.gradle b/jvm-libs/linea/core/domain-models/build.gradle
index 215e5cf54..b8ad8171c 100644
--- a/jvm-libs/linea/core/domain-models/build.gradle
+++ b/jvm-libs/linea/core/domain-models/build.gradle
@@ -4,7 +4,6 @@ plugins {
dependencies {
implementation project(":jvm-libs:generic:extensions:kotlin")
- implementation "org.apache.tuweni:tuweni-units:${libs.versions.tuweni.get()}"
}
jar {
diff --git a/jvm-libs/linea/core/domain-models/src/main/kotlin/build/linea/domain/BlockInterval.kt b/jvm-libs/linea/core/domain-models/src/main/kotlin/build/linea/domain/BlockInterval.kt
new file mode 100644
index 000000000..62a06ebc8
--- /dev/null
+++ b/jvm-libs/linea/core/domain-models/src/main/kotlin/build/linea/domain/BlockInterval.kt
@@ -0,0 +1,126 @@
+package build.linea.domain
+
+import net.consensys.isSortedBy
+import net.consensys.linea.CommonDomainFunctions
+
+/**
+ * Represents a block interval, with inclusive start and end block numbers
+ * @property startBlockNumber start block number, inclusive
+ * @property endBlockNumber end block number, inclusive
+ */
+interface BlockInterval {
+ val startBlockNumber: ULong
+ val endBlockNumber: ULong
+ val blocksRange: ULongRange
+ get() = startBlockNumber..endBlockNumber
+
+ fun intervalString(): String = CommonDomainFunctions.blockIntervalString(startBlockNumber, endBlockNumber)
+
+ companion object {
+ operator fun invoke(
+ startBlockNumber: ULong,
+ endBlockNumber: ULong
+ ): BlockInterval {
+ return BlockIntervalData(startBlockNumber, endBlockNumber)
+ }
+
+ operator fun invoke(
+ startBlockNumber: Number,
+ endBlockNumber: Number
+ ): BlockInterval {
+ assert(startBlockNumber.toLong() >= 0 && endBlockNumber.toLong() >= 0) {
+ "startBlockNumber=${startBlockNumber.toLong()} and " +
+ "endBlockNumber=${endBlockNumber.toLong()} must be non-negative!"
+ }
+ return BlockIntervalData(startBlockNumber.toLong().toULong(), endBlockNumber.toLong().toULong())
+ }
+
+ // Todo: remove later
+ /**
+ * Please use BlockInterval(startBlockNumber, endBlockNumber) instead
+ */
+ fun between(
+ startBlockNumber: ULong,
+ endBlockNumber: ULong
+ ): BlockInterval {
+ return BlockIntervalData(startBlockNumber, endBlockNumber)
+ }
+ }
+}
+
+/**
+ * Represents a block interval
+ * @property startBlockNumber starting block number inclusive
+ * @property endBlockNumber ending block number inclusive
+ */
+data class BlockIntervalData(
+ override val startBlockNumber: ULong,
+ override val endBlockNumber: ULong
+) : BlockInterval {
+ init {
+ require(startBlockNumber <= endBlockNumber) {
+ "startBlockNumber=$startBlockNumber must be less than or equal to endBlockNumber$endBlockNumber"
+ }
+ }
+}
+
+fun List.toBlockIntervalsString(): String {
+ return this.joinToString(
+ separator = ", ",
+ prefix = "[",
+ postfix = "]$size",
+ transform = BlockInterval::intervalString
+ )
+}
+
+fun List.filterOutWithEndBlockNumberBefore(
+ endBlockNumberInclusive: ULong
+): List {
+ return this.filter { int -> int.endBlockNumber > endBlockNumberInclusive }
+}
+
+fun assertConsecutiveIntervals(intervals: List) {
+ require(intervals.isSortedBy { it.startBlockNumber }) { "Intervals must be sorted by startBlockNumber" }
+ require(intervals.zipWithNext().all { (a, b) -> a.endBlockNumber + 1u == b.startBlockNumber }) {
+ "Intervals must be consecutive: intervals=${intervals.toBlockIntervalsString()}"
+ }
+}
+
+/**
+ * Data class that represents sequential block intervals for either Conflations, Blobs or Aggregations.
+ * Example:
+ * conflations: [100..110], [111..120], [121..130] --> BlockIntervals(100, [110, 120, 130])
+ * Blobs with
+ * Blob1 2 conflations above: [100..110], [111..120]
+ * Blob2 1 conflations: [121..130]
+ * --> BlockIntervals(100, [120, 130])
+ */
+data class BlockIntervals(
+ val startingBlockNumber: ULong,
+ val upperBoundaries: List
+) {
+ // This default constructor is to avoid the parse error when deserializing
+ constructor() : this(0UL, listOf())
+
+ fun toIntervalList(): List {
+ var previousBlockNumber = startingBlockNumber
+ val intervals = mutableListOf()
+ upperBoundaries.forEach {
+ intervals.add(BlockIntervalData(previousBlockNumber, it))
+ previousBlockNumber = it + 1u
+ }
+ return intervals
+ }
+
+ fun toBlockInterval(): BlockInterval {
+ return BlockIntervalData(startingBlockNumber, upperBoundaries.last())
+ }
+}
+
+fun List.toBlockIntervals(): BlockIntervals {
+ require(isNotEmpty()) { "BlockIntervals list must not be empty" }
+ return BlockIntervals(
+ startingBlockNumber = first().startBlockNumber,
+ upperBoundaries = map { it.endBlockNumber }
+ )
+}
diff --git a/jvm-libs/linea/core/domain-models/src/main/kotlin/net/consensys/linea/BlockNumberAndHash.kt b/jvm-libs/linea/core/domain-models/src/main/kotlin/net/consensys/linea/BlockNumberAndHash.kt
index 4cd382c87..9750aaedb 100644
--- a/jvm-libs/linea/core/domain-models/src/main/kotlin/net/consensys/linea/BlockNumberAndHash.kt
+++ b/jvm-libs/linea/core/domain-models/src/main/kotlin/net/consensys/linea/BlockNumberAndHash.kt
@@ -1,8 +1,30 @@
package net.consensys.linea
-import org.apache.tuweni.bytes.Bytes32
+import net.consensys.encodeHex
data class BlockNumberAndHash(
val number: ULong,
- val hash: Bytes32
-)
+ val hash: ByteArray
+) {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as BlockNumberAndHash
+
+ if (number != other.number) return false
+ if (!hash.contentEquals(other.hash)) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = number.hashCode()
+ result = 31 * result + hash.contentHashCode()
+ return result
+ }
+
+ override fun toString(): String {
+ return "BlockNumberAndHash(number=$number, hash=${hash.encodeHex()})"
+ }
+}
diff --git a/coordinator/core/src/test/kotlin/net/consensys/zkevm/domain/BlockIntervalsTest.kt b/jvm-libs/linea/core/domain-models/src/test/kotlin/build/linea/domain/BlockIntervalsTest.kt
similarity index 98%
rename from coordinator/core/src/test/kotlin/net/consensys/zkevm/domain/BlockIntervalsTest.kt
rename to jvm-libs/linea/core/domain-models/src/test/kotlin/build/linea/domain/BlockIntervalsTest.kt
index 4fd73b46d..f76d706e0 100644
--- a/coordinator/core/src/test/kotlin/net/consensys/zkevm/domain/BlockIntervalsTest.kt
+++ b/jvm-libs/linea/core/domain-models/src/test/kotlin/build/linea/domain/BlockIntervalsTest.kt
@@ -1,4 +1,4 @@
-package net.consensys.zkevm.domain
+package build.linea.domain
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
diff --git a/jvm-libs/linea/core/traces/build.gradle b/jvm-libs/linea/core/traces/build.gradle
index ec8a39901..0f5cad399 100644
--- a/jvm-libs/linea/core/traces/build.gradle
+++ b/jvm-libs/linea/core/traces/build.gradle
@@ -4,7 +4,7 @@ plugins {
}
dependencies {
- implementation "org.apache.tuweni:tuweni-units:${libs.versions.tuweni.get()}"
+ implementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
implementation project(':jvm-libs:generic:extensions:kotlin')
testImplementation project(':jvm-libs:linea:testing:file-system')
testImplementation "com.sksamuel.hoplite:hoplite-core:${libs.versions.hoplite.get()}"
diff --git a/jvm-libs/linea/linea-contracts/l1-rollup/build.gradle b/jvm-libs/linea/linea-contracts/l1-rollup/build.gradle
index 06c6a12dc..f6422670d 100644
--- a/jvm-libs/linea/linea-contracts/l1-rollup/build.gradle
+++ b/jvm-libs/linea/linea-contracts/l1-rollup/build.gradle
@@ -16,8 +16,8 @@ dependencies {
web3jContractWrappers {
def contractAbi = layout.buildDirectory.dir("${rootProject.projectDir}/contracts/abi").get()
- .file("LineaRollupV5.0.abi").asFile.absolutePath
+ .file("LineaRollupV6.0.abi").asFile.absolutePath
- contractsPackage = "net.consensys.linea.contract"
- contracts = ["$contractAbi": "LineaRollup"]
+ contractsPackage = "build.linea.contract"
+ contracts = ["$contractAbi": "LineaRollupV6"]
}
diff --git a/jvm-libs/linea/testing/teku-helper/build.gradle b/jvm-libs/linea/testing/teku-helper/build.gradle
index 9fac0d672..74e7ffbb0 100644
--- a/jvm-libs/linea/testing/teku-helper/build.gradle
+++ b/jvm-libs/linea/testing/teku-helper/build.gradle
@@ -9,4 +9,6 @@ dependencies {
implementation "tech.pegasys.teku.internal:spec:${libs.versions.teku.get()}"
implementation "tech.pegasys.teku.internal:spec:${libs.versions.teku.get()}:test-fixtures"
+ implementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
+ implementation project(':jvm-libs:generic:extensions:kotlin')
}
diff --git a/jvm-libs/linea/testing/teku-helper/src/main/kotlin/tech/pegasys/teku/ethereum/executionclient/schema/ExecutionPayloadV1.kt b/jvm-libs/linea/testing/teku-helper/src/main/kotlin/tech/pegasys/teku/ethereum/executionclient/schema/ExecutionPayloadV1.kt
index 61f8f434a..bf862d111 100644
--- a/jvm-libs/linea/testing/teku-helper/src/main/kotlin/tech/pegasys/teku/ethereum/executionclient/schema/ExecutionPayloadV1.kt
+++ b/jvm-libs/linea/testing/teku-helper/src/main/kotlin/tech/pegasys/teku/ethereum/executionclient/schema/ExecutionPayloadV1.kt
@@ -1,5 +1,9 @@
package tech.pegasys.teku.ethereum.executionclient.schema
+import kotlinx.datetime.Clock
+import kotlinx.datetime.Instant
+import net.consensys.ByteArrayExt
+import net.consensys.toBigInteger
import org.apache.tuweni.bytes.Bytes
import org.apache.tuweni.bytes.Bytes32
import org.apache.tuweni.units.bigints.UInt256
@@ -7,6 +11,41 @@ import tech.pegasys.teku.infrastructure.bytes.Bytes20
import tech.pegasys.teku.infrastructure.unsigned.UInt64
import tech.pegasys.teku.spec.TestSpecFactory
import tech.pegasys.teku.spec.util.DataStructureUtil
+import java.math.BigInteger
+
+fun executionPayloadV1(
+ blockNumber: Long = 0,
+ parentHash: ByteArray = ByteArrayExt.random32(),
+ feeRecipient: ByteArray = ByteArrayExt.random(20),
+ stateRoot: ByteArray = ByteArrayExt.random32(),
+ receiptsRoot: ByteArray = ByteArrayExt.random32(),
+ logsBloom: ByteArray = ByteArrayExt.random32(),
+ prevRandao: ByteArray = ByteArrayExt.random32(),
+ gasLimit: ULong = 0UL,
+ gasUsed: ULong = 0UL,
+ timestamp: Instant = Clock.System.now(),
+ extraData: ByteArray = ByteArrayExt.random32(),
+ baseFeePerGas: BigInteger = BigInteger.valueOf(256),
+ blockHash: ByteArray = ByteArrayExt.random32(),
+ transactions: List = emptyList()
+): ExecutionPayloadV1 {
+ return ExecutionPayloadV1(
+ Bytes32.wrap(parentHash),
+ Bytes20(Bytes.wrap(feeRecipient)),
+ Bytes32.wrap(stateRoot),
+ Bytes32.wrap(receiptsRoot),
+ Bytes.wrap(logsBloom),
+ Bytes32.wrap(prevRandao),
+ UInt64.valueOf(blockNumber),
+ UInt64.valueOf(gasLimit.toBigInteger()),
+ UInt64.valueOf(gasUsed.toBigInteger()),
+ UInt64.valueOf(timestamp.epochSeconds),
+ Bytes.wrap(extraData),
+ UInt256.valueOf(baseFeePerGas),
+ Bytes32.wrap(blockHash),
+ transactions.map { Bytes.wrap(it) }
+ )
+}
fun executionPayloadV1(
blockNumber: Long = 0,
@@ -78,7 +117,7 @@ fun randomExecutionPayload(
)
}
-private val dataStructureUtil: DataStructureUtil = DataStructureUtil(TestSpecFactory.createMinimalBellatrix())
+val dataStructureUtil: DataStructureUtil = DataStructureUtil(TestSpecFactory.createMinimalBellatrix())
// Teku UInt64 has a bug allow negative number to be created
// random test payload creates such cases we need to fix it
diff --git a/jvm-libs/linea/web3j-extensions/build.gradle b/jvm-libs/linea/web3j-extensions/build.gradle
index b0312af6c..84d8c85c6 100644
--- a/jvm-libs/linea/web3j-extensions/build.gradle
+++ b/jvm-libs/linea/web3j-extensions/build.gradle
@@ -9,6 +9,7 @@ dependencies {
api project(':jvm-libs:generic:logging')
// For domain mappers
implementation project(":jvm-libs:generic:extensions:kotlin")
+ implementation "tech.pegasys.teku.internal:bytes:${libs.versions.teku.get()}"
implementation "tech.pegasys.teku.internal:jackson:${libs.versions.teku.get()}"
// Returned by domain mapper
api project(":jvm-libs:linea:teku-execution-client")
diff --git a/prover/backend/blobsubmission/blobcompression_test.go b/prover/backend/blobsubmission/blobcompression_test.go
index d25f64966..4a2c89c74 100644
--- a/prover/backend/blobsubmission/blobcompression_test.go
+++ b/prover/backend/blobsubmission/blobcompression_test.go
@@ -9,7 +9,7 @@ import (
"strings"
"testing"
- blob "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
"github.com/consensys/linea-monorepo/prover/utils"
@@ -270,7 +270,7 @@ func TestKZGWithPoint(t *testing.T) {
}
// Compute all the prover fields
- snarkHash, err := blob.MiMCChecksumPackedData(blobBytes[:], fr381.Bits-1, blob.NoTerminalSymbol())
+ snarkHash, err := encode.MiMCChecksumPackedData(blobBytes[:], fr381.Bits-1, encode.NoTerminalSymbol())
assert.NoError(t, err)
xUnreduced := evaluationChallenge(snarkHash, blobHash[:])
diff --git a/prover/backend/blobsubmission/craft.go b/prover/backend/blobsubmission/craft.go
index ab5bbf331..b5d9dab66 100644
--- a/prover/backend/blobsubmission/craft.go
+++ b/prover/backend/blobsubmission/craft.go
@@ -7,9 +7,9 @@ import (
"hash"
"github.com/consensys/linea-monorepo/prover/crypto/mimc"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
- blob "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
"github.com/consensys/linea-monorepo/prover/utils"
"golang.org/x/crypto/sha3"
)
@@ -72,7 +72,7 @@ func CraftResponseCalldata(req *Request) (*Response, error) {
}
// Compute all the prover fields
- snarkHash, err := blob.MiMCChecksumPackedData(compressedStream, fr381.Bits-1, blob.NoTerminalSymbol())
+ snarkHash, err := encode.MiMCChecksumPackedData(compressedStream, fr381.Bits-1, encode.NoTerminalSymbol())
if err != nil {
return nil, fmt.Errorf("crafting response: could not compute snark hash: %w", err)
}
diff --git a/prover/backend/blobsubmission/craft_eip4844.go b/prover/backend/blobsubmission/craft_eip4844.go
index b1fa9e3d7..741104aec 100644
--- a/prover/backend/blobsubmission/craft_eip4844.go
+++ b/prover/backend/blobsubmission/craft_eip4844.go
@@ -4,6 +4,7 @@ import (
"crypto/sha256"
"errors"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
blob "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
@@ -91,7 +92,7 @@ func CraftResponse(req *Request) (*Response, error) {
}
// Compute all the prover fields
- snarkHash, err := blob.MiMCChecksumPackedData(append(compressedStream, make([]byte, blob.MaxUsableBytes-len(compressedStream))...), fr381.Bits-1, blob.NoTerminalSymbol())
+ snarkHash, err := encode.MiMCChecksumPackedData(append(compressedStream, make([]byte, blob.MaxUsableBytes-len(compressedStream))...), fr381.Bits-1, encode.NoTerminalSymbol())
if err != nil {
return nil, fmt.Errorf("crafting response: could not compute snark hash: %w", err)
}
diff --git a/prover/backend/ethereum/signature_test.go b/prover/backend/ethereum/signature_test.go
index 65d3faeb0..3a93dc26d 100644
--- a/prover/backend/ethereum/signature_test.go
+++ b/prover/backend/ethereum/signature_test.go
@@ -117,10 +117,11 @@ func TestTransactionSigning(t *testing.T) {
assert.Equal(t, from.Hex(), recovered.Hex(), "Mismatch of the recovered address")
// Simulates the decoding of the transaction
- var decodedTx types.Transaction
- err = DecodeTxFromBytes(bytes.NewReader(rlp), &decodedTx)
-
+ decodedTxData, err := DecodeTxFromBytes(bytes.NewReader(rlp))
require.NoError(t, err)
+
+ decodedTx := types.NewTx(decodedTxData)
+
assert.Equal(t, tx.To(), decodedTx.To())
assert.Equal(t, tx.Nonce(), decodedTx.Nonce())
assert.Equal(t, tx.Data(), decodedTx.Data())
diff --git a/prover/backend/ethereum/tx_encoding.go b/prover/backend/ethereum/tx_encoding.go
index e0b47dfd0..ee61323c9 100644
--- a/prover/backend/ethereum/tx_encoding.go
+++ b/prover/backend/ethereum/tx_encoding.go
@@ -97,25 +97,25 @@ const (
// than the transaction then the remaining bytes are discarded and only the
// first bytes are used to decode the transaction. The function returns the
// transactions and the number of bytes read.
-func DecodeTxFromBytes(b *bytes.Reader, tx *types.Transaction) (err error) {
+func DecodeTxFromBytes(b *bytes.Reader) (tx types.TxData, err error) {
var (
firstByte byte
)
if b.Len() == 0 {
- return fmt.Errorf("empty buffer")
+ return nil, fmt.Errorf("empty buffer")
}
if firstByte, err = b.ReadByte(); err != nil {
- return fmt.Errorf("could not read the first byte: %w", err)
+ return nil, fmt.Errorf("could not read the first byte: %w", err)
}
switch {
case firstByte == types.DynamicFeeTxType:
- return decodeDynamicFeeTx(b, tx)
+ return decodeDynamicFeeTx(b)
case firstByte == types.AccessListTxType:
- return decodeAccessListTx(b, tx)
+ return decodeAccessListTx(b)
// According to the RLP rule, `0xc0 + x` or `0xf7` indicates that the current
// item is a list and this is what's used to identify that the transaction is
// a legacy transaction or a EIP-155 transaction.
@@ -125,69 +125,69 @@ func DecodeTxFromBytes(b *bytes.Reader, tx *types.Transaction) (err error) {
// Set the byte-reader backward so that we can apply the rlp-decoder
// over it.
b.UnreadByte()
- return decodeLegacyTx(b, tx)
+ return decodeLegacyTx(b)
+ default:
+ return nil, fmt.Errorf("unexpected first byte: %x", firstByte)
}
-
- return fmt.Errorf("unexpected first byte: %x", firstByte)
}
// decodeDynamicFeeTx encodes a [types.DynamicFeeTx] into a [bytes.Reader] and
// returns an error if it did not pass.
-func decodeDynamicFeeTx(b *bytes.Reader, tx *types.Transaction) (err error) {
+func decodeDynamicFeeTx(b *bytes.Reader) (parsedTx *types.DynamicFeeTx, err error) {
decTx := []any{}
- if err := rlp.Decode(b, &decTx); err != nil {
- return fmt.Errorf("could not rlp decode transaction: %w", err)
+ if err = rlp.Decode(b, &decTx); err != nil {
+ return nil, fmt.Errorf("could not rlp decode transaction: %w", err)
}
if len(decTx) != dynFeeNumField {
- return fmt.Errorf("invalid number of field for a dynamic transaction")
+ return nil, fmt.Errorf("invalid number of fields for a dynamic transaction")
}
- parsedTx := types.DynamicFeeTx{}
+ parsedTx = new(types.DynamicFeeTx)
+
err = errors.Join(
- tryCast(&parsedTx.ChainID, decTx[0], "chainID"),
- tryCast(&parsedTx.Nonce, decTx[1], "nonce"),
- tryCast(&parsedTx.GasTipCap, decTx[2], "gas-tip-cap"),
- tryCast(&parsedTx.GasFeeCap, decTx[3], "gas-fee-cap"),
- tryCast(&parsedTx.Gas, decTx[4], "gas"),
- tryCast(&parsedTx.To, decTx[5], "to"),
- tryCast(&parsedTx.Value, decTx[6], "value"),
- tryCast(&parsedTx.Data, decTx[7], "data"),
- tryCast(&parsedTx.AccessList, decTx[8], "access-list"),
+ TryCast(&parsedTx.ChainID, decTx[0], "chainID"),
+ TryCast(&parsedTx.Nonce, decTx[1], "nonce"),
+ TryCast(&parsedTx.GasTipCap, decTx[2], "gas-tip-cap"),
+ TryCast(&parsedTx.GasFeeCap, decTx[3], "gas-fee-cap"),
+ TryCast(&parsedTx.Gas, decTx[4], "gas"),
+ TryCast(&parsedTx.To, decTx[5], "to"),
+ TryCast(&parsedTx.Value, decTx[6], "value"),
+ TryCast(&parsedTx.Data, decTx[7], "data"),
+ TryCast(&parsedTx.AccessList, decTx[8], "access-list"),
)
- *tx = *types.NewTx(&parsedTx)
- return err
+
+ return
}
-// decodeAccessListTx decodes an [types.AccessListTx] from a [bytes.Reader]
+// decodeAccessListTx decodes a [types.AccessListTx] from a [bytes.Reader]
// and returns an error if it did not pass.
-func decodeAccessListTx(b *bytes.Reader, tx *types.Transaction) (err error) {
+func decodeAccessListTx(b *bytes.Reader) (parsedTx *types.AccessListTx, err error) {
decTx := []any{}
if err := rlp.Decode(b, &decTx); err != nil {
- return fmt.Errorf("could not rlp decode transaction: %w", err)
+ return nil, fmt.Errorf("could not rlp decode transaction: %w", err)
}
if len(decTx) != accessListTxNumField {
- return fmt.Errorf("invalid number of field for a dynamic transaction")
+ return nil, fmt.Errorf("invalid number of fields for an access-list transaction")
}
- parsedTx := types.AccessListTx{}
+ parsedTx = new(types.AccessListTx)
err = errors.Join(
- tryCast(&parsedTx.ChainID, decTx[0], "chainID"),
- tryCast(&parsedTx.Nonce, decTx[1], "nonce"),
- tryCast(&parsedTx.GasPrice, decTx[2], "gas-price"),
- tryCast(&parsedTx.Gas, decTx[3], "gas"),
- tryCast(&parsedTx.To, decTx[4], "to"),
- tryCast(&parsedTx.Value, decTx[5], "value"),
- tryCast(&parsedTx.Data, decTx[6], "data"),
- tryCast(&parsedTx.AccessList, decTx[7], "access-list"),
+ TryCast(&parsedTx.ChainID, decTx[0], "chainID"),
+ TryCast(&parsedTx.Nonce, decTx[1], "nonce"),
+ TryCast(&parsedTx.GasPrice, decTx[2], "gas-price"),
+ TryCast(&parsedTx.Gas, decTx[3], "gas"),
+ TryCast(&parsedTx.To, decTx[4], "to"),
+ TryCast(&parsedTx.Value, decTx[5], "value"),
+ TryCast(&parsedTx.Data, decTx[6], "data"),
+ TryCast(&parsedTx.AccessList, decTx[7], "access-list"),
)
- *tx = *types.NewTx(&parsedTx)
- return err
+ return
}
// decodeLegacyTx decodes a [types.LegacyTx] from a [bytes.Reader] and returns
@@ -197,36 +197,35 @@ func decodeAccessListTx(b *bytes.Reader, tx *types.Transaction) (err error) {
// not decoded although it could. The reason is that it is complicated to set
// it in the returned element as it "included" in the signature and we don't
// encode the signature.
-func decodeLegacyTx(b *bytes.Reader, tx *types.Transaction) (err error) {
+func decodeLegacyTx(b *bytes.Reader) (parsedTx *types.LegacyTx, err error) {
decTx := []any{}
- if err := rlp.Decode(b, &decTx); err != nil {
- return fmt.Errorf("could not rlp decode transaction: %w", err)
+ if err = rlp.Decode(b, &decTx); err != nil {
+ return nil, fmt.Errorf("could not rlp decode transaction: %w", err)
}
if len(decTx) != legacyTxNumField && len(decTx) != unprotectedTxNumField {
- return fmt.Errorf("unexpected number of field")
+ return nil, fmt.Errorf("unexpected number of fields")
}
- parsedTx := types.LegacyTx{}
+ parsedTx = new(types.LegacyTx)
err = errors.Join(
- tryCast(&parsedTx.Nonce, decTx[0], "nonce"),
- tryCast(&parsedTx.GasPrice, decTx[1], "gas-price"),
- tryCast(&parsedTx.Gas, decTx[2], "gas"),
- tryCast(&parsedTx.To, decTx[3], "to"),
- tryCast(&parsedTx.Value, decTx[4], "value"),
- tryCast(&parsedTx.Data, decTx[5], "data"),
+ TryCast(&parsedTx.Nonce, decTx[0], "nonce"),
+ TryCast(&parsedTx.GasPrice, decTx[1], "gas-price"),
+ TryCast(&parsedTx.Gas, decTx[2], "gas"),
+ TryCast(&parsedTx.To, decTx[3], "to"),
+ TryCast(&parsedTx.Value, decTx[4], "value"),
+ TryCast(&parsedTx.Data, decTx[5], "data"),
)
- *tx = *types.NewTx(&parsedTx)
- return err
+ return
}
-// tryCast will attempt to set t with the underlying value of `from` will return
+// TryCast will attempt to set t with the underlying value of `from` and will return
// an error if the type does not match. The explainer string is used to generate
// the error if any.
-func tryCast[T any](into *T, from any, explainer string) error {
+func TryCast[T any](into *T, from any, explainer string) error {
if into == nil || from == nil {
return fmt.Errorf("from or into is/are nil")
@@ -234,7 +233,7 @@ func tryCast[T any](into *T, from any, explainer string) error {
// The rlp encoding is not "type-aware", if the underlying field is an
// access-list, it will decode into []interface{} (and we recursively parse
- // it) otherwise, it always decode to `[]byte`
+ // it) otherwise, it always decodes to `[]byte`
if list, ok := (from).([]interface{}); ok {
var (
@@ -249,7 +248,7 @@ func tryCast[T any](into *T, from any, explainer string) error {
for i := range accessList {
err = errors.Join(
err,
- tryCast(&accessList[i], list[i], fmt.Sprintf("%v[%v]", explainer, i)),
+ TryCast(&accessList[i], list[i], fmt.Sprintf("%v[%v]", explainer, i)),
)
}
*into = (any(accessList)).(T)
@@ -258,8 +257,8 @@ func tryCast[T any](into *T, from any, explainer string) error {
case types.AccessTuple:
tuple := types.AccessTuple{}
err = errors.Join(
- tryCast(&tuple.Address, list[0], fmt.Sprintf("%v.%v", explainer, "address")),
- tryCast(&tuple.StorageKeys, list[1], fmt.Sprintf("%v.%v", explainer, "storage-key")),
+ TryCast(&tuple.Address, list[0], fmt.Sprintf("%v.%v", explainer, "address")),
+ TryCast(&tuple.StorageKeys, list[1], fmt.Sprintf("%v.%v", explainer, "storage-key")),
)
*into = (any(tuple)).(T)
return err
@@ -267,7 +266,7 @@ func tryCast[T any](into *T, from any, explainer string) error {
case []common.Hash:
hashes := make([]common.Hash, length)
for i := range hashes {
- tryCast(&hashes[i], list[i], fmt.Sprintf("%v[%v]", explainer, i))
+ TryCast(&hashes[i], list[i], fmt.Sprintf("%v[%v]", explainer, i))
}
*into = (any(hashes)).(T)
return err
@@ -285,7 +284,7 @@ func tryCast[T any](into *T, from any, explainer string) error {
switch intoAny.(type) {
case *common.Address:
// Parse the bytes as an UTF8 string (= direct casting in go).
- // Then, the string as an hexstring encoded address.
+ // Then, the string as a hex string encoded address.
address := common.BytesToAddress(fromBytes)
*into = any(&address).(T)
case common.Address:
@@ -295,7 +294,7 @@ func tryCast[T any](into *T, from any, explainer string) error {
*into = any(address).(T)
case common.Hash:
// Parse the bytes as an UTF8 string (= direct casting in go).
- // Then, the string as an hexstring encoded address.
+ // Then, the string as a hex string encoded hash.
hash := common.BytesToHash(fromBytes)
*into = any(hash).(T)
case *big.Int:
diff --git a/prover/circuits/blobdecompression/v0/assign_test.go b/prover/circuits/blobdecompression/v0/assign_test.go
index 12b50caf8..3c99b858f 100644
--- a/prover/circuits/blobdecompression/v0/assign_test.go
+++ b/prover/circuits/blobdecompression/v0/assign_test.go
@@ -7,6 +7,7 @@ import (
"testing"
v0 "github.com/consensys/linea-monorepo/prover/circuits/blobdecompression/v0"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
"github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
"github.com/consensys/gnark-crypto/ecc"
@@ -74,7 +75,9 @@ func mustGetTestCompressedData(t *testing.T) (resp blobsubmission.Response, blob
blobBytes, err = base64.StdEncoding.DecodeString(resp.CompressedData)
assert.NoError(t, err)
- _, _, _, err = blob.DecompressBlob(blobBytes, dict)
+ dictStore, err := dictionary.SingletonStore(dict, 0)
+ assert.NoError(t, err)
+ _, _, _, err = blob.DecompressBlob(blobBytes, dictStore)
assert.NoError(t, err)
return
diff --git a/prover/circuits/blobdecompression/v0/prelude.go b/prover/circuits/blobdecompression/v0/prelude.go
index af309d111..fa00d73e7 100644
--- a/prover/circuits/blobdecompression/v0/prelude.go
+++ b/prover/circuits/blobdecompression/v0/prelude.go
@@ -5,6 +5,7 @@ import (
"fmt"
"github.com/consensys/linea-monorepo/prover/circuits/internal"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
"github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
@@ -55,7 +56,12 @@ func Assign(blobData, dict []byte, eip4844Enabled bool, x [32]byte, y fr381.Elem
return
}
- header, uncompressedData, _, err := blob.DecompressBlob(blobData, dict)
+ dictStore, err := dictionary.SingletonStore(dict, 0)
+ if err != nil {
+ err = fmt.Errorf("failed to create dictionary store: %w", err)
+ return
+ }
+ header, uncompressedData, _, err := blob.DecompressBlob(blobData, dictStore)
if err != nil {
err = fmt.Errorf("decompression circuit assignment : could not decompress the data : %w", err)
return
diff --git a/prover/circuits/blobdecompression/v1/assign_test.go b/prover/circuits/blobdecompression/v1/assign_test.go
index db3af2cd9..8bf6c2ea1 100644
--- a/prover/circuits/blobdecompression/v1/assign_test.go
+++ b/prover/circuits/blobdecompression/v1/assign_test.go
@@ -5,6 +5,7 @@ package v1_test
import (
"encoding/base64"
"encoding/hex"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
"testing"
"github.com/consensys/gnark-crypto/ecc"
@@ -27,7 +28,9 @@ func prepareTestBlob(t require.TestingT) (c, a frontend.Circuit) {
func prepare(t require.TestingT, blobBytes []byte) (c *v1.Circuit, a frontend.Circuit) {
- _, payload, _, err := blobcompressorv1.DecompressBlob(blobBytes, blobtestutils.GetDict(t))
+ dictStore, err := dictionary.SingletonStore(blobtestutils.GetDict(t), 1)
+ assert.NoError(t, err)
+ _, payload, _, err := blobcompressorv1.DecompressBlob(blobBytes, dictStore)
assert.NoError(t, err)
resp, err := blobsubmission.CraftResponse(&blobsubmission.Request{
diff --git a/prover/circuits/blobdecompression/v1/circuit.go b/prover/circuits/blobdecompression/v1/circuit.go
index 466e1b21b..be1b00e8e 100644
--- a/prover/circuits/blobdecompression/v1/circuit.go
+++ b/prover/circuits/blobdecompression/v1/circuit.go
@@ -4,6 +4,8 @@ import (
"bytes"
"errors"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
"math/big"
"github.com/consensys/gnark-crypto/ecc"
@@ -235,7 +237,12 @@ func AssignFPI(blobBytes, dict []byte, eip4844Enabled bool, x [32]byte, y fr381.
return
}
- header, payload, _, err := blob.DecompressBlob(blobBytes, dict)
+ dictStore, err := dictionary.SingletonStore(dict, 1)
+ if err != nil {
+ err = fmt.Errorf("failed to create dictionary store: %w", err)
+ return
+ }
+ header, payload, _, err := blob.DecompressBlob(blobBytes, dictStore)
if err != nil {
return
}
@@ -266,7 +273,7 @@ func AssignFPI(blobBytes, dict []byte, eip4844Enabled bool, x [32]byte, y fr381.
if len(blobBytes) != 128*1024 {
panic("blobBytes length is not 128*1024")
}
- fpi.SnarkHash, err = blob.MiMCChecksumPackedData(blobBytes, fr381.Bits-1, blob.NoTerminalSymbol()) // TODO if forced to remove the above check, pad with zeros
+ fpi.SnarkHash, err = encode.MiMCChecksumPackedData(blobBytes, fr381.Bits-1, encode.NoTerminalSymbol()) // TODO if forced to remove the above check, pad with zeros
return
}
diff --git a/prover/circuits/blobdecompression/v1/snark_test.go b/prover/circuits/blobdecompression/v1/snark_test.go
index c321a1756..d70724cc1 100644
--- a/prover/circuits/blobdecompression/v1/snark_test.go
+++ b/prover/circuits/blobdecompression/v1/snark_test.go
@@ -6,6 +6,8 @@ import (
"errors"
"testing"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
"github.com/consensys/linea-monorepo/prover/utils"
"github.com/consensys/linea-monorepo/prover/circuits/blobdecompression/v1/test_utils"
@@ -20,7 +22,7 @@ import (
"github.com/consensys/gnark/std/hash/mimc"
"github.com/consensys/gnark/test"
blob "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
- blobtesting "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
+ blobtestutils "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -30,7 +32,7 @@ func TestParseHeader(t *testing.T) {
maxBlobSize := 1024
blobs := [][]byte{
- blobtesting.GenTestBlob(t, 100000),
+ blobtestutils.GenTestBlob(t, 100000),
}
for _, blobData := range blobs {
@@ -48,14 +50,17 @@ func TestParseHeader(t *testing.T) {
test.NoTestEngine(),
}
+ dictStore, err := dictionary.SingletonStore(blobtestutils.GetDict(t), 1)
+ assert.NoError(t, err)
+
for _, blobData := range blobs {
- header, _, blocks, err := blob.DecompressBlob(blobData, blobtesting.GetDict(t))
+ header, _, blocks, err := blob.DecompressBlob(blobData, dictStore)
assert.NoError(t, err)
assert.LessOrEqual(t, len(blocks), MaxNbBatches, "too many batches")
- unpacked, err := blob.UnpackAlign(blobData, fr381.Bits-1, false)
+ unpacked, err := encode.UnpackAlign(blobData, fr381.Bits-1, false)
require.NoError(t, err)
assignment := &testParseHeaderCircuit{
@@ -88,9 +93,9 @@ func TestChecksumBatches(t *testing.T) {
var batchEndss [nbAssignments][]int
for i := range batchEndss {
- batchEndss[i] = make([]int, blobtesting.RandIntn(MaxNbBatches)+1)
+ batchEndss[i] = make([]int, blobtestutils.RandIntn(MaxNbBatches)+1)
for j := range batchEndss[i] {
- batchEndss[i][j] = 31 + blobtesting.RandIntn(62)
+ batchEndss[i][j] = 31 + blobtestutils.RandIntn(62)
if j > 0 {
batchEndss[i][j] += batchEndss[i][j-1]
}
@@ -161,7 +166,7 @@ func testChecksumBatches(t *testing.T, blob []byte, batchEndss ...[]int) {
Sums: sums,
NbBatches: len(batchEnds),
}
- assignment.Sums[blobtesting.RandIntn(len(batchEnds))] = 3
+ assignment.Sums[blobtestutils.RandIntn(len(batchEnds))] = 3
assert.Error(t, test.IsSolved(&circuit, &assignment, ecc.BLS12_377.ScalarField()))
@@ -224,7 +229,7 @@ func TestUnpackCircuit(t *testing.T) {
runTest := func(b []byte) {
var packedBuf bytes.Buffer
- _, err := blob.PackAlign(&packedBuf, b, fr381.Bits-1) // todo use two different slices
+ _, err := encode.PackAlign(&packedBuf, b, fr381.Bits-1) // todo use two different slices
assert.NoError(t, err)
circuit := unpackCircuit{
@@ -308,7 +313,7 @@ func TestBlobChecksum(t *testing.T) { // aka "snark hash"
assignment := testDataChecksumCircuit{
DataBytes: dataVarsPadded[:nPadded],
}
- assignment.Checksum, err = blob.MiMCChecksumPackedData(dataPadded[:nPadded], fr381.Bits-1, blob.NoTerminalSymbol())
+ assignment.Checksum, err = encode.MiMCChecksumPackedData(dataPadded[:nPadded], fr381.Bits-1, encode.NoTerminalSymbol())
assert.NoError(t, err)
assert.NoError(t, test.IsSolved(&circuit, &assignment, ecc.BLS12_377.ScalarField()))
@@ -338,9 +343,11 @@ func (c *testDataChecksumCircuit) Define(api frontend.API) error {
}
func TestDictHash(t *testing.T) {
- blobBytes := blobtesting.GenTestBlob(t, 1)
- dict := blobtesting.GetDict(t)
- header, _, _, err := blob.DecompressBlob(blobBytes, dict) // a bit roundabout, but the header field is not public
+ blobBytes := blobtestutils.GenTestBlob(t, 1)
+ dict := blobtestutils.GetDict(t)
+ dictStore, err := dictionary.SingletonStore(dict, 1)
+ assert.NoError(t, err)
+ header, _, _, err := blob.DecompressBlob(blobBytes, dictStore) // a bit roundabout, but the header field is not public
assert.NoError(t, err)
circuit := testDataDictHashCircuit{
diff --git a/prover/lib/compressor/blob/blob.go b/prover/lib/compressor/blob/blob.go
index ba5bd8e27..377b0fbb5 100644
--- a/prover/lib/compressor/blob/blob.go
+++ b/prover/lib/compressor/blob/blob.go
@@ -1,15 +1,17 @@
package blob
import (
+ "bytes"
"errors"
"os"
"path/filepath"
"strings"
- fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
- "github.com/consensys/gnark-crypto/hash"
- "github.com/consensys/linea-monorepo/prover/circuits/blobdecompression/v0/compress"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ v0 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v0"
v1 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
+ "github.com/ethereum/go-ethereum/rlp"
)
func GetVersion(blob []byte) uint16 {
@@ -23,17 +25,6 @@ func GetVersion(blob []byte) uint16 {
return 0
}
-// DictionaryChecksum according to the given spec version
-func DictionaryChecksum(dict []byte, version uint16) ([]byte, error) {
- switch version {
- case 1:
- return v1.MiMCChecksumPackedData(dict, 8)
- case 0:
- return compress.ChecksumPaddedBytes(dict, len(dict), hash.MIMC_BLS12_377.New(), fr381.Bits), nil
- }
- return nil, errors.New("unsupported version")
-}
-
// GetRepoRootPath assumes that current working directory is within the repo
func GetRepoRootPath() (string, error) {
wd, err := os.Getwd()
@@ -57,3 +48,41 @@ func GetDict() ([]byte, error) {
dictPath := filepath.Join(repoRoot, "prover/lib/compressor/compressor_dict.bin")
return os.ReadFile(dictPath)
}
+
+// DecompressBlob takes in a Linea blob and outputs an RLP encoded list of RLP encoded blocks.
+// Due to information loss during pre-compression encoding, two pieces of information are represented "hackily":
+// The block hash is in the ParentHash field.
+// The transaction from address is in the signature.R field.
+func DecompressBlob(blob []byte, dictStore dictionary.Store) ([]byte, error) {
+ vsn := GetVersion(blob)
+ var (
+ blockDecoder func(*bytes.Reader) (encode.DecodedBlockData, error)
+ blocks [][]byte
+ err error
+ )
+ switch vsn {
+ case 0:
+ _, _, blocks, err = v0.DecompressBlob(blob, dictStore)
+ blockDecoder = v0.DecodeBlockFromUncompressed
+ case 1:
+ _, _, blocks, err = v1.DecompressBlob(blob, dictStore)
+ blockDecoder = v1.DecodeBlockFromUncompressed
+ default:
+ return nil, errors.New("unrecognized blob version")
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ blocksSerialized := make([][]byte, len(blocks))
+ var decodedBlock encode.DecodedBlockData
+ for i, block := range blocks {
+ if decodedBlock, err = blockDecoder(bytes.NewReader(block)); err != nil {
+ return nil, err
+ }
+ if blocksSerialized[i], err = rlp.EncodeToBytes(decodedBlock.ToStd()); err != nil {
+ return nil, err
+ }
+ }
+ return rlp.EncodeToBytes(blocksSerialized)
+}
diff --git a/prover/lib/compressor/blob/blob_test.go b/prover/lib/compressor/blob/blob_test.go
index 83f8a09af..ba8cf9971 100644
--- a/prover/lib/compressor/blob/blob_test.go
+++ b/prover/lib/compressor/blob/blob_test.go
@@ -1,14 +1,162 @@
package blob_test
import (
- "testing"
-
+ "bytes"
+ "fmt"
"github.com/consensys/linea-monorepo/prover/lib/compressor/blob"
- "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ v0 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v0"
+ blobv1testing "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
+ "github.com/ethereum/go-ethereum/core/types"
+ "github.com/ethereum/go-ethereum/rlp"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "os"
+ "path/filepath"
+ "testing"
)
func TestGetVersion(t *testing.T) {
- _blob := test_utils.GenTestBlob(t, 1)
+ _blob := blobv1testing.GenTestBlob(t, 1)
assert.Equal(t, uint32(0x10000), uint32(0xffff)+uint32(blob.GetVersion(_blob)), "version should match the current one")
}
+
+const dictPath = "../compressor_dict.bin"
+
+func TestAddToBlob(t *testing.T) {
+ dictStore := dictionary.NewStore()
+ require.NoError(t, dictStore.Load(dictPath))
+ blobData := withNoError(t, os.ReadFile, "testdata/v0/sample-blob-01b9918c3f0ceb6a.bin")
+ header, _, blocksSerialized, err := v0.DecompressBlob(blobData, dictStore)
+ require.NoError(t, err)
+
+ blobData = withNoError(t, os.ReadFile, "testdata/v0/sample-blob-0151eda71505187b5.bin")
+ _, _, blocksSerializedNext, err := v0.DecompressBlob(blobData, dictStore)
+ require.NoError(t, err)
+
+ bm, err := v0.NewBlobMaker(v0.MaxUsableBytes, dictPath)
+ require.NoError(t, err)
+ var ok bool
+ writeBlock := func(blocks *[][]byte) {
+ dbd, err := v0.DecodeBlockFromUncompressed(bytes.NewReader((*blocks)[0]))
+ assert.NoError(t, err)
+
+ stdBlockRlp, err := rlp.EncodeToBytes(dbd.ToStd())
+ assert.NoError(t, err)
+ ok, err = bm.Write(stdBlockRlp, false, encode.WithTxAddressGetter(encode.GetAddressFromR))
+ assert.NoError(t, err)
+
+ *blocks = (*blocks)[1:]
+ }
+
+ for i := 0; i < header.NbBatches(); i++ {
+ for j := 0; j < header.NbBlocksInBatch(i); j++ {
+ writeBlock(&blocksSerialized)
+ assert.True(t, ok)
+ }
+ bm.StartNewBatch()
+ }
+ assert.Empty(t, blocksSerialized)
+
+ util0 := 100 * bm.Len() / v0.MaxUsableBytes
+
+ require.NoError(t, err)
+ for ok { // all in one batch
+ writeBlock(&blocksSerializedNext)
+ }
+
+ util1 := 100 * bm.Len() / v0.MaxUsableBytes
+
+ fmt.Printf("%d%%\n%d%%\n", util0, util1)
+}
+
+func withNoError[X, Y any](t *testing.T, f func(X) (Y, error), x X) Y {
+ y, err := f(x)
+ require.NoError(t, err)
+ return y
+}
+
+func TestDecompressBlob(t *testing.T) {
+ store := dictionary.NewStore(dictPath)
+ files := newRecursiveFolderIterator(t, "testdata")
+ for files.hasNext() {
+ f := files.next()
+ if filepath.Ext(f.path) == ".bin" {
+ t.Run(f.path, func(t *testing.T) {
+ decompressed, err := blob.DecompressBlob(f.content, store)
+ assert.NoError(t, err)
+ t.Log("decompressed length", len(decompressed))
+
+ // load decompressed blob as blocks
+ var blocksSerialized [][]byte
+ assert.NoError(t, rlp.DecodeBytes(decompressed, &blocksSerialized))
+ t.Log("number of decoded blocks", len(blocksSerialized))
+ for _, blockSerialized := range blocksSerialized {
+ var b types.Block
+ assert.NoError(t, rlp.DecodeBytes(blockSerialized, &b))
+ }
+ })
+ }
+ }
+}
+
+type dirEntryWithFullPath struct {
+ path string
+ content os.DirEntry
+}
+
+// goes through all files in a directory and its subdirectories
+type recursiveFolderIterator struct {
+ toVisit []dirEntryWithFullPath
+ t *testing.T
+ pathLen int
+}
+
+type file struct {
+ content []byte
+ path string
+}
+
+func (i *recursiveFolderIterator) openDir(path string) {
+ content, err := os.ReadDir(path)
+ require.NoError(i.t, err)
+ for _, c := range content {
+ i.toVisit = append(i.toVisit, dirEntryWithFullPath{path: filepath.Join(path, c.Name()), content: c})
+ }
+}
+
+func (i *recursiveFolderIterator) hasNext() bool {
+ return i.peek() != nil
+}
+
+func (i *recursiveFolderIterator) next() *file {
+ f := i.peek()
+ if f != nil {
+ i.toVisit = i.toVisit[:len(i.toVisit)-1]
+ }
+ return f
+}
+
+// counter-intuitively, peek does most of the work by ensuring the top of the stack is always a file
+func (i *recursiveFolderIterator) peek() *file {
+ for len(i.toVisit) != 0 {
+ lastIndex := len(i.toVisit) - 1
+ c := i.toVisit[lastIndex]
+ if c.content.IsDir() {
+ i.toVisit = i.toVisit[:lastIndex]
+ i.openDir(c.path)
+ } else {
+ b, err := os.ReadFile(c.path)
+ require.NoError(i.t, err)
+ return &file{content: b, path: c.path[i.pathLen:]}
+ }
+ }
+ return nil
+}
+
+func newRecursiveFolderIterator(t *testing.T, path string) *recursiveFolderIterator {
+ res := recursiveFolderIterator{t: t, pathLen: len(path) + 1}
+ res.openDir(path)
+ return &res
+}
diff --git a/prover/lib/compressor/blob/dictionary/dictionary.go b/prover/lib/compressor/blob/dictionary/dictionary.go
new file mode 100644
index 000000000..6b9302a61
--- /dev/null
+++ b/prover/lib/compressor/blob/dictionary/dictionary.go
@@ -0,0 +1,79 @@
+package dictionary
+
+import (
+ "bytes"
+ "errors"
+ "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
+ "github.com/consensys/gnark-crypto/hash"
+ "github.com/consensys/linea-monorepo/prover/circuits/blobdecompression/v0/compress"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ "os"
+)
+
+// Checksum according to the given spec version
+func Checksum(dict []byte, version uint16) ([]byte, error) {
+ switch version {
+ case 1:
+ return encode.MiMCChecksumPackedData(dict, 8)
+ case 0:
+ return compress.ChecksumPaddedBytes(dict, len(dict), hash.MIMC_BLS12_377.New(), fr.Bits), nil
+ }
+ return nil, errors.New("unsupported version")
+}
+
+type Store []map[string][]byte
+
+func NewStore(paths ...string) Store {
+ res := make(Store, 2)
+ for i := range res {
+ res[i] = make(map[string][]byte)
+ }
+ if err := res.Load(paths...); err != nil {
+ panic(err)
+ }
+ return res
+}
+
+func SingletonStore(dict []byte, version uint16) (Store, error) {
+ s := make(Store, version+1)
+ key, err := Checksum(dict, version)
+ s[version] = make(map[string][]byte, 1)
+ s[version][string(key)] = dict
+ return s, err
+}
+
+func (s Store) Load(paths ...string) error {
+ loadVsn := func(vsn uint16) error {
+ for _, path := range paths {
+ dict, err := os.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ checksum, err := Checksum(dict, vsn)
+ if err != nil {
+ return err
+ }
+ key := string(checksum)
+ existing, exists := s[vsn][key]
+ if exists && !bytes.Equal(dict, existing) { // should be incredibly unlikely
+ return errors.New("unmatching dictionary found")
+ }
+ s[vsn][key] = dict
+ }
+ return nil
+ }
+
+ return errors.Join(loadVsn(0), loadVsn(1))
+}
+
+func (s Store) Get(checksum []byte, version uint16) ([]byte, error) {
+ if int(version) >= len(s) {
+ return nil, errors.New("unrecognized blob version")
+ }
+ res, ok := s[version][string(checksum)]
+ if !ok {
+ return nil, errors.New("dictionary not found")
+ }
+ return res, nil
+}
diff --git a/prover/lib/compressor/blob/encode/encode.go b/prover/lib/compressor/blob/encode/encode.go
new file mode 100644
index 000000000..b89a83f28
--- /dev/null
+++ b/prover/lib/compressor/blob/encode/encode.go
@@ -0,0 +1,337 @@
+package encode
+
+import (
+ "bytes"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
+ "github.com/consensys/gnark-crypto/hash"
+ "github.com/consensys/linea-monorepo/prover/backend/ethereum"
+ typesLinea "github.com/consensys/linea-monorepo/prover/utils/types"
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/core/types"
+ "github.com/icza/bitio"
+ "io"
+ "math/big"
+)
+
+// UnpackAlign unpacks r (packed with PackAlign) and returns the unpacked
+// data. packingSize is the bit width of each packed element and
+// noTerminalSymbol must match the option used when packing: when a terminal
+// symbol is expected, the last nonzero byte of the unpacked stream must be
+// 0xff and is stripped along with the zero padding that follows it.
+func UnpackAlign(r []byte, packingSize int, noTerminalSymbol bool) ([]byte, error) {
+	bytesPerElem := (packingSize + 7) / 8
+	// number of meaningful bits in the leading (partial) uint64 of an element
+	packingSizeLastU64 := uint8(packingSize % 64)
+	if packingSizeLastU64 == 0 {
+		packingSizeLastU64 = 64
+	}
+
+	n := len(r) / bytesPerElem
+	if n*bytesPerElem != len(r) {
+		return nil, fmt.Errorf("invalid data length; expected multiple of %d", bytesPerElem)
+	}
+
+	var out bytes.Buffer
+	w := bitio.NewWriter(&out)
+	for i := 0; i < n; i++ {
+		// read one packed element, then stream only its meaningful bits
+		element := r[bytesPerElem*i : bytesPerElem*(i+1)]
+		w.TryWriteBits(binary.BigEndian.Uint64(element[0:8]), packingSizeLastU64)
+		for j := 8; j < bytesPerElem; j += 8 {
+			w.TryWriteBits(binary.BigEndian.Uint64(element[j:j+8]), 64)
+		}
+	}
+	if w.TryError != nil {
+		return nil, fmt.Errorf("when writing to bitio.Writer: %w", w.TryError)
+	}
+	if err := w.Close(); err != nil {
+		return nil, fmt.Errorf("when closing bitio.Writer: %w", err)
+	}
+
+	if !noTerminalSymbol {
+		// scan backwards over the zero padding; the last nonzero byte should
+		// be 0xff. The outLen >= 0 guard makes an all-zero buffer report a
+		// clean error instead of panicking with an index out of range.
+		outLen := out.Len() - 1
+		for outLen >= 0 && out.Bytes()[outLen] == 0 {
+			outLen--
+		}
+		if outLen < 0 || out.Bytes()[outLen] != 0xff {
+			return nil, errors.New("invalid terminal symbol")
+		}
+		out.Truncate(outLen)
+	}
+
+	return out.Bytes(), nil
+}
+
+// packAlignSettings collects the options applied to PackAlign/PackAlignSize.
+type packAlignSettings struct {
+ dataNbBits int // total payload size in bits, set by initialize
+ lastByteNbUnusedBits uint8 // trailing bits of the last input byte to ignore
+ noTerminalSymbol bool // when set, no 0xff terminal marker is appended
+ additionalInput [][]byte // extra byte slices packed after the main input
+}
+
+// initialize applies the given options and computes the total payload size
+// in bits: the main input of `length` bytes, any additional inputs, plus an
+// optional terminal byte, minus the declared unused bits of the last byte.
+func (s *packAlignSettings) initialize(length int, options ...packAlignOption) {
+	for _, apply := range options {
+		apply(s)
+	}
+
+	totalBytes := length
+	for _, extra := range s.additionalInput {
+		totalBytes += len(extra)
+	}
+	if !s.noTerminalSymbol {
+		totalBytes++ // room for the 0xff terminal symbol
+	}
+
+	s.dataNbBits = 8*totalBytes - int(s.lastByteNbUnusedBits)
+}
+
+// packAlignOption mutates packAlignSettings; accepted by PackAlign,
+// PackAlignSize and MiMCChecksumPackedData.
+type packAlignOption func(*packAlignSettings)
+
+// NoTerminalSymbol disables appending the 0xff end-of-data marker.
+func NoTerminalSymbol() packAlignOption {
+ return func(o *packAlignSettings) {
+ o.noTerminalSymbol = true
+ }
+}
+
+// PackAlignSize returns the number of bytes PackAlign would write for an
+// input of length0 bytes under the same packingSize and options.
+func PackAlignSize(length0, packingSize int, options ...packAlignOption) (n int) {
+	var cfg packAlignSettings
+	cfg.initialize(length0, options...)
+
+	// round the bit count up to a whole number of packingSize-bit elements,
+	// each serialized on its byte-aligned width
+	nbElems := (cfg.dataNbBits + packingSize - 1) / packingSize
+	bytesPerElem := (packingSize + 7) / 8
+	return nbElems * bytesPerElem
+}
+
+// WithAdditionalInput appends extra byte slices to be packed after the main
+// input.
+func WithAdditionalInput(data ...[]byte) packAlignOption {
+	return func(s *packAlignSettings) {
+		s.additionalInput = append(s.additionalInput, data...)
+	}
+}
+
+// WithLastByteNbUnusedBits declares that only 8-n bits of the input's last
+// byte are meaningful. n must be in [0, 7]; larger values panic immediately.
+func WithLastByteNbUnusedBits(n uint8) packAlignOption {
+	if n > 7 {
+		panic("only 8 bits to a byte")
+	}
+	return func(s *packAlignSettings) {
+		s.lastByteNbUnusedBits = n
+	}
+}
+
+// PackAlign writes a (plus any WithAdditionalInput data) to w, repacked into
+// elements of packingSize bits each, padded so the output is a whole number
+// of elements. Unless NoTerminalSymbol is set, a 0xff terminal byte is
+// appended before the zero padding. It returns the number of bytes written
+// to w; the result is cross-checked against PackAlignSize.
+func PackAlign(w io.Writer, a []byte, packingSize int, options ...packAlignOption) (n int64, err error) {
+
+ var s packAlignSettings
+ s.initialize(len(a), options...)
+ if !s.noTerminalSymbol && s.lastByteNbUnusedBits != 0 {
+ return 0, errors.New("terminal symbols with byte aligned input not yet supported")
+ }
+
+ // we may need to add some bits to a and b to ensure we can process some blocks of packingSize bits
+ nbBits := (s.dataNbBits + (packingSize - 1)) / packingSize * packingSize
+ extraBits := nbBits - s.dataNbBits
+
+ // padding will always be less than bytesPerElem bytes
+ bytesPerElem := (packingSize + 7) / 8
+ packingSizeLastU64 := uint8(packingSize % 64)
+ if packingSizeLastU64 == 0 {
+ packingSizeLastU64 = 64
+ }
+ bytePadding := (extraBits + 7) / 8
+ buf := make([]byte, bytesPerElem, bytesPerElem+1)
+
+ // the last nonzero byte is 0xff
+ if !s.noTerminalSymbol {
+ buf = append(buf, 0)
+ buf[0] = 0xff
+ }
+
+ // chain the main input, every additional input, then the terminal/padding
+ // bytes into a single bit stream
+ inReaders := make([]io.Reader, 2+len(s.additionalInput))
+ inReaders[0] = bytes.NewReader(a)
+ for i, data := range s.additionalInput {
+ inReaders[i+1] = bytes.NewReader(data)
+ }
+ inReaders[len(inReaders)-1] = bytes.NewReader(buf[:bytePadding+1])
+
+ r := bitio.NewReader(io.MultiReader(inReaders...))
+
+ // tryWrite latches the first write error and turns subsequent calls into
+ // no-ops, so the loop body stays branch-free
+ var tryWriteErr error
+ tryWrite := func(v uint64) {
+ if tryWriteErr == nil {
+ tryWriteErr = binary.Write(w, binary.BigEndian, v)
+ }
+ }
+
+ // emit each element as a short leading chunk followed by full 64-bit words
+ for i := 0; i < nbBits/packingSize; i++ {
+ tryWrite(r.TryReadBits(packingSizeLastU64))
+ for j := int(packingSizeLastU64); j < packingSize; j += 64 {
+ tryWrite(r.TryReadBits(64))
+ }
+ }
+
+ if tryWriteErr != nil {
+ return 0, fmt.Errorf("when writing to w: %w", tryWriteErr)
+ }
+
+ if r.TryError != nil {
+ return 0, fmt.Errorf("when reading from multi-reader: %w", r.TryError)
+ }
+
+ // sanity check: the byte count must agree with the size-only computation
+ n1 := (nbBits / packingSize) * bytesPerElem
+ if n1 != PackAlignSize(len(a), packingSize, options...) {
+ return 0, errors.New("inconsistent PackAlignSize")
+ }
+ return int64(n1), nil
+}
+
+// MiMCChecksumPackedData re-packs the data tightly into bls12-377 elements
+// (fr.Bits-1 bits each) and computes the MiMC checksum of the result. If
+// inputPackingSize is not byte aligned, the data is first unpacked
+// (UnpackAlign without a terminal symbol) and the trailing partial-byte bits
+// are declared unused for the re-packing.
+// only supporting packing without a terminal symbol. Input with a terminal symbol will be interpreted in full padded length.
+func MiMCChecksumPackedData(data []byte, inputPackingSize int, hashPackingOptions ...packAlignOption) ([]byte, error) {
+ // dataNbBits tracks the number of meaningful bits of the original payload
+ dataNbBits := len(data) * 8
+ if inputPackingSize%8 != 0 {
+ inputBytesPerElem := (inputPackingSize + 7) / 8
+ dataNbBits = dataNbBits / inputBytesPerElem * inputPackingSize
+ var err error
+ if data, err = UnpackAlign(data, inputPackingSize, true); err != nil {
+ return nil, err
+ }
+ }
+
+ // bits of the last byte that carry no payload after unpacking
+ lastByteNbUnusedBits := 8 - dataNbBits%8
+ if lastByteNbUnusedBits == 8 {
+ lastByteNbUnusedBits = 0
+ }
+
+ // re-pack into fr.Bits-1 bit elements, forwarding the caller's options and
+ // appending our own unused-bits declaration last
+ var bb bytes.Buffer
+ packingOptions := make([]packAlignOption, len(hashPackingOptions)+1)
+ copy(packingOptions, hashPackingOptions)
+ packingOptions[len(packingOptions)-1] = WithLastByteNbUnusedBits(uint8(lastByteNbUnusedBits))
+ if _, err := PackAlign(&bb, data, fr.Bits-1, packingOptions...); err != nil {
+ return nil, err
+ }
+
+ hsh := hash.MIMC_BLS12_377.New()
+ hsh.Write(bb.Bytes())
+ return hsh.Sum(nil), nil
+}
+
+// DecodedBlockData is a wrapper struct storing the different fields of a block
+// that we deserialize when decoding an ethereum block.
+type DecodedBlockData struct {
+ // BlockHash stores the decoded block hash
+ BlockHash common.Hash
+ // Timestamp holds the Unix timestamp of the block
+ Timestamp uint64
+ // Froms stores the list of the sender address of every transaction
+ Froms []common.Address
+ // Txs stores the list of the decoded transactions.
+ Txs []types.TxData
+}
+
+// InjectFromAddressIntoR builds a transaction from txData with the sender
+// address stored in the signature's R value and S set to 1, so the address
+// survives a round trip through standard encoders (the signature itself is
+// not valid). Panics on unsupported transaction types.
+func InjectFromAddressIntoR(txData types.TxData, from *common.Address) *types.Transaction {
+	switch data := txData.(type) {
+	case *types.DynamicFeeTx:
+		cpy := *data
+		cpy.R = new(big.Int).SetBytes(from[:])
+		cpy.S = big.NewInt(1)
+		return types.NewTx(&cpy)
+	case *types.AccessListTx:
+		cpy := *data
+		cpy.R = new(big.Int).SetBytes(from[:])
+		cpy.S = big.NewInt(1)
+		return types.NewTx(&cpy)
+	case *types.LegacyTx:
+		cpy := *data
+		cpy.R = new(big.Int).SetBytes(from[:])
+		cpy.S = big.NewInt(1)
+		return types.NewTx(&cpy)
+	default:
+		panic("unexpected transaction type")
+	}
+}
+
+// ToStd converts the decoded block data into a standard block object capable
+// of being encoded in a way consumable by existing decoders. The process
+// involves some abuse, whereby 1) the "from" address of each transaction is
+// put in the signature.R field, though the signature as a whole is invalid,
+// and 2) the block hash is stored in the ParentHash field of the header.
+func (d *DecodedBlockData) ToStd() *types.Block {
+	hdr := types.Header{
+		ParentHash: d.BlockHash,
+		Time:       d.Timestamp,
+	}
+
+	txs := make([]*types.Transaction, len(d.Txs))
+	for i, txData := range d.Txs {
+		txs[i] = InjectFromAddressIntoR(txData, &d.Froms[i])
+	}
+
+	return types.NewBlock(&hdr, &types.Body{Transactions: txs}, nil, emptyTrieHasher{})
+}
+
+// GetAddressFromR recovers the sender address previously stashed in the
+// signature R value by InjectFromAddressIntoR.
+func GetAddressFromR(tx *types.Transaction) typesLinea.EthAddress {
+	var addr typesLinea.EthAddress
+	_, r, _ := tx.RawSignatureValues()
+	r.FillBytes(addr[:])
+	return addr
+}
+
+// TODO delete if unused
+// fixedTrieHasher is a hasher stub that ignores all input and always yields
+// the wrapped hash value.
+type fixedTrieHasher common.Hash
+
+// Reset is a no-op.
+func (e fixedTrieHasher) Reset() {
+}
+
+// Update ignores its input and never fails.
+func (e fixedTrieHasher) Update(_, _ []byte) error {
+ return nil
+}
+
+// Hash returns the fixed, wrapped hash value.
+func (e fixedTrieHasher) Hash() common.Hash {
+ return common.Hash(e)
+}
+
+// emptyTrieHasher is a hasher stub that always returns the zero hash. It is
+// passed to types.NewBlock in DecodedBlockData.ToStd, where the computed trie
+// roots are irrelevant.
+type emptyTrieHasher struct{}
+
+// Reset is a no-op.
+func (h emptyTrieHasher) Reset() {
+}
+
+// Update ignores its input and never fails.
+func (h emptyTrieHasher) Update(_, _ []byte) error {
+ return nil
+}
+
+// Hash always returns the zero hash.
+func (h emptyTrieHasher) Hash() common.Hash {
+ return common.Hash{}
+}
+
+// TxAddressGetter extracts a sender address from a transaction.
+type TxAddressGetter func(*types.Transaction) typesLinea.EthAddress
+
+// Config carries the encoding options; see NewConfig for defaults.
+type Config struct {
+ // GetAddress is the function used to obtain a transaction's sender.
+ GetAddress TxAddressGetter
+}
+
+func NewConfig() Config {
+ return Config{
+ GetAddress: ethereum.GetFrom,
+ }
+}
+
+// Option customizes a Config when passed to the encoding entry points.
+type Option func(*Config)
+
+// WithTxAddressGetter overrides the function used to extract a transaction's
+// sender address.
+func WithTxAddressGetter(g TxAddressGetter) Option {
+	return func(c *Config) {
+		c.GetAddress = g
+	}
+}
diff --git a/prover/lib/compressor/blob/encode/test_utils/test_utils.go b/prover/lib/compressor/blob/encode/test_utils/test_utils.go
new file mode 100644
index 000000000..820d10b44
--- /dev/null
+++ b/prover/lib/compressor/blob/encode/test_utils/test_utils.go
@@ -0,0 +1,22 @@
+package test_utils
+
+import (
+ "github.com/consensys/linea-monorepo/prover/backend/ethereum"
+ typesLinea "github.com/consensys/linea-monorepo/prover/utils/types"
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/core/types"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+// CheckSameTx checks if the most essential fields in two transactions are equal
+// TODO cover type-specific fields
+func CheckSameTx(t *testing.T, orig, decoded *types.Transaction, decodedFrom common.Address) {
+	for _, field := range []struct {
+		name      string
+		want, got any
+	}{
+		{"type", orig.Type(), decoded.Type()},
+		{"to", orig.To(), decoded.To()},
+		{"nonce", orig.Nonce(), decoded.Nonce()},
+		{"data", orig.Data(), decoded.Data()},
+		{"value", orig.Value(), decoded.Value()},
+		{"cost", orig.Cost(), decoded.Cost()},
+	} {
+		assert.Equal(t, field.want, field.got, field.name)
+	}
+	assert.Equal(t, ethereum.GetFrom(orig), typesLinea.EthAddress(decodedFrom))
+}
diff --git a/prover/lib/compressor/blob/testdata/sample-blob-0.bin b/prover/lib/compressor/blob/testdata/sample-blob-0.bin
deleted file mode 100644
index 4fbeaee56..000000000
Binary files a/prover/lib/compressor/blob/testdata/sample-blob-0.bin and /dev/null differ
diff --git a/prover/lib/compressor/blob/testdata/v0/sample-blob-0151eda71505187b5.bin b/prover/lib/compressor/blob/testdata/v0/sample-blob-0151eda71505187b5.bin
new file mode 100644
index 000000000..faa13a320
Binary files /dev/null and b/prover/lib/compressor/blob/testdata/v0/sample-blob-0151eda71505187b5.bin differ
diff --git a/prover/lib/compressor/blob/testdata/v0/sample-blob-01b9918c3f0ceb6a.bin b/prover/lib/compressor/blob/testdata/v0/sample-blob-01b9918c3f0ceb6a.bin
new file mode 100644
index 000000000..674fe6004
Binary files /dev/null and b/prover/lib/compressor/blob/testdata/v0/sample-blob-01b9918c3f0ceb6a.bin differ
diff --git a/prover/lib/compressor/blob/v0/blob_maker.go b/prover/lib/compressor/blob/v0/blob_maker.go
index de6e771cd..d3c6c2f09 100644
--- a/prover/lib/compressor/blob/v0/blob_maker.go
+++ b/prover/lib/compressor/blob/v0/blob_maker.go
@@ -5,6 +5,8 @@ import (
"encoding/binary"
"errors"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
"io"
"os"
"strings"
@@ -39,6 +41,7 @@ type BlobMaker struct {
limit int // maximum size of the compressed data
compressor *lzss.Compressor // compressor used to compress the blob body
dict []byte // dictionary used for compression
+ dictStore dictionary.Store
header Header
@@ -67,6 +70,10 @@ func NewBlobMaker(dataLimit int, dictPath string) (*BlobMaker, error) {
}
dict = lzss.AugmentDict(dict)
blobMaker.dict = dict
+ blobMaker.dictStore, err = dictionary.SingletonStore(dict, 0)
+ if err != nil {
+ return nil, err
+ }
dictChecksum := compress.ChecksumPaddedBytes(dict, len(dict), hash.MIMC_BLS12_377.New(), fr.Bits)
copy(blobMaker.header.DictChecksum[:], dictChecksum)
@@ -119,7 +126,7 @@ func (bm *BlobMaker) Written() int {
func (bm *BlobMaker) Bytes() []byte {
if bm.currentBlobLength > 0 {
// sanity check that we can always decompress.
- header, rawBlocks, _, err := DecompressBlob(bm.currentBlob[:bm.currentBlobLength], bm.dict)
+ header, rawBlocks, _, err := DecompressBlob(bm.currentBlob[:bm.currentBlobLength], bm.dictStore)
if err != nil {
var sbb strings.Builder
fmt.Fprintf(&sbb, "invalid blob: %v\n", err)
@@ -130,8 +137,8 @@ func (bm *BlobMaker) Bytes() []byte {
panic(sbb.String())
}
// compare the header
- if !header.Equals(&bm.header) {
- panic("invalid blob: header mismatch")
+ if err = header.CheckEquality(&bm.header); err != nil {
+ panic(fmt.Errorf("invalid blob: header mismatch %v", err))
}
rawBlocksUnpacked, err := UnpackAlign(rawBlocks)
if err != nil {
@@ -146,7 +153,7 @@ func (bm *BlobMaker) Bytes() []byte {
// Write attempts to append the RLP block to the current batch.
// if forceReset is set; this will NOT append the bytes but still returns true if the chunk could have been appended
-func (bm *BlobMaker) Write(rlpBlock []byte, forceReset bool) (ok bool, err error) {
+func (bm *BlobMaker) Write(rlpBlock []byte, forceReset bool, encodingOptions ...encode.Option) (ok bool, err error) {
// decode the RLP block.
var block types.Block
@@ -156,7 +163,7 @@ func (bm *BlobMaker) Write(rlpBlock []byte, forceReset bool) (ok bool, err error
// re-encode it for compression
bm.buf.Reset()
- if err := EncodeBlockForCompression(&block, &bm.buf); err != nil {
+ if err := EncodeBlockForCompression(&block, &bm.buf, encodingOptions...); err != nil {
return false, fmt.Errorf("when re-encoding block for compression: %w", err)
}
blockLen := bm.buf.Len()
@@ -281,7 +288,8 @@ func (bm *BlobMaker) Equals(other *BlobMaker) bool {
}
// DecompressBlob decompresses a blob and returns the header and the blocks as they were compressed.
-func DecompressBlob(b, dict []byte) (blobHeader *Header, rawBlocks []byte, blocks [][]byte, err error) {
+// rawBlocks is the raw payload of the blob, delivered in packed format @TODO bad idea. fix
+func DecompressBlob(b []byte, dictStore dictionary.Store) (blobHeader *Header, rawBlocks []byte, blocks [][]byte, err error) {
// UnpackAlign the blob
b, err = UnpackAlign(b)
if err != nil {
@@ -295,11 +303,10 @@ func DecompressBlob(b, dict []byte) (blobHeader *Header, rawBlocks []byte, block
return nil, nil, nil, fmt.Errorf("failed to read blob header: %w", err)
}
- // ensure the dict hash matches
- {
- if !bytes.Equal(compress.ChecksumPaddedBytes(dict, len(dict), hash.MIMC_BLS12_377.New(), fr.Bits), blobHeader.DictChecksum[:]) {
- return nil, nil, nil, errors.New("invalid dict hash")
- }
+ // retrieve dict
+ dict, err := dictStore.Get(blobHeader.DictChecksum[:], 0)
+ if err != nil {
+ return nil, nil, nil, err
}
b = b[read:]
@@ -438,7 +445,8 @@ func UnpackAlign(r []byte) ([]byte, error) {
cpt++
}
// last byte should be equal to cpt
- if cpt != int(out.Bytes()[out.Len()-1])-1 {
+ lastNonZero := out.Bytes()[out.Len()-1]
+ if (cpt % 31) != int(lastNonZero)-1 {
return nil, errors.New("invalid padding length")
}
out.Truncate(out.Len() - 1)
diff --git a/prover/lib/compressor/blob/v0/blob_maker_test.go b/prover/lib/compressor/blob/v0/blob_maker_test.go
index 8e8c1a1c7..ff37f6760 100644
--- a/prover/lib/compressor/blob/v0/blob_maker_test.go
+++ b/prover/lib/compressor/blob/v0/blob_maker_test.go
@@ -8,14 +8,16 @@ import (
"encoding/json"
"errors"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ encodeTesting "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode/test_utils"
+ "github.com/consensys/linea-monorepo/prover/utils"
"io"
"math/big"
"math/rand"
"os"
+ "slices"
"testing"
- "github.com/consensys/linea-monorepo/prover/utils"
-
"github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v0/compress/lzss"
"github.com/consensys/linea-monorepo/prover/backend/ethereum"
@@ -603,7 +605,11 @@ func decompressBlob(b []byte) ([][][]byte, error) {
if err != nil {
return nil, fmt.Errorf("can't read dict: %w", err)
}
- header, _, blocks, err := DecompressBlob(b, dict)
+ dictStore, err := dictionary.SingletonStore(dict, 0)
+ if err != nil {
+ return nil, err
+ }
+ header, _, blocks, err := DecompressBlob(b, dictStore)
if err != nil {
return nil, fmt.Errorf("can't decompress blob: %w", err)
}
@@ -744,3 +750,18 @@ func TestPack(t *testing.T) {
assert.Equal(s2, original[n1:], "slices should match")
}
}
+
+// TestEncode round-trips the first transaction of the first test block
+// through EncodeTxForCompression / DecodeTxFromUncompressed and checks that
+// the essential fields survive.
+func TestEncode(t *testing.T) {
+ var block types.Block
+ assert.NoError(t, rlp.DecodeBytes(testBlocks[0], &block))
+ tx := block.Transactions()[0]
+ var bb bytes.Buffer
+ assert.NoError(t, EncodeTxForCompression(tx, &bb))
+
+ var from common.Address
+ // NOTE(review): the clone presumably shields bb's backing array from the
+ // reader — confirm whether it is strictly needed
+ txBackData, err := DecodeTxFromUncompressed(bytes.NewReader(slices.Clone(bb.Bytes())), &from)
+ assert.NoError(t, err)
+ txBack := types.NewTx(txBackData)
+
+ encodeTesting.CheckSameTx(t, tx, txBack, from)
+}
diff --git a/prover/lib/compressor/blob/v0/encode.go b/prover/lib/compressor/blob/v0/encode.go
index e127082a6..09ec36dce 100644
--- a/prover/lib/compressor/blob/v0/encode.go
+++ b/prover/lib/compressor/blob/v0/encode.go
@@ -1,23 +1,27 @@
package v0
import (
+ "bytes"
"encoding/binary"
+ "errors"
"fmt"
"io"
"github.com/consensys/linea-monorepo/prover/backend/ethereum"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/rlp"
)
// EncodeBlockForCompression encodes a block for compression.
-func EncodeBlockForCompression(block *types.Block, w io.Writer) error {
+func EncodeBlockForCompression(block *types.Block, w io.Writer, encodingOptions ...encode.Option) error {
if err := binary.Write(w, binary.LittleEndian, block.Time()); err != nil {
return err
}
for _, tx := range block.Transactions() {
- if err := EncodeTxForCompression(tx, w); err != nil {
+ if err := EncodeTxForCompression(tx, w, encodingOptions...); err != nil {
return err
}
}
@@ -26,7 +30,11 @@ func EncodeBlockForCompression(block *types.Block, w io.Writer) error {
// EncodeTxForCompression encodes a transaction for compression.
// this code is from zk-evm-monorepo/prover/... but doesn't include the chainID
-func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
+func EncodeTxForCompression(tx *types.Transaction, w io.Writer, encodingOptions ...encode.Option) error {
+ cfg := encode.NewConfig()
+ for _, o := range encodingOptions {
+ o(&cfg)
+ }
switch {
// LONDON with dynamic fees
case tx.Type() == types.DynamicFeeTxType:
@@ -39,7 +47,7 @@ func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
tx.GasTipCap(),
tx.GasFeeCap(),
tx.Gas(),
- ethereum.GetFrom(tx),
+ cfg.GetAddress(tx),
tx.To(),
tx.Value(),
tx.Data(),
@@ -57,7 +65,7 @@ func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
tx.Nonce(),
tx.GasPrice(),
tx.Gas(),
- ethereum.GetFrom(tx),
+ cfg.GetAddress(tx),
tx.To(),
tx.Value(),
tx.Data(),
@@ -71,7 +79,7 @@ func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
tx.Nonce(),
tx.GasPrice(),
tx.Gas(),
- ethereum.GetFrom(tx),
+ cfg.GetAddress(tx),
tx.To(),
tx.Value(),
tx.Data(),
@@ -85,7 +93,7 @@ func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
tx.Nonce(),
tx.GasPrice(),
tx.Gas(),
- ethereum.GetFrom(tx),
+ cfg.GetAddress(tx),
tx.To(),
tx.Value(),
tx.Data(),
@@ -98,3 +106,140 @@ func EncodeTxForCompression(tx *types.Transaction, w io.Writer) error {
return nil
}
+
+// DecodeBlockFromUncompressed inverts [EncodeBlockForCompression]. It is primarily meant for
+// testing and ensuring the encoding is bijective.
+func DecodeBlockFromUncompressed(r *bytes.Reader) (encode.DecodedBlockData, error) {
+	var res encode.DecodedBlockData
+
+	// the block encoding starts with the little-endian timestamp
+	if err := binary.Read(r, binary.LittleEndian, &res.Timestamp); err != nil {
+		return encode.DecodedBlockData{}, fmt.Errorf("could not decode timestamp: %w", err)
+	}
+
+	// the remainder of the stream is a sequence of encoded transactions
+	for r.Len() > 0 {
+		var sender common.Address
+		txData, err := DecodeTxFromUncompressed(r, &sender)
+		if err != nil {
+			return encode.DecodedBlockData{}, fmt.Errorf("could not decode transaction #%v: %w", len(res.Txs), err)
+		}
+		res.Txs = append(res.Txs, txData)
+		res.Froms = append(res.Froms, sender)
+	}
+
+	return res, nil
+}
+
+// ReadTxAsRlp reads one encoded transaction from r, returning its RLP-decoded
+// fields and the detected type byte: types.AccessListTxType,
+// types.DynamicFeeTxType, or 0 for legacy/EIP-155 payloads (which start with
+// an RLP list byte and are rewound so the RLP decoder sees the full list).
+func ReadTxAsRlp(r *bytes.Reader) (fields []any, _type uint8, err error) {
+ firstByte, err := r.ReadByte()
+ if err != nil {
+ err = fmt.Errorf("could not read the first byte: %w", err)
+ return
+ }
+
+ // According to the RLP rule, `0xc0 + x` or `0xf7` indicates that the current
+ // item is a list and this is what's used to identify that the transaction is
+ // a legacy transaction or an EIP-155 transaction.
+ //
+ // Note that 0xc0 would indicate an empty list and thus be an invalid tx.
+ if firstByte == types.AccessListTxType || firstByte == types.DynamicFeeTxType {
+ _type = firstByte
+ } else {
+ if firstByte > 0xc0 {
+ // Set the byte-reader backward so that we can apply the rlp-decoder
+ // over it.
+ if err = r.UnreadByte(); err != nil {
+ return
+ }
+ _type = 0
+ } else {
+ err = fmt.Errorf("unexpected first byte: %x", firstByte)
+ return
+ }
+ }
+
+ err = rlp.Decode(r, &fields)
+ return
+}
+
+// DecodeTxFromUncompressed puts all the transaction data into the output, except for the from address,
+// which will be put where the argument "from" is referencing
+func DecodeTxFromUncompressed(r *bytes.Reader, from *common.Address) (types.TxData, error) {
+	fields, txType, err := ReadTxAsRlp(r)
+	if err != nil {
+		return nil, err
+	}
+	// ReadTxAsRlp only ever reports 0 (legacy), AccessListTxType or
+	// DynamicFeeTxType, so dispatch on those three cases
+	switch txType {
+	case types.AccessListTxType:
+		return decodeAccessListTx(fields, from)
+	case types.DynamicFeeTxType:
+		return decodeDynamicFeeTx(fields, from)
+	default:
+		return decodeLegacyTx(fields, from)
+	}
+}
+
+// decodeLegacyTx maps the 7 RLP-decoded fields of a compression-encoded
+// legacy transaction onto a types.LegacyTx, writing the sender into *from.
+// The field order mirrors EncodeTxForCompression's legacy case.
+func decodeLegacyTx(fields []any, from *common.Address) (types.TxData, error) {
+
+	if len(fields) != 7 {
+		// include the actual count to ease debugging malformed payloads
+		return nil, fmt.Errorf("unexpected number of fields for a legacy transaction: got %d, expected 7", len(fields))
+	}
+
+	tx := new(types.LegacyTx)
+	err := errors.Join(
+		ethereum.TryCast(&tx.Nonce, fields[0], "nonce"),
+		ethereum.TryCast(&tx.GasPrice, fields[1], "gas-price"),
+		ethereum.TryCast(&tx.Gas, fields[2], "gas"),
+		ethereum.TryCast(from, fields[3], "from"),
+		ethereum.TryCast(&tx.To, fields[4], "to"),
+		ethereum.TryCast(&tx.Value, fields[5], "value"),
+		ethereum.TryCast(&tx.Data, fields[6], "data"),
+	)
+	return tx, err
+}
+
+// decodeAccessListTx maps the 8 RLP-decoded fields of a compression-encoded
+// access-list (EIP-2930) transaction onto a types.AccessListTx, writing the
+// sender into *from. The field order mirrors EncodeTxForCompression's
+// access-list case.
+func decodeAccessListTx(fields []any, from *common.Address) (types.TxData, error) {
+
+	if len(fields) != 8 {
+		// fixed copy-paste: the message previously said "dynamic transaction"
+		return nil, fmt.Errorf("invalid number of fields for an access-list transaction: got %d, expected 8", len(fields))
+	}
+
+	tx := new(types.AccessListTx)
+	err := errors.Join(
+		ethereum.TryCast(&tx.Nonce, fields[0], "nonce"),
+		ethereum.TryCast(&tx.GasPrice, fields[1], "gas-price"),
+		ethereum.TryCast(&tx.Gas, fields[2], "gas"),
+		ethereum.TryCast(from, fields[3], "from"),
+		ethereum.TryCast(&tx.To, fields[4], "to"),
+		ethereum.TryCast(&tx.Value, fields[5], "value"),
+		ethereum.TryCast(&tx.Data, fields[6], "data"),
+		ethereum.TryCast(&tx.AccessList, fields[7], "access-list"),
+	)
+
+	return tx, err
+}
+
+// decodeDynamicFeeTx maps the 9 RLP-decoded fields of a compression-encoded
+// dynamic-fee (EIP-1559) transaction onto a types.DynamicFeeTx, writing the
+// sender into *from. The field order mirrors EncodeTxForCompression's
+// dynamic-fee case.
+func decodeDynamicFeeTx(fields []any, from *common.Address) (types.TxData, error) {
+
+ if len(fields) != 9 {
+ return nil, fmt.Errorf("invalid number of field for a dynamic transaction")
+ }
+
+ tx := new(types.DynamicFeeTx)
+ err := errors.Join(
+ ethereum.TryCast(&tx.Nonce, fields[0], "nonce"),
+ ethereum.TryCast(&tx.GasTipCap, fields[1], "gas-tip-cap"),
+ ethereum.TryCast(&tx.GasFeeCap, fields[2], "gas-fee-cap"),
+ ethereum.TryCast(&tx.Gas, fields[3], "gas"),
+ ethereum.TryCast(from, fields[4], "from"),
+ ethereum.TryCast(&tx.To, fields[5], "to"),
+ ethereum.TryCast(&tx.Value, fields[6], "value"),
+ ethereum.TryCast(&tx.Data, fields[7], "data"),
+ ethereum.TryCast(&tx.AccessList, fields[8], "access-list"),
+ )
+
+ return tx, err
+}
diff --git a/prover/lib/compressor/blob/v0/header.go b/prover/lib/compressor/blob/v0/header.go
index 0b917a39c..9137589db 100644
--- a/prover/lib/compressor/blob/v0/header.go
+++ b/prover/lib/compressor/blob/v0/header.go
@@ -2,6 +2,7 @@ package v0
import (
"encoding/binary"
+ "errors"
"fmt"
"io"
@@ -22,11 +23,17 @@ type Header struct {
}
func (s *Header) Equals(other *Header) bool {
+ return s.CheckEquality(other) == nil
+}
+
+// CheckEquality similar to Equals but returning a description of the mismatch,
+// returning nil if the objects are equal
+func (s *Header) CheckEquality(other *Header) error {
if other == nil {
- return false
+ return errors.New("empty header")
}
if s.DictChecksum != other.DictChecksum {
- return false
+ return errors.New("dictionary mismatch")
}
// we ignore batches of len(0), since caller could have
@@ -36,25 +43,27 @@ func (s *Header) Equals(other *Header) bool {
small, large = other, s
}
+ absJ := 0
for i := range small.table {
if len(small.table[i]) != len(large.table[i]) {
- return false
+ return fmt.Errorf("batch size mismatch at #%d", i)
}
for j := range small.table[i] {
if small.table[i][j] != large.table[i][j] {
- return false
+ return fmt.Errorf("block size mismatch at block #%d of batch #%d, #%d total", j, i, absJ+j)
}
}
+ absJ += len(small.table[i])
}
// remaining batches of large should be empty
for i := len(small.table); i < len(large.table); i++ {
if len(large.table[i]) != 0 {
- return false
+ return errors.New("batch count mismatch")
}
}
- return true
+ return nil
}
func (s *Header) NbBatches() int {
diff --git a/prover/lib/compressor/blob/v1/blob_maker.go b/prover/lib/compressor/blob/v1/blob_maker.go
index 6e6b3030d..d6434d467 100644
--- a/prover/lib/compressor/blob/v1/blob_maker.go
+++ b/prover/lib/compressor/blob/v1/blob_maker.go
@@ -2,10 +2,10 @@ package v1
import (
"bytes"
- "encoding/binary"
"errors"
"fmt"
- "io"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
"os"
"slices"
"strings"
@@ -13,10 +13,6 @@ import (
fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
"github.com/sirupsen/logrus"
- fr377 "github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
- "github.com/consensys/gnark-crypto/hash"
- "github.com/icza/bitio"
-
"github.com/consensys/compress/lzss"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/rlp"
@@ -40,6 +36,7 @@ type BlobMaker struct {
Limit int // maximum size of the compressed data
compressor *lzss.Compressor // compressor used to compress the blob body
dict []byte // dictionary used for compression
+ dictStore dictionary.Store // dictionary store comprising only dict, used for decompression sanity checks
Header Header
@@ -68,8 +65,11 @@ func NewBlobMaker(dataLimit int, dictPath string) (*BlobMaker, error) {
}
dict = lzss.AugmentDict(dict)
blobMaker.dict = dict
+ if blobMaker.dictStore, err = dictionary.SingletonStore(dict, 1); err != nil {
+ return nil, err
+ }
- dictChecksum, err := MiMCChecksumPackedData(dict, 8)
+ dictChecksum, err := encode.MiMCChecksumPackedData(dict, 8)
if err != nil {
return nil, err
}
@@ -116,7 +116,7 @@ func (bm *BlobMaker) Written() int {
func (bm *BlobMaker) Bytes() []byte {
if bm.currentBlobLength > 0 {
// sanity check that we can always decompress.
- header, rawBlocks, _, err := DecompressBlob(bm.currentBlob[:bm.currentBlobLength], bm.dict)
+ header, rawBlocks, _, err := DecompressBlob(bm.currentBlob[:bm.currentBlobLength], bm.dictStore)
if err != nil {
var sbb strings.Builder
fmt.Fprintf(&sbb, "invalid blob: %v\n", err)
@@ -191,13 +191,13 @@ func (bm *BlobMaker) Write(rlpBlock []byte, forceReset bool) (ok bool, err error
}
// check that the header + the compressed data fits in the blob
- fitsInBlob := PackAlignSize(bm.buf.Len()+bm.compressor.Len(), fr381.Bits-1) <= bm.Limit
+ fitsInBlob := encode.PackAlignSize(bm.buf.Len()+bm.compressor.Len(), fr381.Bits-1) <= bm.Limit
if !fitsInBlob {
// first thing to check is if we bypass compression, would that fit?
if bm.compressor.ConsiderBypassing() {
// we can bypass compression and get a better ratio.
// let's check if now we fit in the blob.
- if PackAlignSize(bm.buf.Len()+bm.compressor.Len(), fr381.Bits-1) <= bm.Limit {
+ if encode.PackAlignSize(bm.buf.Len()+bm.compressor.Len(), fr381.Bits-1) <= bm.Limit {
goto bypass
}
}
@@ -221,7 +221,7 @@ bypass:
// copy the compressed data to the blob
bm.packBuffer.Reset()
- n2, err := PackAlign(&bm.packBuffer, bm.buf.Bytes(), fr381.Bits-1, WithAdditionalInput(bm.compressor.Bytes()))
+ n2, err := encode.PackAlign(&bm.packBuffer, bm.buf.Bytes(), fr381.Bits-1, encode.WithAdditionalInput(bm.compressor.Bytes()))
if err != nil {
bm.compressor.Revert()
bm.Header.removeLastBlock()
@@ -264,9 +264,9 @@ func (bm *BlobMaker) Equals(other *BlobMaker) bool {
}
// DecompressBlob decompresses a blob and returns the header and the blocks as they were compressed.
-func DecompressBlob(b, dict []byte) (blobHeader *Header, rawPayload []byte, blocks [][]byte, err error) {
+func DecompressBlob(b []byte, dictStore dictionary.Store) (blobHeader *Header, rawPayload []byte, blocks [][]byte, err error) {
// UnpackAlign the blob
- b, err = UnpackAlign(b, fr381.Bits-1, false)
+ b, err = encode.UnpackAlign(b, fr381.Bits-1, false)
if err != nil {
return nil, nil, nil, err
}
@@ -277,15 +277,10 @@ func DecompressBlob(b, dict []byte) (blobHeader *Header, rawPayload []byte, bloc
if err != nil {
return nil, nil, nil, fmt.Errorf("failed to read blob header: %w", err)
}
- // ensure the dict hash matches
- {
- expectedDictChecksum, err := MiMCChecksumPackedData(dict, 8)
- if err != nil {
- return nil, nil, nil, err
- }
- if !bytes.Equal(expectedDictChecksum, blobHeader.DictChecksum[:]) {
- return nil, nil, nil, errors.New("invalid dict hash")
- }
+ // retrieve dict
+ dict, err := dictStore.Get(blobHeader.DictChecksum[:], 1)
+ if err != nil {
+ return nil, nil, nil, err
}
b = b[read:]
@@ -317,210 +312,6 @@ func DecompressBlob(b, dict []byte) (blobHeader *Header, rawPayload []byte, bloc
return blobHeader, rawPayload, blocks, nil
}
-// PackAlignSize returns the size of the data when packed with PackAlign.
-func PackAlignSize(length0, packingSize int, options ...packAlignOption) (n int) {
- var s packAlignSettings
- s.initialize(length0, options...)
-
- // we may need to add some bits to a and b to ensure we can process some blocks of 248 bits
- extraBits := (packingSize - s.dataNbBits%packingSize) % packingSize
- nbBits := s.dataNbBits + extraBits
-
- return (nbBits / packingSize) * ((packingSize + 7) / 8)
-}
-
-type packAlignSettings struct {
- dataNbBits int
- lastByteNbUnusedBits uint8
- noTerminalSymbol bool
- additionalInput [][]byte
-}
-
-type packAlignOption func(*packAlignSettings)
-
-func NoTerminalSymbol() packAlignOption {
- return func(o *packAlignSettings) {
- o.noTerminalSymbol = true
- }
-}
-
-func WithAdditionalInput(data ...[]byte) packAlignOption {
- return func(o *packAlignSettings) {
- o.additionalInput = append(o.additionalInput, data...)
- }
-}
-
-func WithLastByteNbUnusedBits(n uint8) packAlignOption {
- if n > 7 {
- panic("only 8 bits to a byte")
- }
- return func(o *packAlignSettings) {
- o.lastByteNbUnusedBits = n
- }
-}
-
-func (s *packAlignSettings) initialize(length int, options ...packAlignOption) {
-
- for _, opt := range options {
- opt(s)
- }
-
- nbBytes := length
- for _, data := range s.additionalInput {
- nbBytes += len(data)
- }
-
- if !s.noTerminalSymbol {
- nbBytes++
- }
-
- s.dataNbBits = nbBytes*8 - int(s.lastByteNbUnusedBits)
-}
-
-// PackAlign writes a and b to w, aligned to fr.Element (bls12-377) boundary.
-// It returns the length of the data written to w.
-func PackAlign(w io.Writer, a []byte, packingSize int, options ...packAlignOption) (n int64, err error) {
-
- var s packAlignSettings
- s.initialize(len(a), options...)
- if !s.noTerminalSymbol && s.lastByteNbUnusedBits != 0 {
- return 0, errors.New("terminal symbols with byte aligned input not yet supported")
- }
-
- // we may need to add some bits to a and b to ensure we can process some blocks of packingSize bits
- nbBits := (s.dataNbBits + (packingSize - 1)) / packingSize * packingSize
- extraBits := nbBits - s.dataNbBits
-
- // padding will always be less than bytesPerElem bytes
- bytesPerElem := (packingSize + 7) / 8
- packingSizeLastU64 := uint8(packingSize % 64)
- if packingSizeLastU64 == 0 {
- packingSizeLastU64 = 64
- }
- bytePadding := (extraBits + 7) / 8
- buf := make([]byte, bytesPerElem, bytesPerElem+1)
-
- // the last nonzero byte is 0xff
- if !s.noTerminalSymbol {
- buf = append(buf, 0)
- buf[0] = 0xff
- }
-
- inReaders := make([]io.Reader, 2+len(s.additionalInput))
- inReaders[0] = bytes.NewReader(a)
- for i, data := range s.additionalInput {
- inReaders[i+1] = bytes.NewReader(data)
- }
- inReaders[len(inReaders)-1] = bytes.NewReader(buf[:bytePadding+1])
-
- r := bitio.NewReader(io.MultiReader(inReaders...))
-
- var tryWriteErr error
- tryWrite := func(v uint64) {
- if tryWriteErr == nil {
- tryWriteErr = binary.Write(w, binary.BigEndian, v)
- }
- }
-
- for i := 0; i < nbBits/packingSize; i++ {
- tryWrite(r.TryReadBits(packingSizeLastU64))
- for j := int(packingSizeLastU64); j < packingSize; j += 64 {
- tryWrite(r.TryReadBits(64))
- }
- }
-
- if tryWriteErr != nil {
- return 0, fmt.Errorf("when writing to w: %w", tryWriteErr)
- }
-
- if r.TryError != nil {
- return 0, fmt.Errorf("when reading from multi-reader: %w", r.TryError)
- }
-
- n1 := (nbBits / packingSize) * bytesPerElem
- if n1 != PackAlignSize(len(a), packingSize, options...) {
- return 0, errors.New("inconsistent PackAlignSize")
- }
- return int64(n1), nil
-}
-
-// UnpackAlign unpacks r (packed with PackAlign) and returns the unpacked data.
-func UnpackAlign(r []byte, packingSize int, noTerminalSymbol bool) ([]byte, error) {
- bytesPerElem := (packingSize + 7) / 8
- packingSizeLastU64 := uint8(packingSize % 64)
- if packingSizeLastU64 == 0 {
- packingSizeLastU64 = 64
- }
-
- n := len(r) / bytesPerElem
- if n*bytesPerElem != len(r) {
- return nil, fmt.Errorf("invalid data length; expected multiple of %d", bytesPerElem)
- }
-
- var out bytes.Buffer
- w := bitio.NewWriter(&out)
- for i := 0; i < n; i++ {
- // read bytes
- element := r[bytesPerElem*i : bytesPerElem*(i+1)]
- // write bits
- w.TryWriteBits(binary.BigEndian.Uint64(element[0:8]), packingSizeLastU64)
- for j := 8; j < bytesPerElem; j += 8 {
- w.TryWriteBits(binary.BigEndian.Uint64(element[j:j+8]), 64)
- }
- }
- if w.TryError != nil {
- return nil, fmt.Errorf("when writing to bitio.Writer: %w", w.TryError)
- }
- if err := w.Close(); err != nil {
- return nil, fmt.Errorf("when closing bitio.Writer: %w", err)
- }
-
- if !noTerminalSymbol {
- // the last nonzero byte should be 0xff
- outLen := out.Len() - 1
- for out.Bytes()[outLen] == 0 {
- outLen--
- }
- if out.Bytes()[outLen] != 0xff {
- return nil, errors.New("invalid terminal symbol")
- }
- out.Truncate(outLen)
- }
-
- return out.Bytes(), nil
-}
-
-// MiMCChecksumPackedData re-packs the data tightly into bls12-377 elements and computes the MiMC checksum.
-// only supporting packing without a terminal symbol. Input with a terminal symbol will be interpreted in full padded length.
-func MiMCChecksumPackedData(data []byte, inputPackingSize int, hashPackingOptions ...packAlignOption) ([]byte, error) {
- dataNbBits := len(data) * 8
- if inputPackingSize%8 != 0 {
- inputBytesPerElem := (inputPackingSize + 7) / 8
- dataNbBits = dataNbBits / inputBytesPerElem * inputPackingSize
- var err error
- if data, err = UnpackAlign(data, inputPackingSize, true); err != nil {
- return nil, err
- }
- }
-
- lastByteNbUnusedBits := 8 - dataNbBits%8
- if lastByteNbUnusedBits == 8 {
- lastByteNbUnusedBits = 0
- }
-
- var bb bytes.Buffer
- packingOptions := make([]packAlignOption, len(hashPackingOptions)+1)
- copy(packingOptions, hashPackingOptions)
- packingOptions[len(packingOptions)-1] = WithLastByteNbUnusedBits(uint8(lastByteNbUnusedBits))
- if _, err := PackAlign(&bb, data, fr377.Bits-1, packingOptions...); err != nil {
- return nil, err
- }
-
- hsh := hash.MIMC_BLS12_377.New()
- hsh.Write(bb.Bytes())
- return hsh.Sum(nil), nil
-}
-
// WorstCompressedBlockSize returns the size of the given block, as compressed by an "empty" blob maker.
// That is, with more context, blob maker could compress the block further, but this function
// returns the maximum size that can be achieved.
@@ -557,7 +348,7 @@ func (bm *BlobMaker) WorstCompressedBlockSize(rlpBlock []byte) (bool, int, error
}
// account for the padding
- n = PackAlignSize(n, fr381.Bits-1, NoTerminalSymbol())
+ n = encode.PackAlignSize(n, fr381.Bits-1, encode.NoTerminalSymbol())
return expandingBlock, n, nil
}
@@ -610,7 +401,7 @@ func (bm *BlobMaker) RawCompressedSize(data []byte) (int, error) {
}
// account for the padding
- n = PackAlignSize(n, fr381.Bits-1, NoTerminalSymbol())
+ n = encode.PackAlignSize(n, fr381.Bits-1, encode.NoTerminalSymbol())
return n, nil
}
diff --git a/prover/lib/compressor/blob/v1/blob_maker_test.go b/prover/lib/compressor/blob/v1/blob_maker_test.go
index 9ea25a0c7..2d67e2f39 100644
--- a/prover/lib/compressor/blob/v1/blob_maker_test.go
+++ b/prover/lib/compressor/blob/v1/blob_maker_test.go
@@ -9,6 +9,8 @@ import (
"encoding/hex"
"errors"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
"math/big"
"math/rand"
"os"
@@ -57,7 +59,8 @@ func testCompressorSingleSmallBatch(t *testing.T, blocks [][]byte) {
dict, err := os.ReadFile(testDictPath)
assert.NoError(t, err)
- _, _, blocksBack, err := v1.DecompressBlob(bm.Bytes(), dict)
+ dictStore, err := dictionary.SingletonStore(dict, 1)
+ _, _, blocksBack, err := v1.DecompressBlob(bm.Bytes(), dictStore)
assert.NoError(t, err)
assert.Equal(t, len(blocks), len(blocksBack), "number of blocks should match")
// TODO compare the blocks
@@ -121,7 +124,7 @@ func assertBatchesConsistent(t *testing.T, raw, decoded [][]byte) {
var block types.Block
assert.NoError(t, rlp.Decode(bytes.NewReader(raw[i]), &block))
- blockBack, err := test_utils.DecodeBlockFromUncompressed(bytes.NewReader(decoded[i]))
+ blockBack, err := v1.DecodeBlockFromUncompressed(bytes.NewReader(decoded[i]))
assert.NoError(t, err)
assert.Equal(t, block.Time(), blockBack.Timestamp, "block time should match")
}
@@ -512,7 +515,11 @@ func decompressBlob(b []byte) ([][][]byte, error) {
if err != nil {
return nil, fmt.Errorf("can't read dict: %w", err)
}
- header, _, blocks, err := v1.DecompressBlob(b, dict)
+ dictStore, err := dictionary.SingletonStore(dict, 1)
+ if err != nil {
+ return nil, err
+ }
+ header, _, blocks, err := v1.DecompressBlob(b, dictStore)
if err != nil {
return nil, fmt.Errorf("can't decompress blob: %w", err)
}
@@ -641,10 +648,10 @@ func TestPack(t *testing.T) {
runTest := func(s1, s2 []byte) {
// pack them
buf.Reset()
- written, err := v1.PackAlign(&buf, s1, fr381.Bits-1, v1.WithAdditionalInput(s2))
+ written, err := encode.PackAlign(&buf, s1, fr381.Bits-1, encode.WithAdditionalInput(s2))
assert.NoError(err, "pack should not generate an error")
- assert.Equal(v1.PackAlignSize(len(s1)+len(s2), fr381.Bits-1), int(written), "written bytes should match expected PackAlignSize")
- original, err := v1.UnpackAlign(buf.Bytes(), fr381.Bits-1, false)
+ assert.Equal(encode.PackAlignSize(len(s1)+len(s2), fr381.Bits-1), int(written), "written bytes should match expected PackAlignSize")
+ original, err := encode.UnpackAlign(buf.Bytes(), fr381.Bits-1, false)
assert.NoError(err, "unpack should not generate an error")
assert.Equal(s1, original[:len(s1)], "slices should match")
diff --git a/prover/lib/compressor/blob/v1/encode.go b/prover/lib/compressor/blob/v1/encode.go
index 60e71ef5e..16a79b64f 100644
--- a/prover/lib/compressor/blob/v1/encode.go
+++ b/prover/lib/compressor/blob/v1/encode.go
@@ -8,6 +8,8 @@ import (
"io"
"github.com/consensys/linea-monorepo/prover/backend/ethereum"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
)
@@ -159,3 +161,51 @@ func PassRlpList(r *bytes.Reader) error {
return nil
}
+
+// DecodeBlockFromUncompressed inverts [EncodeBlockForCompression]. It is primarily meant for
+// testing and ensuring the encoding is bijective.
+func DecodeBlockFromUncompressed(r *bytes.Reader) (encode.DecodedBlockData, error) {
+
+ var (
+ decNumTxs uint16
+ decTimestamp uint32
+ blockHash common.Hash
+ )
+
+ if err := binary.Read(r, binary.BigEndian, &decNumTxs); err != nil {
+ return encode.DecodedBlockData{}, fmt.Errorf("could not decode nb txs: %w", err)
+ }
+
+ if err := binary.Read(r, binary.BigEndian, &decTimestamp); err != nil {
+ return encode.DecodedBlockData{}, fmt.Errorf("could not decode timestamp: %w", err)
+ }
+
+ if _, err := r.Read(blockHash[:]); err != nil {
+ return encode.DecodedBlockData{}, fmt.Errorf("could not read the block hash: %w", err)
+ }
+
+ numTxs := int(decNumTxs)
+ decodedBlk := encode.DecodedBlockData{
+ Froms: make([]common.Address, numTxs),
+ Txs: make([]types.TxData, numTxs),
+ Timestamp: uint64(decTimestamp),
+ BlockHash: blockHash,
+ }
+
+ var err error
+ for i := 0; i < int(decNumTxs); i++ {
+ if decodedBlk.Txs[i], err = DecodeTxFromUncompressed(r, &decodedBlk.Froms[i]); err != nil {
+ return encode.DecodedBlockData{}, fmt.Errorf("could not decode transaction #%v: %w", i, err)
+ }
+ }
+
+ return decodedBlk, nil
+}
+
+func DecodeTxFromUncompressed(r *bytes.Reader, from *common.Address) (types.TxData, error) {
+ if _, err := r.Read(from[:]); err != nil {
+ return nil, fmt.Errorf("could not read from address: %w", err)
+ }
+
+ return ethereum.DecodeTxFromBytes(r)
+}
diff --git a/prover/lib/compressor/blob/v1/encode_test.go b/prover/lib/compressor/blob/v1/encode_test.go
index 021f2711a..0df74870f 100644
--- a/prover/lib/compressor/blob/v1/encode_test.go
+++ b/prover/lib/compressor/blob/v1/encode_test.go
@@ -6,13 +6,12 @@ import (
"bytes"
"encoding/hex"
"fmt"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ encodeTesting "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode/test_utils"
"testing"
v1 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
"github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1/test_utils"
- "github.com/consensys/linea-monorepo/prover/utils/types"
-
- "github.com/consensys/linea-monorepo/prover/backend/ethereum"
"github.com/consensys/linea-monorepo/prover/utils"
"github.com/ethereum/go-ethereum/common"
ethtypes "github.com/ethereum/go-ethereum/core/types"
@@ -33,58 +32,57 @@ func TestEncodeDecode(t *testing.T) {
t.Fatalf("could not decode test RLP block: %s", err.Error())
}
- var (
- buf = &bytes.Buffer{}
- expected = test_utils.DecodedBlockData{
- BlockHash: block.Hash(),
- Txs: make([]ethtypes.Transaction, len(block.Transactions())),
- Timestamp: block.Time(),
- }
- )
-
- for i := range expected.Txs {
- expected.Txs[i] = *block.Transactions()[i]
- }
+ var buf bytes.Buffer
- if err := v1.EncodeBlockForCompression(&block, buf); err != nil {
+ if err := v1.EncodeBlockForCompression(&block, &buf); err != nil {
t.Fatalf("failed encoding the block: %s", err.Error())
}
- var (
- encoded = buf.Bytes()
- r = bytes.NewReader(encoded)
- decoded, err = test_utils.DecodeBlockFromUncompressed(r)
- size, errScan = v1.ScanBlockByteLen(encoded)
- )
+ encoded := buf.Bytes()
+ r := bytes.NewReader(encoded)
+ decoded, err := v1.DecodeBlockFromUncompressed(r)
+ size, errScan := v1.ScanBlockByteLen(encoded)
assert.NoError(t, errScan, "error scanning the payload length")
assert.NotZero(t, size, "scanned a block size of zero")
require.NoError(t, err)
- assert.Equal(t, expected.BlockHash, decoded.BlockHash)
- assert.Equal(t, expected.Timestamp, decoded.Timestamp)
- assert.Equal(t, len(expected.Txs), len(decoded.Txs))
+ assert.Equal(t, block.Hash(), decoded.BlockHash)
+ assert.Equal(t, block.Time(), decoded.Timestamp)
+ assert.Equal(t, len(block.Transactions()), len(decoded.Txs))
+
+ for i := range block.Transactions() {
+ encodeTesting.CheckSameTx(t, block.Transactions()[i], ethtypes.NewTx(decoded.Txs[i]), decoded.Froms[i])
+ if t.Failed() {
+ return
+ }
+ }
+
+ t.Log("attempting RLP serialization")
+
+ encoded, err = rlp.EncodeToBytes(decoded.ToStd())
+ assert.NoError(t, err)
+
+ var blockBack ethtypes.Block
+ assert.NoError(t, rlp.Decode(bytes.NewReader(encoded), &blockBack))
- for i := range expected.Txs {
- checkSameTx(t, &expected.Txs[i], &decoded.Txs[i], decoded.Froms[i])
+ assert.Equal(t, block.Hash(), blockBack.ParentHash())
+ assert.Equal(t, block.Time(), blockBack.Time())
+ assert.Equal(t, len(block.Transactions()), len(blockBack.Transactions()))
+
+ for i := range block.Transactions() {
+ tx := blockBack.Transactions()[i]
+ encodeTesting.CheckSameTx(t, block.Transactions()[i], ethtypes.NewTx(decoded.Txs[i]), common.Address(encode.GetAddressFromR(tx)))
if t.Failed() {
return
}
}
+
})
}
}
-func checkSameTx(t *testing.T, orig, decoded *ethtypes.Transaction, from common.Address) {
- assert.Equal(t, orig.To(), decoded.To())
- assert.Equal(t, orig.Nonce(), decoded.Nonce())
- assert.Equal(t, orig.Data(), decoded.Data())
- assert.Equal(t, orig.Value(), decoded.Value())
- assert.Equal(t, orig.Cost(), decoded.Cost())
- assert.Equal(t, ethereum.GetFrom(orig), types.EthAddress(from))
-}
-
func TestPassRlpList(t *testing.T) {
makeRlpSlice := func(n int) []byte {
@@ -138,7 +136,7 @@ func TestVectorDecode(t *testing.T) {
var (
postPadded = append(b, postPad[:]...)
r = bytes.NewReader(b)
- _, errDec = test_utils.DecodeBlockFromUncompressed(r)
+ _, errDec = v1.DecodeBlockFromUncompressed(r)
_, errScan = v1.ScanBlockByteLen(postPadded)
)
diff --git a/prover/lib/compressor/blob/v1/test_utils/blob_maker_testing.go b/prover/lib/compressor/blob/v1/test_utils/blob_maker_testing.go
index a325cf5a2..7c0025013 100644
--- a/prover/lib/compressor/blob/v1/test_utils/blob_maker_testing.go
+++ b/prover/lib/compressor/blob/v1/test_utils/blob_maker_testing.go
@@ -5,20 +5,16 @@ import (
"crypto/rand"
"encoding/binary"
"encoding/json"
- "fmt"
"os"
"path/filepath"
"strings"
- "github.com/consensys/linea-monorepo/prover/backend/ethereum"
- "github.com/consensys/linea-monorepo/prover/lib/compressor/blob"
- v1 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/core/types"
-
"github.com/consensys/compress/lzss"
fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
"github.com/consensys/linea-monorepo/prover/backend/execution"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode"
+ v1 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -79,7 +75,7 @@ func LoadTestBlocks(testDataDir string) (testBlocks [][]byte, err error) {
return testBlocks, nil
}
-func RandIntn(n int) int {
+func RandIntn(n int) int { // TODO @Tabaie remove
var b [8]byte
_, _ = rand.Read(b[:])
return int(binary.BigEndian.Uint64(b[:]) % uint64(n))
@@ -102,7 +98,7 @@ func EmptyBlob(t require.TestingT) []byte {
assert.NoError(t, err)
var bb bytes.Buffer
- if _, err = v1.PackAlign(&bb, headerB.Bytes(), fr381.Bits-1, v1.WithAdditionalInput(compressor.Bytes())); err != nil {
+ if _, err = encode.PackAlign(&bb, headerB.Bytes(), fr381.Bits-1, encode.WithAdditionalInput(compressor.Bytes())); err != nil {
panic(err)
}
return bb.Bytes()
@@ -165,72 +161,6 @@ func TestBlocksAndBlobMaker(t require.TestingT) ([][]byte, *v1.BlobMaker) {
return testBlocks, bm
}
-// DecodedBlockData is a wrapper struct storing the different fields of a block
-// that we deserialize when decoding an ethereum block.
-type DecodedBlockData struct {
- // BlockHash stores the decoded block hash
- BlockHash common.Hash
- // Timestamp holds the Unix timestamp of the block in
- Timestamp uint64
- // Froms stores the list of the sender address of every transaction
- Froms []common.Address
- // Txs stores the list of the decoded transactions.
- Txs []types.Transaction
-}
-
-// DecodeBlockFromUncompressed inverts [EncodeBlockForCompression]. It is primarily meant for
-// testing and ensuring the encoding is bijective.
-func DecodeBlockFromUncompressed(r *bytes.Reader) (DecodedBlockData, error) {
-
- var (
- decNumTxs uint16
- decTimestamp uint32
- blockHash common.Hash
- )
-
- if err := binary.Read(r, binary.BigEndian, &decNumTxs); err != nil {
- return DecodedBlockData{}, fmt.Errorf("could not decode nb txs: %w", err)
- }
-
- if err := binary.Read(r, binary.BigEndian, &decTimestamp); err != nil {
- return DecodedBlockData{}, fmt.Errorf("could not decode timestamp: %w", err)
- }
-
- if _, err := r.Read(blockHash[:]); err != nil {
- return DecodedBlockData{}, fmt.Errorf("could not read the block hash: %w", err)
- }
-
- var (
- numTxs = int(decNumTxs)
- decodedBlk = DecodedBlockData{
- Froms: make([]common.Address, numTxs),
- Txs: make([]types.Transaction, numTxs),
- Timestamp: uint64(decTimestamp),
- BlockHash: blockHash,
- }
- )
-
- for i := 0; i < int(decNumTxs); i++ {
- if err := DecodeTxFromUncompressed(r, &decodedBlk.Txs[i], &decodedBlk.Froms[i]); err != nil {
- return DecodedBlockData{}, fmt.Errorf("could not decode transaction #%v: %w", i, err)
- }
- }
-
- return decodedBlk, nil
-}
-
-func DecodeTxFromUncompressed(r *bytes.Reader, tx *types.Transaction, from *common.Address) (err error) {
- if _, err := r.Read(from[:]); err != nil {
- return fmt.Errorf("could not read from address: %w", err)
- }
-
- if err := ethereum.DecodeTxFromBytes(r, tx); err != nil {
- return fmt.Errorf("could not deserialize transaction")
- }
-
- return nil
-}
-
func GetDict(t require.TestingT) []byte {
dict, err := blob.GetDict()
require.NoError(t, err)
diff --git a/prover/lib/compressor/libcompressor.go b/prover/lib/compressor/libcompressor/libcompressor.go
similarity index 100%
rename from prover/lib/compressor/libcompressor.go
rename to prover/lib/compressor/libcompressor/libcompressor.go
diff --git a/prover/lib/compressor/libcompressor.h b/prover/lib/compressor/libcompressor/libcompressor.h
similarity index 100%
rename from prover/lib/compressor/libcompressor.h
rename to prover/lib/compressor/libcompressor/libcompressor.h
diff --git a/prover/lib/compressor/libdecompressor/libdecompressor.go b/prover/lib/compressor/libdecompressor/libdecompressor.go
new file mode 100644
index 000000000..585fc300d
--- /dev/null
+++ b/prover/lib/compressor/libdecompressor/libdecompressor.go
@@ -0,0 +1,96 @@
+package main
+
+import "C"
+
+import (
+ "errors"
+ "strings"
+ "sync"
+ "unsafe"
+
+ decompressor "github.com/consensys/linea-monorepo/prover/lib/compressor/blob"
+ "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/dictionary"
+)
+
+//go:generate go build -tags nocorset -ldflags "-s -w" -buildmode=c-shared -o libdecompressor.so libdecompressor.go
+func main() {}
+
+var (
+ dictStore dictionary.Store
+ lastError error
+ lock sync.Mutex // probably unnecessary if coordinator guarantees single-threaded access
+)
+
+// Init initializes the decompressor.
+//
+//export Init
+func Init() {
+ dictStore = dictionary.NewStore()
+}
+
+// LoadDictionaries loads a number of dictionaries into the decompressor
+// according to colon-separated paths.
+// Returns the number of dictionaries loaded, or -1 if unsuccessful.
+// If -1 is returned, the Error() method will return a string describing the error.
+//
+//export LoadDictionaries
+func LoadDictionaries(dictPaths *C.char) C.int {
+ lock.Lock()
+ defer lock.Unlock()
+
+ pathsConcat := C.GoString(dictPaths)
+ paths := strings.Split(pathsConcat, ":")
+
+ if err := dictStore.Load(paths...); err != nil {
+ lastError = err
+ return -1
+ }
+ return C.int(len(paths))
+}
+
+// Decompress processes a Linea blob and outputs an RLP encoded list of RLP encoded blocks.
+// Due to information loss during pre-compression encoding, two pieces of information are represented "hackily":
+// The block hash is in the ParentHash field.
+// The transaction from address is in the signature.R field.
+//
+// Returns the number of bytes in out, or -1 in case of failure
+// If -1 is returned, the Error() method will return a string describing the error.
+//
+//export Decompress
+func Decompress(blob *C.char, blobLength C.int, out *C.char, outMaxLength C.int) C.int {
+
+ lock.Lock()
+ defer lock.Unlock()
+
+ bGo := C.GoBytes(unsafe.Pointer(blob), blobLength)
+
+ blocks, err := decompressor.DecompressBlob(bGo, dictStore)
+ if err != nil {
+ lastError = err
+ return -1
+ }
+
+ if len(blocks) > int(outMaxLength) {
+ lastError = errors.New("decoded blob does not fit in output buffer")
+ return -1
+ }
+
+ outSlice := unsafe.Slice((*byte)(unsafe.Pointer(out)), len(blocks))
+ copy(outSlice, blocks)
+
+ return C.int(len(blocks))
+}
+
+// Error returns the last encountered error.
+// If no error was encountered, returns nil.
+//
+//export Error
+func Error() *C.char {
+ lock.Lock()
+ defer lock.Unlock()
+ if lastError != nil {
+ // this leaks memory, but since this represents a fatal error, it's probably ok.
+ return C.CString(lastError.Error())
+ }
+ return nil
+}
diff --git a/prover/lib/compressor/libdecompressor/libdecompressor.h b/prover/lib/compressor/libdecompressor/libdecompressor.h
new file mode 100644
index 000000000..09bf1517a
--- /dev/null
+++ b/prover/lib/compressor/libdecompressor/libdecompressor.h
@@ -0,0 +1,106 @@
+/* Code generated by cmd/cgo; DO NOT EDIT. */
+
+/* package command-line-arguments */
+
+
+#line 1 "cgo-builtin-export-prolog"
+
+#include <stddef.h>
+
+#ifndef GO_CGO_EXPORT_PROLOGUE_H
+#define GO_CGO_EXPORT_PROLOGUE_H
+
+#ifndef GO_CGO_GOSTRING_TYPEDEF
+typedef struct { const char *p; ptrdiff_t n; } _GoString_;
+#endif
+
+#endif
+
+/* Start of preamble from import "C" comments. */
+
+
+
+
+/* End of preamble from import "C" comments. */
+
+
+/* Start of boilerplate cgo prologue. */
+#line 1 "cgo-gcc-export-header-prolog"
+
+#ifndef GO_CGO_PROLOGUE_H
+#define GO_CGO_PROLOGUE_H
+
+typedef signed char GoInt8;
+typedef unsigned char GoUint8;
+typedef short GoInt16;
+typedef unsigned short GoUint16;
+typedef int GoInt32;
+typedef unsigned int GoUint32;
+typedef long long GoInt64;
+typedef unsigned long long GoUint64;
+typedef GoInt64 GoInt;
+typedef GoUint64 GoUint;
+typedef size_t GoUintptr;
+typedef float GoFloat32;
+typedef double GoFloat64;
+#ifdef _MSC_VER
+#include <complex.h>
+typedef _Fcomplex GoComplex64;
+typedef _Dcomplex GoComplex128;
+#else
+typedef float _Complex GoComplex64;
+typedef double _Complex GoComplex128;
+#endif
+
+/*
+ static assertion to make sure the file is being used on architecture
+ at least with matching size of GoInt.
+*/
+typedef char _check_for_64_bit_pointer_matching_GoInt[sizeof(void*)==64/8 ? 1:-1];
+
+#ifndef GO_CGO_GOSTRING_TYPEDEF
+typedef _GoString_ GoString;
+#endif
+typedef void *GoMap;
+typedef void *GoChan;
+typedef struct { void *t; void *v; } GoInterface;
+typedef struct { void *data; GoInt len; GoInt cap; } GoSlice;
+
+#endif
+
+/* End of boilerplate cgo prologue. */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+// Init initializes the decompressor.
+//
+extern void Init();
+
+// LoadDictionaries loads a number of dictionaries into the decompressor
+// according to colon-separated paths.
+// Returns the number of dictionaries loaded, or -1 if unsuccessful.
+// If -1 is returned, the Error() method will return a string describing the error.
+//
+extern int LoadDictionaries(char* dictPaths);
+
+// Decompress processes a Linea blob and outputs an RLP encoded list of RLP encoded blocks.
+// Due to information loss during pre-compression encoding, two pieces of information are represented "hackily":
+// The block hash is in the ParentHash field.
+// The transaction from address is in the signature.R field.
+//
+// Returns the number of bytes in out, or -1 in case of failure
+// If -1 is returned, the Error() method will return a string describing the error.
+//
+extern int Decompress(char* blob, int blobLength, char* out, int outMaxLength);
+
+// Error returns the last encountered error.
+// If no error was encountered, returns nil.
+//
+extern char* Error();
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/prover/utils/test_utils/test_utils.go b/prover/utils/test_utils/test_utils.go
index dde00a424..784621029 100644
--- a/prover/utils/test_utils/test_utils.go
+++ b/prover/utils/test_utils/test_utils.go
@@ -3,12 +3,17 @@ package test_utils
import (
"crypto/rand"
"encoding/binary"
+ "encoding/hex"
"encoding/json"
"fmt"
- "github.com/stretchr/testify/require"
"math"
"os"
+ "reflect"
+ "strconv"
+ "strings"
"testing"
+
+ "github.com/stretchr/testify/require"
)
type FakeTestingT struct{}
@@ -41,8 +46,109 @@ func RandIntSliceN(length, n int) []int {
return res
}
+type BytesEqualError struct {
+ Index int
+ error string
+}
+
+func (e *BytesEqualError) Error() string {
+ return e.error
+}
+
func LoadJson(t *testing.T, path string, v any) {
in, err := os.Open(path)
require.NoError(t, err)
require.NoError(t, json.NewDecoder(in).Decode(v))
}
+
+// BytesEqual between byte slices a,b
+// a readable error message would show in case of inequality
+// TODO error options: block size, check forwards or backwards etc
+func BytesEqual(expected, actual []byte) error {
+ l := min(len(expected), len(actual))
+
+ failure := 0
+ for failure < l {
+ if expected[failure] != actual[failure] {
+ break
+ }
+ failure++
+ }
+
+ if len(expected) == len(actual) {
+ return nil
+ }
+
+ // there is a mismatch
+ var sb strings.Builder
+
+ const (
+ radius = 40
+ blockSize = 32
+ )
+
+ printCentered := func(b []byte) {
+
+ for i := max(failure-radius, 0); i <= failure+radius; i++ {
+ if i%blockSize == 0 && i != failure-radius {
+ sb.WriteString(" ")
+ }
+ if i >= 0 && i < len(b) {
+ sb.WriteString(hex.EncodeToString([]byte{b[i]})) // inefficient, but this whole error printing sub-procedure will not be run more than once
+ } else {
+ sb.WriteString(" ")
+ }
+ }
+ }
+
+ sb.WriteString(fmt.Sprintf("mismatch starting at byte %d\n", failure))
+
+ sb.WriteString("expected: ")
+ printCentered(expected)
+ sb.WriteString("\n")
+
+ sb.WriteString("actual: ")
+ printCentered(actual)
+ sb.WriteString("\n")
+
+ sb.WriteString(" ")
+ for i := max(failure-radius, 0); i <= failure+radius; {
+ if i%blockSize == 0 && i != failure-radius {
+ s := strconv.Itoa(i)
+ sb.WriteString(" ")
+ sb.WriteString(s)
+ i += len(s) / 2
+ if len(s)%2 != 0 {
+ sb.WriteString(" ")
+ i++
+ }
+ } else {
+ if i == failure {
+ sb.WriteString("^^")
+ } else {
+ sb.WriteString(" ")
+ }
+ i++
+ }
+ }
+
+ sb.WriteString("\n")
+
+ return &BytesEqualError{
+ Index: failure,
+ error: sb.String(),
+ }
+}
+
+func SlicesEqual[T any](expected, actual []T) error {
+ if l1, l2 := len(expected), len(actual); l1 != l2 {
+ return fmt.Errorf("length mismatch %d≠%d", l1, l2)
+ }
+
+ for i := range expected {
+ if !reflect.DeepEqual(expected[i], actual[i]) {
+ return fmt.Errorf("mismatch at #%d:\nexpected %v\nencountered %v", i, expected[i], actual[i])
+ }
+ }
+ return nil
+}
diff --git a/settings.gradle b/settings.gradle
index bacee4331..5e0e63284 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -3,34 +3,37 @@ rootProject.name = 'linea'
include 'jvm-libs:generic:serialization:jackson'
include 'jvm-libs:generic:json-rpc'
include 'jvm-libs:generic:http-rest'
+include 'jvm-libs:generic:extensions:futures'
include 'jvm-libs:generic:extensions:kotlin'
+include 'jvm-libs:generic:extensions:tuweni'
include 'jvm-libs:generic:logging'
include 'jvm-libs:generic:vertx-helper'
-include 'jvm-libs:generic:extensions:futures'
include 'jvm-libs:generic:errors'
include 'jvm-libs:generic:persistence:db'
+include 'jvm-libs:linea:clients:linea-state-manager'
+include 'jvm-libs:linea:core:client-interface'
include 'jvm-libs:linea:core:domain-models'
+include 'jvm-libs:linea:core:long-running-service'
include 'jvm-libs:linea:core:metrics'
include 'jvm-libs:linea:core:traces'
-include 'jvm-libs:linea:web3j-extensions'
include 'jvm-libs:linea:blob-compressor'
+include 'jvm-libs:linea:blob-decompressor'
include 'jvm-libs:linea:blob-shnarf-calculator'
-include 'jvm-libs:linea:core:long-running-service'
include 'jvm-libs:linea:linea-contracts:l1-rollup'
include 'jvm-libs:linea:linea-contracts:l2-message-service'
include 'jvm-libs:linea:metrics:micrometer'
include 'jvm-libs:linea:teku-execution-client'
+include 'jvm-libs:linea:testing:file-system'
include 'jvm-libs:linea:testing:l1-blob-and-proof-submission'
include 'jvm-libs:linea:testing:teku-helper'
-include 'jvm-libs:linea:testing:file-system'
+include 'jvm-libs:linea:web3j-extensions'
include 'coordinator:app'
include 'coordinator:core'
include 'coordinator:utilities'
include 'coordinator:clients:prover-client:file-based-client'
include 'coordinator:clients:prover-client:serialization'
-include 'coordinator:clients:type2-state-manager-client'
include 'coordinator:clients:traces-generator-api-client'
include 'coordinator:clients:shomei-client'
include 'coordinator:clients:smart-contract-client'
@@ -60,3 +63,10 @@ include 'transaction-decoder-tool'
include 'transaction-exclusion-api:app'
include 'transaction-exclusion-api:core'
include 'transaction-exclusion-api:persistence:rejectedtransaction'
+
+include 'state-recover:appcore:logic'
+include 'state-recover:appcore:domain-models'
+include 'state-recover:appcore:clients-interfaces'
+include 'state-recover:clients:blobscan-client'
+include 'state-recover:clients:execution-layer-json-rpc-client'
+include 'state-recover:clients:smartcontract'
diff --git a/state-recover/appcore/clients-interfaces/build.gradle b/state-recover/appcore/clients-interfaces/build.gradle
new file mode 100644
index 000000000..081b3e53a
--- /dev/null
+++ b/state-recover/appcore/clients-interfaces/build.gradle
@@ -0,0 +1,11 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+group = 'build.linea.staterecover'
+
+dependencies {
+ api(project(':jvm-libs:generic:extensions:kotlin'))
+ api(project(':jvm-libs:linea:core:domain-models'))
+ api(project(':state-recover:appcore:domain-models'))
+}
diff --git a/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/BlobFetcher.kt b/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/BlobFetcher.kt
new file mode 100644
index 000000000..71523de7f
--- /dev/null
+++ b/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/BlobFetcher.kt
@@ -0,0 +1,7 @@
+package build.linea.staterecover.clients
+
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+
+interface BlobFetcher {
+ fun fetchBlobsByHash(blobVersionedHashes: List<ByteArray>): SafeFuture<List<ByteArray>>
+}
diff --git a/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/ExecutionLayerClient.kt b/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/ExecutionLayerClient.kt
new file mode 100644
index 000000000..97b9d40a1
--- /dev/null
+++ b/state-recover/appcore/clients-interfaces/src/main/kotlin/build/linea/staterecover/clients/ExecutionLayerClient.kt
@@ -0,0 +1,12 @@
+package build.linea.staterecover.clients
+
+import build.linea.staterecover.BlockL1RecoveredData
+import net.consensys.linea.BlockNumberAndHash
+import net.consensys.linea.BlockParameter
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+
+interface ExecutionLayerClient {
+ fun getBlockNumberAndHash(blockParameter: BlockParameter): SafeFuture<BlockNumberAndHash>
+ fun lineaEngineImportBlocksFromBlob(blocks: List<BlockL1RecoveredData>): SafeFuture<Unit>
+ fun lineaEngineForkChoiceUpdated(headBlockHash: ByteArray, finalizedBlockHash: ByteArray): SafeFuture<Unit>
+}
diff --git a/state-recover/appcore/domain-models/build.gradle b/state-recover/appcore/domain-models/build.gradle
new file mode 100644
index 000000000..66fdad134
--- /dev/null
+++ b/state-recover/appcore/domain-models/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+group = 'build.linea.staterecover'
+
+dependencies {
+ api(project(':jvm-libs:generic:extensions:kotlin'))
+}
diff --git a/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/BlockL1RecoveredData.kt b/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/BlockL1RecoveredData.kt
new file mode 100644
index 000000000..be0eef8f2
--- /dev/null
+++ b/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/BlockL1RecoveredData.kt
@@ -0,0 +1,78 @@
+package build.linea.staterecover
+
+import kotlinx.datetime.Instant
+import net.consensys.encodeHex
+
+data class BlockExtraData(
+ val beneficiary: ByteArray
+) {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as BlockExtraData
+
+ return beneficiary.contentEquals(other.beneficiary)
+ }
+
+ override fun hashCode(): Int {
+ return beneficiary.contentHashCode()
+ }
+
+ override fun toString(): String {
+ return "BlockExtraData(beneficiary=${beneficiary.encodeHex()})"
+ }
+}
+
+data class BlockL1RecoveredData(
+ val blockNumber: ULong,
+ val blockHash: ByteArray,
+ val coinbase: ByteArray,
+ val blockTimestamp: Instant,
+ val gasLimit: ULong,
+ val difficulty: ULong,
+ val extraData: BlockExtraData,
+ val transactions: List<TransactionL1RecoveredData>
+) {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as BlockL1RecoveredData
+
+ if (blockNumber != other.blockNumber) return false
+ if (!blockHash.contentEquals(other.blockHash)) return false
+ if (!coinbase.contentEquals(other.coinbase)) return false
+ if (blockTimestamp != other.blockTimestamp) return false
+ if (gasLimit != other.gasLimit) return false
+ if (difficulty != other.difficulty) return false
+ if (extraData != other.extraData) return false
+ if (transactions != other.transactions) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = blockNumber.hashCode()
+ result = 31 * result + blockHash.contentHashCode()
+ result = 31 * result + coinbase.contentHashCode()
+ result = 31 * result + blockTimestamp.hashCode()
+ result = 31 * result + gasLimit.hashCode()
+ result = 31 * result + difficulty.hashCode()
+ result = 31 * result + extraData.hashCode()
+ result = 31 * result + transactions.hashCode()
+ return result
+ }
+
+ override fun toString(): String {
+ return "BlockL1RecoveredData(" +
+ "blockNumber=$blockNumber, " +
+ "blockHash=${blockHash.encodeHex()}, " +
+ "coinbase=${coinbase.encodeHex()}, " +
+ "blockTimestamp=$blockTimestamp, " +
+ "gasLimit=$gasLimit, " +
+ "difficulty=$difficulty, " +
+ "extraData=$extraData, " +
+ "transactions=$transactions)"
+ }
+}
diff --git a/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/TransactionL1RecoveredData.kt b/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/TransactionL1RecoveredData.kt
new file mode 100644
index 000000000..cd09e2a78
--- /dev/null
+++ b/state-recover/appcore/domain-models/src/main/kotlin/build/linea/staterecover/TransactionL1RecoveredData.kt
@@ -0,0 +1,74 @@
+package build.linea.staterecover
+
+import java.math.BigInteger
+
+data class TransactionL1RecoveredData(
+ val type: UByte,
+ val nonce: ULong,
+ val maxPriorityFeePerGas: BigInteger,
+ val maxFeePerGas: BigInteger,
+ val gasLimit: ULong,
+ val from: ByteArray,
+ val to: ByteArray,
+ val value: BigInteger,
+ val data: ByteArray,
+ val accessList: List<AccessTuple>
+) {
+
+ data class AccessTuple(
+ val address: ByteArray,
+ val storageKeys: List<ByteArray>
+ ) {
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as AccessTuple
+
+ if (!address.contentEquals(other.address)) return false
+ if (storageKeys != other.storageKeys) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = address.contentHashCode()
+ result = 31 * result + storageKeys.hashCode()
+ return result
+ }
+ }
+
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (javaClass != other?.javaClass) return false
+
+ other as TransactionL1RecoveredData
+
+ if (type != other.type) return false
+ if (nonce != other.nonce) return false
+ if (maxPriorityFeePerGas != other.maxPriorityFeePerGas) return false
+ if (maxFeePerGas != other.maxFeePerGas) return false
+ if (gasLimit != other.gasLimit) return false
+ if (!from.contentEquals(other.from)) return false
+ if (!to.contentEquals(other.to)) return false
+ if (value != other.value) return false
+ if (!data.contentEquals(other.data)) return false
+ if (accessList != other.accessList) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = type.hashCode()
+ result = 31 * result + nonce.hashCode()
+ result = 31 * result + maxPriorityFeePerGas.hashCode()
+ result = 31 * result + maxFeePerGas.hashCode()
+ result = 31 * result + gasLimit.hashCode()
+ result = 31 * result + from.contentHashCode()
+ result = 31 * result + to.contentHashCode()
+ result = 31 * result + value.hashCode()
+ result = 31 * result + data.contentHashCode()
+ result = 31 * result + accessList.hashCode()
+ return result
+ }
+}
diff --git a/state-recover/clients/blobscan-client/build.gradle b/state-recover/clients/blobscan-client/build.gradle
new file mode 100644
index 000000000..85641b928
--- /dev/null
+++ b/state-recover/clients/blobscan-client/build.gradle
@@ -0,0 +1,65 @@
+import org.gradle.api.tasks.testing.logging.TestExceptionFormat
+import org.gradle.api.tasks.testing.logging.TestLogEvent
+
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+group = 'build.linea.staterecover'
+
+dependencies {
+ implementation(project(':jvm-libs:generic:extensions:futures'))
+ implementation(project(':jvm-libs:generic:extensions:kotlin'))
+ implementation(project(':jvm-libs:generic:extensions:tuweni'))
+ implementation(project(':jvm-libs:generic:http-rest'))
+ implementation(project(':jvm-libs:generic:json-rpc'))
+ implementation(project(':jvm-libs:generic:vertx-helper'))
+ implementation(project(':jvm-libs:linea:clients:linea-state-manager'))
+ implementation(project(':jvm-libs:linea:core:domain-models'))
+ implementation(project(':jvm-libs:linea:core:long-running-service'))
+ implementation(project(':state-recover:appcore:clients-interfaces'))
+ implementation("io.vertx:vertx-web-client:${libs.versions.vertx}")
+
+ testImplementation "com.github.tomakehurst:wiremock-jre8:${libs.versions.wiremock.get()}"
+ testImplementation "org.slf4j:slf4j-api:1.7.30"
+ testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${libs.versions.log4j}"
+ testImplementation "org.apache.logging.log4j:log4j-core:${libs.versions.log4j}"
+}
+
+sourceSets {
+ integrationTest {
+ kotlin {
+ compileClasspath += sourceSets.main.output
+ runtimeClasspath += sourceSets.main.output
+ }
+ compileClasspath += sourceSets.main.output + sourceSets.main.compileClasspath + sourceSets.test.compileClasspath
+ runtimeClasspath += sourceSets.main.output + sourceSets.main.runtimeClasspath + sourceSets.test.runtimeClasspath
+ }
+}
+
+task integrationTest(type: Test) {
+ test ->
+ description = "Runs integration tests."
+ group = "verification"
+ useJUnitPlatform()
+
+ classpath = sourceSets.integrationTest.runtimeClasspath
+ testClassesDirs = sourceSets.integrationTest.output.classesDirs
+
+ dependsOn(":localStackComposeUp")
+ dependsOn(rootProject.tasks.compileContracts)
+
+ testLogging {
+ events TestLogEvent.FAILED,
+ TestLogEvent.SKIPPED,
+ TestLogEvent.STANDARD_ERROR,
+ TestLogEvent.STARTED,
+ TestLogEvent.PASSED
+ exceptionFormat TestExceptionFormat.FULL
+ showCauses true
+ showExceptions true
+ showStackTraces true
+ // set showStandardStreams if you need to see test logs
+ showStandardStreams false
+ }
+}
diff --git a/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClient.kt b/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClient.kt
new file mode 100644
index 000000000..0091c94b0
--- /dev/null
+++ b/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClient.kt
@@ -0,0 +1,56 @@
+package build.linea.staterecover.clients.blobscan
+
+import build.linea.staterecover.clients.BlobFetcher
+import io.vertx.core.Vertx
+import io.vertx.core.json.JsonObject
+import io.vertx.ext.web.client.WebClient
+import io.vertx.ext.web.client.WebClientOptions
+import net.consensys.decodeHex
+import net.consensys.encodeHex
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import net.consensys.linea.vertx.setDefaultsFrom
+import org.apache.logging.log4j.LogManager
+import org.apache.logging.log4j.Logger
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+import java.net.URI
+
+class BlobScanClient(
+ private val restClient: RestClient<JsonObject>,
+ private val log: Logger = LogManager.getLogger(BlobScanClient::class.java)
+) : BlobFetcher {
+ fun getBlobById(id: String): SafeFuture<ByteArray> {
+ return restClient
+ .get("/blobs/$id")
+ .thenApply { response ->
+ if (response.statusCode == 200) {
+ response.body!!.getString("data").decodeHex()
+ } else {
+ throw RuntimeException(
+ "error fetching blobId=$id " +
+ "errorMessage=${response.body?.getString("message") ?: ""}"
+ )
+ }
+ }
+ }
+
+ override fun fetchBlobsByHash(blobVersionedHashes: List<ByteArray>): SafeFuture<List<ByteArray>> {
+ return SafeFuture.collectAll(blobVersionedHashes.map { hash -> getBlobById(hash.encodeHex()) }.stream())
+ }
+
+ companion object {
+ fun create(
+ vertx: Vertx,
+ endpoint: URI,
+ requestRetryConfig: RequestRetryConfig
+ ): BlobScanClient {
+ val restClient = VertxRestClient(
+ vertx = vertx,
+ webClient = WebClient.create(vertx, WebClientOptions().setDefaultsFrom(endpoint)),
+ responseParser = { it.toJsonObject() },
+ retryableErrorCodes = setOf(429, 503, 504),
+ requestRetryConfig = requestRetryConfig
+ )
+ return BlobScanClient(restClient)
+ }
+ }
+}
diff --git a/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/VertxRestClient.kt b/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/VertxRestClient.kt
new file mode 100644
index 000000000..e3b581d0e
--- /dev/null
+++ b/state-recover/clients/blobscan-client/src/main/kotlin/build/linea/staterecover/clients/blobscan/VertxRestClient.kt
@@ -0,0 +1,66 @@
+package build.linea.staterecover.clients.blobscan
+
+import io.vertx.core.Vertx
+import io.vertx.core.buffer.Buffer
+import io.vertx.ext.web.client.HttpRequest
+import io.vertx.ext.web.client.HttpResponse
+import io.vertx.ext.web.client.WebClient
+import net.consensys.linea.async.AsyncRetryer
+import net.consensys.linea.async.toSafeFuture
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+
+// TODO: move to a common module
+data class RestResponse<T>(
+ val statusCode: Int,
+ val body: T?
+)
+
+interface RestClient<Response> {
+ fun get(path: String): SafeFuture<RestResponse<Response>>
+ // add remaining verbs as we need them
+}
+
+class VertxRestClient<Response>(
+ private val vertx: Vertx,
+ private val webClient: WebClient,
+ private val responseParser: (Buffer) -> Response,
+ private val retryableErrorCodes: Set<Int> = DEFAULT_RETRY_HTTP_CODES,
+ private val requestRetryConfig: RequestRetryConfig,
+ private val asyncRetryer: AsyncRetryer<HttpResponse<Buffer>> = AsyncRetryer.retryer(
+ backoffDelay = requestRetryConfig.backoffDelay,
+ maxRetries = requestRetryConfig.maxRetries?.toInt(),
+ timeout = requestRetryConfig.timeout,
+ vertx = vertx
+ ),
+ private val requestHeaders: Map<String, String> = mapOf("Accept" to "application/json")
+) : RestClient {
+ private fun makeRequestWithRetry(
+ request: HttpRequest<Buffer>
+ ): SafeFuture<HttpResponse<Buffer>> {
+ return asyncRetryer
+ .retry(
+ stopRetriesPredicate = { response: HttpResponse<Buffer> ->
+ response.statusCode() !in retryableErrorCodes
+ }
+ ) {
+ request.send().toSafeFuture()
+ }
+ }
+
+ override fun get(path: String): SafeFuture<RestResponse<Response>> {
+ return makeRequestWithRetry(
+ webClient
+ .get(path)
+ .apply { requestHeaders.forEach(::putHeader) }
+ )
+ .thenApply { response ->
+ val parsedResponse = response.body()?.let(responseParser)
+ RestResponse(response.statusCode(), parsedResponse)
+ }
+ }
+
+ companion object {
+ val DEFAULT_RETRY_HTTP_CODES = setOf(429, 500, 503, 504)
+ }
+}
diff --git a/state-recover/clients/blobscan-client/src/test/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClientTest.kt b/state-recover/clients/blobscan-client/src/test/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClientTest.kt
new file mode 100644
index 000000000..a8ab315fe
--- /dev/null
+++ b/state-recover/clients/blobscan-client/src/test/kotlin/build/linea/staterecover/clients/blobscan/BlobScanClientTest.kt
@@ -0,0 +1,175 @@
+package build.linea.staterecover.clients.blobscan
+
+import com.github.tomakehurst.wiremock.WireMockServer
+import com.github.tomakehurst.wiremock.client.WireMock
+import com.github.tomakehurst.wiremock.core.WireMockConfiguration
+import com.github.tomakehurst.wiremock.http.RequestListener
+import io.micrometer.core.instrument.simple.SimpleMeterRegistry
+import io.vertx.core.Vertx
+import io.vertx.junit5.VertxExtension
+import net.consensys.encodeHex
+import net.consensys.linea.async.get
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import org.assertj.core.api.Assertions.assertThat
+import org.assertj.core.api.Assertions.assertThatThrownBy
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.extension.ExtendWith
+import java.net.URI
+import kotlin.time.Duration.Companion.milliseconds
+import kotlin.time.Duration.Companion.seconds
+
+@ExtendWith(VertxExtension::class)
+class BlobScanClientTest {
+
+ private lateinit var wiremock: WireMockServer
+ private lateinit var meterRegistry: SimpleMeterRegistry
+ private lateinit var serverURI: URI
+ private lateinit var blobScanClient: BlobScanClient
+
+ @BeforeEach
+ fun setUp(vertx: Vertx) {
+ wiremock = WireMockServer(
+ WireMockConfiguration.options()
+ .dynamicPort()
+ )
+ .apply {
+ addMockServiceRequestListener(object : RequestListener {
+ override fun requestReceived(
+ request: com.github.tomakehurst.wiremock.http.Request,
+ response: com.github.tomakehurst.wiremock.http.Response
+ ) {
+ // to debug
+ // println("request: ${request.url}")
+ }
+ })
+ }
+ wiremock.start()
+ meterRegistry = SimpleMeterRegistry()
+
+ serverURI = URI("http://127.0.0.1:${wiremock.port()}")
+ blobScanClient = BlobScanClient.create(
+ vertx = vertx,
+ endpoint = URI(wiremock.baseUrl()),
+ requestRetryConfig = RequestRetryConfig(
+ backoffDelay = 10.milliseconds,
+ maxRetries = 5u,
+ timeout = 5.seconds
+ )
+ )
+ }
+
+ @AfterEach
+ fun tearDown(vertx: Vertx) {
+ val vertxStopFuture = vertx.close()
+ wiremock.stop()
+ vertxStopFuture.get()
+ }
+
+ @Test
+ fun `when blobs exists shall return it`() {
+ val blobId = "0x0139f94e70bbbc39c821459ccd74245ff34212d76077df454d490f76790d563c"
+ val blobData = "0x0006eac4e2fac2ca844810be0dc9e398fa4961656c022b65"
+
+ wiremock.stubFor(
+ WireMock.get("/blobs/$blobId")
+ .withHeader("Accept", WireMock.containing("application/json"))
+ .willReturn(
+ WireMock.ok()
+ .withHeader("Content-type", "application/json")
+ .withBody(successResponseBody(blobId, blobData))
+ )
+ )
+
+ assertThat(blobScanClient.getBlobById(blobId).get().encodeHex()).isEqualTo(blobData)
+ }
+
+ @Test
+ fun `when blobs does not exists shall return error message`() {
+ val blobId = "0x0139f94e70bbbc39c821459ccd74245ff34212d76077df454d490f76790d563c"
+
+ wiremock.stubFor(
+ WireMock.get("/blobs/$blobId")
+ .withHeader("Accept", WireMock.containing("application/json"))
+ .willReturn(
+ WireMock
+ .notFound()
+ .withHeader("Content-type", "application/json")
+ .withBody(
+ """
+ {"message":"No blob with versioned hash or kzg commitment '$blobId'.","code":"NOT_FOUND"}
+ """.trimIndent()
+ )
+ )
+ )
+
+ assertThatThrownBy { blobScanClient.getBlobById(blobId).get() }
+ .hasMessageContaining("No blob with versioned hash or kzg commitment")
+ }
+
+ @Test
+ fun `when request failed shall retry it`() {
+ val blobId = "0x0139f94e70bbbc39c821459ccd74245ff34212d76077df454d490f76790d563c"
+ val blobData = "0x0006eac4e2fac2ca844810be0dc9e398fa4961656c022b65"
+
+ wiremock.stubFor(
+ WireMock.get("/blobs/$blobId")
+ .inScenario("SERVER_ERROR")
+ .willReturn(WireMock.status(503))
+ .willSetStateTo("SERVER_ERROR_1")
+ )
+ wiremock.stubFor(
+ WireMock.get("/blobs/$blobId")
+ .inScenario("SERVER_ERROR")
+ .whenScenarioStateIs("SERVER_ERROR_1")
+ .willReturn(WireMock.status(503))
+ .willSetStateTo("SERVER_OK")
+ )
+ wiremock.stubFor(
+ WireMock.get("/blobs/$blobId")
+ .inScenario("SERVER_ERROR")
+ .whenScenarioStateIs("SERVER_OK")
+ .willReturn(
+ WireMock.okJson(successResponseBody(blobId, blobData))
+ )
+ )
+
+ assertThat(blobScanClient.getBlobById(blobId).get().encodeHex()).isEqualTo(blobData)
+ }
+
+ private fun successResponseBody(
+ blobId: String,
+ blobData: String
+ ): String {
+ return """
+ {
+ "commitment": "0x86cddad176d1db92ac521c5dada895e1cca048a86618f131f271f54f07130daddd51af1f416be7ede789f6305d00d670",
+ "proof": "0x8ec34bdd70967eaa212b8c16c783f48940d6d0ab402b410290fd709511adb86a219bae75a41f295f0d7c6b0e22a74c38",
+ "size": 131072,
+ "versionedHash": "$blobId",
+ "data": "$blobData",
+ "dataStorageReferences": [
+ {
+ "blobStorage": "google",
+ "dataReference": "1/01/39/f9/0139f94e70bbbc39c821459ccd74245ff34212d76077df454d490f76790d563c.txt"
+ },
+ {
+ "blobStorage": "swarm",
+ "dataReference": "ff6758f14e3becd98f4a38588ff6371a4669aedbb4fd17604b501781b7646b41"
+ }
+ ],
+ "transactions": [
+ {
+ "hash": "0xe085a55b76df624824948f5611b363c3b23c8ff5db92df327004c8e66282227e",
+ "index": 0,
+ "blockHash": "0x5bc4cd40c9af4f3ec3d260b46c907c05f62f68ec12fc6b893a827d8cf8043b41",
+ "blockNumber": 20860690,
+ "blockTimestamp": "2024-09-30T03:11:23.000Z",
+ "rollup": "linea"
+ }
+ ]
+ }
+ """.trimIndent()
+ }
+}
diff --git a/state-recover/clients/blobscan-client/src/test/resources/log4j2.xml b/state-recover/clients/blobscan-client/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..047054dcf
--- /dev/null
+++ b/state-recover/clients/blobscan-client/src/test/resources/log4j2.xml
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/state-recover/clients/execution-layer-json-rpc-client/build.gradle b/state-recover/clients/execution-layer-json-rpc-client/build.gradle
new file mode 100644
index 000000000..70349a034
--- /dev/null
+++ b/state-recover/clients/execution-layer-json-rpc-client/build.gradle
@@ -0,0 +1,18 @@
+plugins {
+ id 'net.consensys.zkevm.kotlin-library-conventions'
+}
+
+dependencies {
+ implementation(project(':jvm-libs:generic:extensions:kotlin'))
+ implementation(project(':jvm-libs:generic:extensions:futures'))
+ implementation(project(':jvm-libs:generic:http-rest'))
+ implementation(project(':jvm-libs:generic:json-rpc'))
+ implementation(project(':jvm-libs:generic:vertx-helper'))
+ implementation(project(':jvm-libs:generic:serialization:jackson'))
+ implementation(project(':jvm-libs:linea:core:domain-models'))
+ implementation(project(':state-recover:appcore:clients-interfaces'))
+ implementation(project(':state-recover:appcore:domain-models'))
+
+ testImplementation "com.github.tomakehurst:wiremock-jre8:${libs.versions.wiremock.get()}"
+ testImplementation "net.javacrumbs.json-unit:json-unit-assertj:${libs.versions.jsonUnit.get()}"
+}
diff --git a/state-recover/clients/execution-layer-json-rpc-client/src/main/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClient.kt b/state-recover/clients/execution-layer-json-rpc-client/src/main/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClient.kt
new file mode 100644
index 000000000..813c0e2ed
--- /dev/null
+++ b/state-recover/clients/execution-layer-json-rpc-client/src/main/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClient.kt
@@ -0,0 +1,72 @@
+package build.linea.staterecover.clients.el
+
+import build.linea.s11n.jackson.ethApiObjectMapper
+import build.linea.staterecover.BlockL1RecoveredData
+import build.linea.staterecover.clients.ExecutionLayerClient
+import com.fasterxml.jackson.databind.JsonNode
+import net.consensys.decodeHex
+import net.consensys.encodeHex
+import net.consensys.fromHexString
+import net.consensys.linea.BlockNumberAndHash
+import net.consensys.linea.BlockParameter
+import net.consensys.linea.jsonrpc.client.JsonRpcClientFactory
+import net.consensys.linea.jsonrpc.client.JsonRpcV2Client
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import tech.pegasys.teku.infrastructure.async.SafeFuture
+import java.net.URI
+
+class ExecutionLayerJsonRpcClient internal constructor(
+ private val rpcClient: JsonRpcV2Client
+) : ExecutionLayerClient {
+ override fun getBlockNumberAndHash(blockParameter: BlockParameter): SafeFuture {
+ return rpcClient
+ .makeRequest(
+ method = "eth_getBlockByNumber",
+ params = listOf(blockParameter.toString(), false)
+ ) { result: Any? ->
+ @Suppress("UNCHECKED_CAST")
+ result as JsonNode
+ BlockNumberAndHash(
+ number = ULong.fromHexString(result.get("number").asText()),
+ hash = result.get("hash").asText().decodeHex()
+ )
+ }
+ }
+
+ override fun lineaEngineImportBlocksFromBlob(blocks: List): SafeFuture {
+ return rpcClient
+ .makeRequest(
+ method = "linea_engine_importBlocksFromBlob",
+ params = blocks,
+ resultMapper = { Unit }
+ )
+ }
+
+ override fun lineaEngineForkChoiceUpdated(
+ headBlockHash: ByteArray,
+ finalizedBlockHash: ByteArray
+ ): SafeFuture {
+ return rpcClient
+ .makeRequest(
+ method = "linea_engine_importForkChoiceUpdated",
+ params = listOf(headBlockHash, finalizedBlockHash).map { it.encodeHex() },
+ resultMapper = { Unit }
+ )
+ }
+
+ companion object {
+ fun create(
+ rpcClientFactory: JsonRpcClientFactory,
+ endpoint: URI,
+ requestRetryConfig: RequestRetryConfig
+ ): ExecutionLayerClient {
+ return ExecutionLayerJsonRpcClient(
+ rpcClient = rpcClientFactory.createJsonRpcV2Client(
+ endpoints = listOf(endpoint),
+ retryConfig = requestRetryConfig,
+ requestObjectMapper = ethApiObjectMapper
+ )
+ )
+ }
+ }
+}
diff --git a/state-recover/clients/execution-layer-json-rpc-client/src/test/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClientTest.kt b/state-recover/clients/execution-layer-json-rpc-client/src/test/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClientTest.kt
new file mode 100644
index 000000000..9b4ddad4d
--- /dev/null
+++ b/state-recover/clients/execution-layer-json-rpc-client/src/test/kotlin/build/linea/staterecover/clients/el/ExecutionLayerJsonRpcClientTest.kt
@@ -0,0 +1,206 @@
+package build.linea.staterecover.clients.el
+
+import build.linea.staterecover.BlockExtraData
+import build.linea.staterecover.BlockL1RecoveredData
+import build.linea.staterecover.TransactionL1RecoveredData
+import build.linea.staterecover.clients.ExecutionLayerClient
+import com.github.tomakehurst.wiremock.WireMockServer
+import com.github.tomakehurst.wiremock.client.WireMock.containing
+import com.github.tomakehurst.wiremock.client.WireMock.post
+import com.github.tomakehurst.wiremock.client.WireMock.status
+import com.github.tomakehurst.wiremock.core.WireMockConfiguration
+import io.micrometer.core.instrument.simple.SimpleMeterRegistry
+import io.vertx.junit5.VertxExtension
+import kotlinx.datetime.Instant
+import net.consensys.decodeHex
+import net.consensys.linea.BlockNumberAndHash
+import net.consensys.linea.BlockParameter
+import net.consensys.linea.jsonrpc.client.RequestRetryConfig
+import net.consensys.linea.jsonrpc.client.VertxHttpJsonRpcClientFactory
+import net.javacrumbs.jsonunit.assertj.assertThatJson
+import org.assertj.core.api.Assertions.assertThat
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.extension.ExtendWith
+import java.net.URI
+import kotlin.time.Duration.Companion.milliseconds
+import kotlin.time.Duration.Companion.seconds
+
+@ExtendWith(VertxExtension::class)
+class ExecutionLayerJsonRpcClientTest {
+ private lateinit var client: ExecutionLayerClient
+ private lateinit var wiremock: WireMockServer
+ private lateinit var meterRegistry: SimpleMeterRegistry
+
+ @BeforeEach
+ fun setUp(vertx: io.vertx.core.Vertx) {
+ wiremock = WireMockServer(WireMockConfiguration.options().dynamicPort())
+ wiremock.start()
+
+ meterRegistry = SimpleMeterRegistry()
+ client = ExecutionLayerJsonRpcClient.create(
+ rpcClientFactory = VertxHttpJsonRpcClientFactory(vertx, meterRegistry),
+ endpoint = URI(wiremock.baseUrl()),
+ requestRetryConfig = RequestRetryConfig(
+ maxRetries = 3u,
+ backoffDelay = 10.milliseconds,
+ timeout = 2.seconds
+ )
+ )
+ }
+
+ @AfterEach
+ fun tearDown() {
+ wiremock.stop()
+ }
+
+ @Test
+ fun `getBlockNumberAndHash`() {
+ replyRequestWith(
+ 200,
+ """
+ {
+ "jsonrpc": "2.0",
+ "id": "53",
+ "result": {
+ "baseFeePerGas": "0x980b6e455",
+ "blobGasUsed": "0x0",
+ "difficulty": "0x0",
+ "excessBlobGas": "0x0",
+ "extraData": "0x6265617665726275696c642e6f7267",
+ "gasLimit": "0x1c9c380",
+ "gasUsed": "0x9428f2",
+ "hash": "0xaeb67fef93febef9db0f83b7777c1d7444919e8a0c372fd0b2a022775118150e",
+ "number": "0x13f2210",
+ "size": "0xb4f8",
+ "stateRoot": "0xf6ba9b93b98228e1d3217a7cb0fc4c5f1167854897add7b42f3fec8440234f8b",
+ "timestamp": "0x670403ff",
+ "transactions": [],
+ "transactionsRoot": "0x3059e5603e750ea6edd7e43e3f6599d8584c936dba9840ae0b3767ce01b9810c",
+ "uncles": [],
+ "withdrawals": [],
+ "withdrawalsRoot": "0xbbd14e124a749e443528b8cd53f988ee4e35a788bc1e8f60a1100d02eaa53bd0"
+ }
+ }
+ """.trimIndent()
+ )
+ client.getBlockNumberAndHash(BlockParameter.Tag.LATEST).get()
+ .also { response ->
+ assertThat(response).isEqualTo(
+ BlockNumberAndHash(
+ number = 0x13f2210u,
+ hash = "0xaeb67fef93febef9db0f83b7777c1d7444919e8a0c372fd0b2a022775118150e".decodeHex()
+ )
+ )
+ }
+
+ val requestJson = wiremock.serveEvents.serveEvents.first().request.bodyAsString
+ assertThatJson(requestJson)
+ .isEqualTo(
+ """{
+ "jsonrpc":"2.0",
+ "id":"${'$'}{json-unit.any-number}",
+ "method":"eth_getBlockByNumber",
+ "params":["${'$'}{json-unit.regex}(latest|LATEST)", false]
+ }"""
+ )
+ }
+
+ @Test
+ fun `lineaEngineImportBlocksFromBlob`() {
+ replyRequestWith(
+ 200,
+ """
+ {
+ "jsonrpc": "2.0",
+ "id": "53",
+ "result": null
+ }
+ """.trimIndent()
+ )
+ val block1 = BlockL1RecoveredData(
+ blockNumber = 0xa001u,
+ blockHash = "0xa011".decodeHex(),
+ coinbase = "0xa022".decodeHex(),
+ blockTimestamp = Instant.parse("2024-07-01T11:22:33Z"),
+ gasLimit = 0x1c9c380u,
+ difficulty = 0u,
+ extraData = BlockExtraData(beneficiary = "0x6265617665726275696c642e6f7267".decodeHex()),
+ transactions = listOf(
+ TransactionL1RecoveredData(
+ type = 0x01u,
+ nonce = 0xb010u,
+ maxPriorityFeePerGas = "b010011".toBigInteger(16),
+ maxFeePerGas = "b0100ff".toBigInteger(16),
+ gasLimit = 0xb0100aau,
+ from = "0xb011".decodeHex(),
+ to = "0xb012".decodeHex(),
+ value = 123.toBigInteger(),
+ data = "0xb013".decodeHex(),
+ accessList = listOf(
+ TransactionL1RecoveredData.AccessTuple(
+ address = "0xb014".decodeHex(),
+ storageKeys = listOf("0xb015".decodeHex(), "0xb015".decodeHex())
+ )
+ )
+ )
+ )
+ )
+
+ assertThat(client.lineaEngineImportBlocksFromBlob(listOf(block1)).get()).isEqualTo(Unit)
+
+ val requestJson = wiremock.serveEvents.serveEvents.first().request.bodyAsString
+ assertThatJson(requestJson)
+ .isEqualTo(
+ """{
+ "jsonrpc":"2.0",
+ "id":"${'$'}{json-unit.any-number}",
+ "method":"linea_engine_importBlocksFromBlob",
+ "params":[{
+ "blockNumber": "0xa001",
+ "blockHash": "0xa011",
+ "coinbase": "0xa022",
+ "blockTimestamp": "2024-07-01T11:22:33Z",
+ "gasLimit": "0x1c9c380",
+ "difficulty": "0x0",
+ "extraData": {
+ "beneficiary": "0x6265617665726275696c642e6f7267"
+ },
+ "transactions": [{
+ "type": "0x01",
+ "nonce": "0xb010",
+ "maxPriorityFeePerGas": "0xb010011",
+ "maxFeePerGas": "0xb0100ff",
+ "gasLimit": "0xb0100aa",
+ "from": "0xb011",
+ "to": "0xb012",
+ "value": "0x7b",
+ "data": "0xb013",
+ "accessList": [
+ {
+ "address": "0xb014",
+ "storageKeys": [
+ "0xb015",
+ "0xb015"
+ ]
+ }
+ ]
+ }]
+ }]
+ }"""
+ )
+ }
+
+ private fun replyRequestWith(statusCode: Int, body: String?) {
+ wiremock.stubFor(
+ post("/")
+ .withHeader("Content-Type", containing("application/json"))
+ .willReturn(
+ status(statusCode)
+ .withHeader("Content-type", "text/plain")
+ .apply { if (body != null) withBody(body) }
+ )
+ )
+ }
+}
diff --git a/traces-api-facade/app/build.gradle b/traces-api-facade/app/build.gradle
index 43d5a42cd..059e3061e 100644
--- a/traces-api-facade/app/build.gradle
+++ b/traces-api-facade/app/build.gradle
@@ -19,6 +19,7 @@ dependencies {
implementation "io.vertx:vertx-lang-kotlin"
implementation "io.vertx:vertx-config"
implementation "io.vertx:vertx-micrometer-metrics"
+ implementation "io.tmio:tuweni-units:${libs.versions.tuweni.get()}"
implementation "info.picocli:picocli:${libs.versions.picoli.get()}"
implementation "com.sksamuel.hoplite:hoplite-core:${libs.versions.hoplite.get()}"
implementation "com.sksamuel.hoplite:hoplite-toml:${libs.versions.hoplite.get()}"
diff --git a/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/app/api/RequestHandlersV1.kt b/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/app/api/RequestHandlersV1.kt
index 07d2fc1fe..37504ed64 100644
--- a/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/app/api/RequestHandlersV1.kt
+++ b/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/app/api/RequestHandlersV1.kt
@@ -10,6 +10,7 @@ import com.github.michaelbull.result.mapError
import io.vertx.core.Future
import io.vertx.core.json.JsonObject
import io.vertx.ext.auth.User
+import net.consensys.decodeHex
import net.consensys.linea.BlockNumberAndHash
import net.consensys.linea.TracesConflationServiceV1
import net.consensys.linea.TracesCountingServiceV1
@@ -21,12 +22,11 @@ import net.consensys.linea.jsonrpc.JsonRpcRequest
import net.consensys.linea.jsonrpc.JsonRpcRequestHandler
import net.consensys.linea.jsonrpc.JsonRpcRequestMapParams
import net.consensys.linea.jsonrpc.JsonRpcSuccessResponse
-import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
private fun parseBlockNumberAndHash(json: JsonObject) = BlockNumberAndHash(
json.getString("blockNumber").toULong(),
- Bytes32.fromHexString(json.getString("blockHash"))
+ json.getString("blockHash").decodeHex()
)
internal fun validateParams(request: JsonRpcRequest): Result {
diff --git a/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/repository/FilesystemTracesRepositoryV1.kt b/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/repository/FilesystemTracesRepositoryV1.kt
index cdd1b4fd2..79b6c34ed 100644
--- a/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/repository/FilesystemTracesRepositoryV1.kt
+++ b/traces-api-facade/app/src/main/kotlin/net/consensys/linea/traces/repository/FilesystemTracesRepositoryV1.kt
@@ -16,6 +16,7 @@ import net.consensys.linea.traces.TracesFileNameSupplier
import net.consensys.linea.traces.TracesFiles
import org.apache.logging.log4j.LogManager
import org.apache.logging.log4j.Logger
+import org.apache.tuweni.bytes.Bytes32
import tech.pegasys.teku.infrastructure.async.SafeFuture
import java.nio.file.Path
@@ -40,7 +41,7 @@ class FilesystemTracesRepositoryV1(
private fun findTracesFile(block: TracesFileIndex): Result {
val tracesFileName = fileNameSupplier(
block.number,
- block.hash,
+ Bytes32.wrap(block.hash),
block.version,
config.tracesFileExtension
)