Compare commits

..

12 Commits

Author  SHA1  Message  Date
yinyiqian1  2f25cb60c7  Merge branch 'develop' into account_mpts  2025-10-28 11:23:32 -04:00
Ayaz Salikhov  890279b15e  Merge branch 'develop' into account_mpts  2025-10-24 08:53:50 +01:00
yinyiqian1  022f2a44b4  resolve new comments  2025-10-23 23:55:14 -04:00
yinyiqian1  187dd4ae90  fix doxygen  2025-10-21 10:35:14 -04:00
yinyiqian1  2c3e4dec74  Merge branch 'develop' into account_mpts  2025-10-20 21:13:29 -04:00
yinyiqian1  f2a40726c7  Merge branch 'develop' into account_mpts  2025-10-17 15:48:22 -04:00
yinyiqian1  e8a64d838c  Merge branch 'develop' into account_mpts  2025-10-16 14:27:13 -04:00
yinyiqian1  c351ace6ec  Merge branch 'develop' into account_mpts  2025-10-16 12:16:16 -04:00
yinyiqian1  05dd0b3412  Merge branch 'develop' into account_mpts  2025-10-15 13:49:34 -04:00
yinyiqian1  685fab5d17  resolve comments  2025-10-15 13:18:09 -04:00
Ayaz Salikhov  79fc0b10c3  Merge branch 'develop' into account_mpts  2025-10-08 17:46:31 +01:00
yinyiqian1  676f7f0d6a  feat: Support account MPToken  2025-10-07 19:21:53 -04:00
29 changed files with 190 additions and 235 deletions

View File

@@ -4,7 +4,7 @@ import json
LINUX_OS = ["heavy", "heavy-arm64"]
LINUX_CONTAINERS = [
'{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
'{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
]
LINUX_COMPILERS = ["gcc", "clang"]

View File

@@ -44,7 +44,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Download Clio binary from artifact
if: ${{ inputs.artifact_name != null }}

View File

@@ -45,7 +45,7 @@ jobs:
build_type: [Release, Debug]
container:
[
'{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }',
'{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }',
]
static: [true]
@@ -75,7 +75,7 @@ jobs:
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
conan_profile: gcc
build_type: Debug
download_ccache: true
@@ -94,7 +94,7 @@ jobs:
uses: ./.github/workflows/reusable-build.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
conan_profile: gcc
build_type: Release
download_ccache: true
@@ -111,10 +111,10 @@ jobs:
needs: build-and-test
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:

View File

@@ -17,15 +17,15 @@ jobs:
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -67,7 +67,7 @@ jobs:
needs: build
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
steps:
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
@@ -90,7 +90,7 @@ jobs:
issues: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Create an issue
uses: ./.github/actions/create-issue

View File

@@ -27,7 +27,7 @@ jobs:
if: github.event_name != 'push' || contains(github.event.head_commit.message, 'clang-tidy auto fixes')
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
permissions:
contents: write
@@ -35,12 +35,12 @@ jobs:
pull-requests: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true

View File

@@ -14,16 +14,16 @@ jobs:
build:
runs-on: ubuntu-latest
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
lfs: true
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true

View File

@@ -39,17 +39,17 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
- os: heavy
conan_profile: gcc
build_type: Debug
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
- os: heavy
conan_profile: gcc.ubsan
build_type: Release
static: false
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
uses: ./.github/workflows/reusable-build-test.yml
with:
@@ -73,7 +73,7 @@ jobs:
include:
- os: heavy
conan_profile: clang
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
static: true
- os: macos15
conan_profile: apple-clang
@@ -93,29 +93,18 @@ jobs:
targets: all
analyze_build_time: true
get_date:
name: Get Date
runs-on: ubuntu-latest
outputs:
date: ${{ steps.get_date.outputs.date }}
steps:
- name: Get current date
id: get_date
run: |
echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
nightly_release:
needs: [build-and-test, get_date]
needs: build-and-test
uses: ./.github/workflows/reusable-release.yml
with:
delete_pattern: "nightly-*"
overwrite_release: true
prerelease: true
title: "Clio development build (`nightly-${{ needs.get_date.outputs.date }}`)"
version: nightly-${{ needs.get_date.outputs.date }}
title: "Clio development (nightly) build"
version: nightly
header: >
> **Note:** Please remember that this is a development release and it is not recommended for production use.
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly-${{ needs.get_date.outputs.date }}>
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
generate_changelog: false
draft: false
@@ -141,7 +130,7 @@ jobs:
issues: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Create an issue
uses: ./.github/actions/create-issue

View File

@@ -11,4 +11,4 @@ jobs:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-pre-commit:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'

View File

@@ -29,7 +29,7 @@ jobs:
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
uses: ./.github/workflows/reusable-build-test.yml
with:
@@ -49,7 +49,7 @@ jobs:
needs: build-and-test
uses: ./.github/workflows/reusable-release.yml
with:
delete_pattern: ""
overwrite_release: false
prerelease: ${{ contains(github.ref_name, '-') }}
title: "${{ github.ref_name }}"
version: "${{ github.ref_name }}"

View File

@@ -86,7 +86,7 @@ jobs:
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0
# We need to fetch tags to have correct version in the release
@@ -95,7 +95,7 @@ jobs:
ref: ${{ github.ref }}
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: ${{ !inputs.download_ccache }}

View File

@@ -3,10 +3,10 @@ name: Make release
on:
workflow_call:
inputs:
delete_pattern:
description: "Pattern to delete previous releases"
overwrite_release:
description: "Overwrite the current release and tag"
required: true
type: string
type: boolean
prerelease:
description: "Create a prerelease"
@@ -42,7 +42,7 @@ jobs:
release:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
@@ -51,12 +51,12 @@ jobs:
contents: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true
@@ -94,19 +94,14 @@ jobs:
name: release_notes_${{ inputs.version }}
path: "${RUNNER_TEMP}/release_notes.md"
- name: Remove previous release with a pattern
if: ${{ github.event_name != 'pull_request' && inputs.delete_pattern != '' }}
- name: Remove current release and tag
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
shell: bash
env:
DELETE_PATTERN: ${{ inputs.delete_pattern }}
RELEASE_VERSION: ${{ inputs.version }}
run: |
RELEASES_TO_DELETE=$(gh release list --limit 50 --repo "${GH_REPO}" | grep -E "${DELETE_PATTERN}" | awk -F'\t' '{print $3}' || true)
if [ -n "$RELEASES_TO_DELETE" ]; then
for RELEASE in $RELEASES_TO_DELETE; do
echo "Deleting release: $RELEASE"
gh release delete "$RELEASE" --repo "${GH_REPO}" --yes --cleanup-tag
done
fi
gh release delete "${RELEASE_VERSION}" --yes || true
git push origin :"${RELEASE_VERSION}" || true
- name: Publish release
if: ${{ github.event_name != 'pull_request' }}
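The pattern-based cleanup shown in this hunk can also be run by hand against a repository. A minimal standalone sketch of the same approach, not taken from the workflow itself: the repository name and the delete pattern below are placeholders, and the field handling simply mirrors the `awk -F'\t' '{print $3}'` used in the step above.

```sh
#!/usr/bin/env bash
# Delete every release whose listing matches a pattern, mirroring the
# workflow step above. GH_REPO and DELETE_PATTERN are placeholders.
set -euo pipefail
GH_REPO="XRPLF/clio"
DELETE_PATTERN="nightly-"

# gh release list prints tab-separated columns; the tag name is field 3.
RELEASES_TO_DELETE=$(gh release list --limit 50 --repo "${GH_REPO}" \
  | grep -E "${DELETE_PATTERN}" | awk -F'\t' '{print $3}' || true)

for RELEASE in ${RELEASES_TO_DELETE}; do
  echo "Deleting release: ${RELEASE}"
  gh release delete "${RELEASE}" --repo "${GH_REPO}" --yes --cleanup-tag
done
```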

View File

@@ -50,7 +50,7 @@ jobs:
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@ea9970b7c211b18f4c8bcdb28c29f5711752029f
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
fetch-depth: 0

View File

@@ -44,7 +44,7 @@ jobs:
uses: ./.github/workflows/reusable-build-test.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a" }'
container: '{ "image": "ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d" }'
download_ccache: false
upload_ccache: false
conan_profile: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}

View File

@@ -52,7 +52,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -90,7 +90,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -128,7 +128,7 @@ jobs:
needs: [repo, gcc-amd64, gcc-arm64]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -179,7 +179,7 @@ jobs:
needs: repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -215,7 +215,7 @@ jobs:
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -246,7 +246,7 @@ jobs:
needs: [repo, gcc-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -277,7 +277,7 @@ jobs:
needs: [repo, tools-amd64, tools-arm64]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Get changed files
id: changed-files
@@ -312,7 +312,7 @@ jobs:
needs: [repo, tools-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -334,7 +334,7 @@ jobs:
needs: [repo, gcc-merge, clang, tools-merge]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: ./.github/actions/build-docker-image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -46,7 +46,7 @@ jobs:
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Calculate conan matrix
id: set-matrix
@@ -69,10 +69,10 @@ jobs:
CONAN_PROFILE: ${{ matrix.compiler }}${{ matrix.sanitizer_ext }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@8abb0722cbff83a9a2dc7d06c473f7a4964b7382
uses: XRPLF/actions/.github/actions/prepare-runner@7951b682e5a2973b28b0719a72f01fc4b0d0c34f
with:
disable_ccache: true

View File

@@ -11,10 +11,7 @@
#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
docs/doxygen-awesome-theme/.*
)$
exclude: ^(docs/doxygen-awesome-theme/|conan\.lock$)
repos:
# `pre-commit sample-config` default hooks

View File

@@ -55,4 +55,4 @@
]
},
"config_requires": []
}
}

View File

@@ -55,11 +55,8 @@ RUN pip install -q --no-cache-dir \
# lxml 6.0.0 is not compatible with our image
'lxml<6.0.0' \
cmake \
conan==2.22.1 \
gcovr \
# We're adding pre-commit to this image as well,
# because clang-tidy workflow requires it
pre-commit
conan==2.20.1 \
gcovr
# Install LLVM tools
ARG LLVM_TOOLS_VERSION=20

View File

@@ -5,17 +5,17 @@ It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but c
The image is based on Ubuntu 20.04 and contains:
- ccache 4.12.1
- ccache 4.11.3
- Clang 19
- ClangBuildAnalyzer 1.6.0
- Conan 2.22.1
- Doxygen 1.15.0
- Conan 2.20.1
- Doxygen 1.14
- GCC 15.2.0
- GDB 16.3
- gh 2.82.1
- git-cliff 2.10.1
- mold 2.40.4
- Python 3.8
- gh 2.74
- git-cliff 2.9.1
- mold 2.40.1
- Python 3.13
- and some other useful tools
Conan is set up to build Clio without any additional steps.
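A minimal sketch of the local use mentioned above, assuming a Clio checkout in the current directory; the mount path and working directory are illustrative, and the tag is one of the two that appear elsewhere in this compare.

```sh
# Start an interactive shell in the CI image with the current Clio
# checkout mounted at /clio; Conan comes preconfigured in the image.
docker run -it --rm \
  -v "$PWD":/clio -w /clio \
  ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d \
  bash
```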

View File

@@ -1,6 +1,6 @@
services:
clio_develop:
image: ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
image: ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
volumes:
- clio_develop_conan_data:/root/.conan2/p
- clio_develop_ccache:/root/.ccache
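A hedged usage sketch for the service above: the service and volume names come from the snippet, but the command itself is an assumption rather than something documented on this page. Using `run` starts a one-off shell while still attaching the named volumes, so Conan packages and the ccache persist across invocations.

```sh
# Open a throwaway shell in the development container; dependency and
# ccache data live in the named volumes declared in the compose file.
docker compose run --rm clio_develop bash
```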

View File

@@ -8,7 +8,7 @@ ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ARG BUILD_VERSION=0
ARG BUILD_VERSION=2
RUN apt-get update \
&& apt-get install -y --no-install-recommends --no-install-suggests \
@@ -24,7 +24,7 @@ RUN apt-get update \
WORKDIR /tmp
ARG MOLD_VERSION=2.40.4
ARG MOLD_VERSION=2.40.1
RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v${MOLD_VERSION}.tar.gz" \
&& tar xf "v${MOLD_VERSION}.tar.gz" \
&& cd "mold-${MOLD_VERSION}" \
@@ -34,7 +34,7 @@ RUN wget --progress=dot:giga "https://github.com/rui314/mold/archive/refs/tags/v
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG CCACHE_VERSION=4.12.1
ARG CCACHE_VERSION=4.11.3
RUN wget --progress=dot:giga "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
&& tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
&& cd "ccache-${CCACHE_VERSION}" \
@@ -51,7 +51,7 @@ RUN apt-get update \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ARG DOXYGEN_VERSION=1.15.0
ARG DOXYGEN_VERSION=1.14.0
RUN wget --progress=dot:giga "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \
@@ -71,13 +71,13 @@ RUN wget --progress=dot:giga "https://github.com/aras-p/ClangBuildAnalyzer/archi
&& ninja install \
&& rm -rf /tmp/* /var/tmp/*
ARG GIT_CLIFF_VERSION=2.10.1
ARG GIT_CLIFF_VERSION=2.9.1
RUN wget --progress=dot:giga "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
&& tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
&& mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/local/bin/git-cliff \
&& rm -rf /tmp/* /var/tmp/*
ARG GH_VERSION=2.82.1
ARG GH_VERSION=2.74.0
RUN wget --progress=dot:giga "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/local/bin/gh \

View File

@@ -191,7 +191,7 @@ Open the `index.html` file in your browser to see the documentation pages.
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it ghcr.io/xrplf/clio-ci:c117f470f2ef954520ab5d1c8a5ed2b9e68d6f8a
docker run -it ghcr.io/xrplf/clio-ci:b2be4b51d1d81548ca48e2f2b8f67356b880c96d
git clone https://github.com/XRPLF/clio
cd clio
```
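A possible continuation inside the container, sketching a generic Conan 2 + CMake flow. The flags and generated paths below are assumptions (they depend on the project's Conan layout), not commands taken from this page; the repository's build documentation is authoritative.

```sh
# Illustrative build steps after the clone above; adjust flags and the
# toolchain path to match the project's Conan layout.
conan install . --output-folder build --build missing --settings build_type=Release
cmake -B build -S . \
  -DCMAKE_TOOLCHAIN_FILE=build/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Release
cmake --build build --parallel "$(nproc)"
```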

View File

@@ -36,7 +36,9 @@
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STAmount.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/UintTypes.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>

View File

@@ -64,8 +64,8 @@ public:
std::string issuer;
uint32_t sequence{};
std::optional<uint16_t> transferFee;
std::optional<uint8_t> assetScale;
std::optional<uint16_t> transferFee{};
std::optional<uint8_t> assetScale{};
std::optional<std::uint64_t> maximumAmount;
std::optional<std::uint64_t> outstandingAmount;

View File

@@ -29,7 +29,6 @@
#include <boost/json/conversion.hpp>
#include <boost/json/object.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_from.hpp>
#include <boost/json/value_to.hpp>
#include <xrpl/basics/strHex.h>
#include <xrpl/protocol/AccountID.h>
@@ -37,7 +36,9 @@
#include <xrpl/protocol/LedgerFormats.h>
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/SField.h>
#include <xrpl/protocol/STAmount.h>
#include <xrpl/protocol/STLedgerEntry.h>
#include <xrpl/protocol/UintTypes.h>
#include <xrpl/protocol/jss.h>
#include <cstdint>

View File

@@ -26,7 +26,6 @@
#include "util/NameGenerator.hpp"
#include "util/TestObject.hpp"
#include <boost/asio/spawn.hpp>
#include <boost/json/parse.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_to.hpp>
@@ -41,7 +40,6 @@
#include <xrpl/protocol/STObject.h>
#include <cstdint>
#include <functional>
#include <optional>
#include <string>
#include <vector>
@@ -138,54 +136,48 @@ static auto
generateTestValuesForInvalidParamsTest()
{
return std::vector<AccountMPTokenIssuancesParamTestCaseBundle>{
{.testName = "NonHexLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashMalformed"},
{.testName = "NonStringLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashNotString"},
{.testName = "InvalidLedgerIndexString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledgerIndexMalformed"},
{.testName = "MarkerNotString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "markerNotString"},
{.testName = "InvalidMarkerContent",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Malformed cursor."},
{.testName = "AccountMissing",
.testJson = R"JSON({ "limit": 10 })JSON",
.expectedError = "invalidParams",
.expectedErrorMessage = "Required field 'account' missing"},
{.testName = "AccountNotString",
.testJson = R"JSON({ "account": 123 })JSON",
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "AccountMalformed",
.testJson = R"JSON({ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp" })JSON",
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "LimitNotInteger",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitNegative",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitZero",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitTypeInvalid",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."}
{"NonHexLedgerHash",
fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
"invalidParams",
"ledger_hashMalformed"},
{"NonStringLedgerHash",
fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
"invalidParams",
"ledger_hashNotString"},
{"InvalidLedgerIndexString",
fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
"invalidParams",
"ledgerIndexMalformed"},
{"MarkerNotString",
fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
"invalidParams",
"markerNotString"},
{"InvalidMarkerContent",
fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
"invalidParams",
"Malformed cursor."},
{"AccountMissing", R"JSON({ "limit": 10 })JSON", "invalidParams", "Required field 'account' missing"},
{"AccountNotString", R"JSON({ "account": 123 })JSON", "actMalformed", "Account malformed."},
{"AccountMalformed",
R"JSON({ "account": "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp" })JSON",
"actMalformed",
"Account malformed."},
{"LimitNotInteger",
fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitNegative",
fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitZero",
fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitTypeInvalid",
fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."}
};
}
@@ -444,7 +436,7 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, DefaultParameters)
TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
{
constexpr int kLIMIT = 20;
constexpr int limit = 20;
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(ledgerHeader));
@@ -457,7 +449,7 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
std::vector<Blob> bbs;
for (int i = 0; i < 50; ++i) {
indexes.emplace_back(kISSUANCE_INDEX1);
indexes.emplace_back(ripple::uint256{kISSUANCE_INDEX1});
auto const issuance = createMptIssuanceObject(kACCOUNT, i);
bbs.push_back(issuance.getSerializer().peekData());
}
@@ -470,7 +462,7 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(3);
runSpawn([this, kLIMIT](auto yield) {
runSpawn([this, limit](auto yield) {
auto const input = json::parse(
fmt::format(
R"JSON({{
@@ -478,7 +470,7 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
"limit": {}
}})JSON",
kACCOUNT,
kLIMIT
limit
)
);
@@ -487,7 +479,7 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, UseLimit)
ASSERT_TRUE(output);
auto const resultJson = (*output.result).as_object();
EXPECT_EQ(resultJson.at("mpt_issuances").as_array().size(), kLIMIT);
EXPECT_EQ(resultJson.at("mpt_issuances").as_array().size(), limit);
ASSERT_TRUE(resultJson.contains("marker"));
EXPECT_THAT(boost::json::value_to<std::string>(resultJson.at("marker")), EndsWith(",0"));
});
@@ -542,13 +534,11 @@ TEST_F(RPCAccountMPTokenIssuancesHandlerTest, MarkerOutput)
EXPECT_CALL(*backend_, doFetchLedgerObject).Times(3);
std::vector<ripple::uint256> indexes;
indexes.reserve(10);
for (int i = 0; i < 10; ++i) {
indexes.emplace_back(kISSUANCE_INDEX1);
}
std::vector<Blob> bbs;
bbs.reserve(kLIMIT);
for (int i = 0; i < kLIMIT; ++i) {
bbs.push_back(createMptIssuanceObject(kACCOUNT, i).getSerializer().peekData());
}

View File

@@ -112,54 +112,48 @@ static auto
generateTestValuesForInvalidParamsTest()
{
return std::vector<AccountMPTokensParamTestCaseBundle>{
{.testName = "NonHexLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashMalformed"},
{.testName = "NonStringLedgerHash",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledger_hashNotString"},
{.testName = "InvalidLedgerIndexString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "ledgerIndexMalformed"},
{.testName = "MarkerNotString",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "markerNotString"},
{.testName = "InvalidMarkerContent",
.testJson = fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Malformed cursor."},
{.testName = "AccountMissing",
.testJson = R"JSON({ "limit": 10 })JSON",
.expectedError = "invalidParams",
.expectedErrorMessage = "Required field 'account' missing"},
{.testName = "AccountNotString",
.testJson = R"JSON({ "account": 123 })JSON",
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "AccountMalformed",
.testJson = fmt::format(R"JSON({{ "account": "{}" }})JSON", "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"),
.expectedError = "actMalformed",
.expectedErrorMessage = "Account malformed."},
{.testName = "LimitNotInteger",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitNegative",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitZero",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."},
{.testName = "LimitTypeInvalid",
.testJson = fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
.expectedError = "invalidParams",
.expectedErrorMessage = "Invalid parameters."}
{"NonHexLedgerHash",
fmt::format(R"JSON({{ "account": "{}", "ledger_hash": "xxx" }})JSON", kACCOUNT),
"invalidParams",
"ledger_hashMalformed"},
{"NonStringLedgerHash",
fmt::format(R"JSON({{ "account": "{}", "ledger_hash": 123 }})JSON", kACCOUNT),
"invalidParams",
"ledger_hashNotString"},
{"InvalidLedgerIndexString",
fmt::format(R"JSON({{ "account": "{}", "ledger_index": "notvalidated" }})JSON", kACCOUNT),
"invalidParams",
"ledgerIndexMalformed"},
{"MarkerNotString",
fmt::format(R"JSON({{ "account": "{}", "marker": 9 }})JSON", kACCOUNT),
"invalidParams",
"markerNotString"},
{"InvalidMarkerContent",
fmt::format(R"JSON({{ "account": "{}", "marker": "123invalid" }})JSON", kACCOUNT),
"invalidParams",
"Malformed cursor."},
{"AccountMissing", R"JSON({ "limit": 10 })JSON", "invalidParams", "Required field 'account' missing"},
{"AccountNotString", R"JSON({ "account": 123 })JSON", "actMalformed", "Account malformed."},
{"AccountMalformed",
fmt::format(R"JSON({{ "account": "{}" }})JSON", "rf1BiGeXwwQoi8Z2ueFYTEXSwuJYfV2Jp"),
"actMalformed",
"Account malformed."},
{"LimitNotInteger",
fmt::format(R"JSON({{ "account": "{}", "limit": "t" }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitNegative",
fmt::format(R"JSON({{ "account": "{}", "limit": -1 }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitZero",
fmt::format(R"JSON({{ "account": "{}", "limit": 0 }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."},
{"LimitTypeInvalid",
fmt::format(R"JSON({{ "account": "{}", "limit": true }})JSON", kACCOUNT),
"invalidParams",
"Invalid parameters."}
};
}
@@ -394,7 +388,7 @@ TEST_F(RPCAccountMPTokensHandlerTest, DefaultParameters)
TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
{
constexpr int kLIMIT = 20;
constexpr int limit = 20;
auto ledgerHeader = createLedgerHeader(kLEDGER_HASH, 30);
ON_CALL(*backend_, fetchLedgerBySequence).WillByDefault(Return(ledgerHeader));
@@ -407,7 +401,7 @@ TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
std::vector<Blob> bbs;
for (int i = 0; i < 50; ++i) {
indexes.emplace_back(kTOKEN_INDEX1);
indexes.emplace_back(ripple::uint256{kTOKEN_INDEX1});
auto const token = createMpTokenObject(kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), i, 0, std::nullopt);
bbs.push_back(token.getSerializer().peekData());
}
@@ -420,7 +414,7 @@ TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
ON_CALL(*backend_, doFetchLedgerObjects).WillByDefault(Return(bbs));
EXPECT_CALL(*backend_, doFetchLedgerObjects).Times(3);
runSpawn([this, kLIMIT](auto yield) {
runSpawn([this, limit](auto yield) {
auto const input = json::parse(
fmt::format(
R"JSON({{
@@ -428,7 +422,7 @@ TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
"limit": {}
}})JSON",
kACCOUNT,
kLIMIT
limit
)
);
@@ -437,7 +431,7 @@ TEST_F(RPCAccountMPTokensHandlerTest, UseLimit)
ASSERT_TRUE(output);
auto const resultJson = (*output.result).as_object();
EXPECT_EQ(resultJson.at("mptokens").as_array().size(), kLIMIT);
EXPECT_EQ(resultJson.at("mptokens").as_array().size(), limit);
ASSERT_TRUE(resultJson.contains("marker"));
EXPECT_THAT(boost::json::value_to<std::string>(resultJson.at("marker")), EndsWith(",0"));
});
@@ -493,7 +487,6 @@ TEST_F(RPCAccountMPTokensHandlerTest, MarkerOutput)
ON_CALL(*backend_, doFetchLedgerObject(accountKk, _, _)).WillByDefault(Return(Blob{'f', 'a', 'k', 'e'}));
std::vector<Blob> bbs;
bbs.reserve(kLIMIT);
for (int i = 0; i < kLIMIT; ++i) {
bbs.push_back(createMpTokenObject(kACCOUNT, ripple::uint192(kISSUANCE_ID_HEX), i, 0, std::nullopt)
.getSerializer()
@@ -502,7 +495,6 @@ TEST_F(RPCAccountMPTokensHandlerTest, MarkerOutput)
EXPECT_CALL(*backend_, doFetchLedgerObjects).WillOnce(Return(bbs));
std::vector<ripple::uint256> indexes1;
indexes1.reserve(10);
for (int i = 0; i < 10; ++i) {
indexes1.emplace_back(kTOKEN_INDEX1);
}

View File

@@ -37,16 +37,15 @@
#include <boost/asio/io_context.hpp>
#include <boost/asio/post.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/asio/steady_timer.hpp>
#include <boost/beast/core/flat_buffer.hpp>
#include <boost/beast/http/status.hpp>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <memory>
#include <mutex>
#include <ranges>
#include <string>
#include <thread>
@@ -109,39 +108,32 @@ TEST_F(WebWsConnectionTests, WasUpgraded)
});
}
TEST_F(WebWsConnectionTests, DisconnectClientOnInactivity)
// This test is either flaky or incorrect
// see https://github.com/XRPLF/clio/issues/2700
TEST_F(WebWsConnectionTests, DISABLED_DisconnectClientOnInactivity)
{
boost::asio::io_context clientCtx;
auto work = boost::asio::make_work_guard(clientCtx);
std::thread clientThread{[&clientCtx]() { clientCtx.run(); }};
std::mutex mutex;
std::condition_variable cv;
bool finished{false};
util::spawn(clientCtx, [&](boost::asio::yield_context yield) {
util::spawn(clientCtx, [&work, this](boost::asio::yield_context yield) {
auto expectedSuccess =
wsClient_.connect("localhost", httpServer_.port(), yield, std::chrono::milliseconds{100});
[&]() { ASSERT_TRUE(expectedSuccess.has_value()) << expectedSuccess.error().message(); }();
std::unique_lock lock{mutex};
// Wait for 2 seconds to not block the test infinitely in case of failure
auto const gotNotified = cv.wait_for(lock, std::chrono::seconds{2}, [&finished]() { return finished; });
[&]() { EXPECT_TRUE(gotNotified); }();
boost::asio::steady_timer timer{yield.get_executor(), std::chrono::milliseconds{5}};
timer.async_wait(yield);
work.reset();
});
runSpawn([&, this](boost::asio::yield_context yield) {
runSpawn([this](boost::asio::yield_context yield) {
auto wsConnection = acceptConnection(yield);
wsConnection->setTimeout(std::chrono::milliseconds{1});
// Client will not respond to pings because there is no reading operation scheduled for it.
auto const receivedMessage = wsConnection->receive(yield);
{
std::unique_lock const lock{mutex};
finished = true;
cv.notify_one();
}
auto const start = std::chrono::steady_clock::now();
auto const receivedMessage = wsConnection->receive(yield);
auto const end = std::chrono::steady_clock::now();
EXPECT_LT(end - start, std::chrono::milliseconds{4}); // Should be 2 ms, double it in case of slow CI.
EXPECT_FALSE(receivedMessage.has_value());
EXPECT_EQ(receivedMessage.error().value(), boost::asio::error::no_permission);