Compare commits

..

17 Commits

Author | SHA1 | Message | Date
Bart Thomee | 4306d9ccc3 | Restore freshening caches of tree node cache | 2025-09-17 12:17:35 -04:00
Bart Thomee | 1a4d9732ca | Merge branch 'develop' into bthomee/disable-cache | 2025-09-17 11:43:06 -04:00
Bart | aad6edb6b1 | Merge branch 'develop' into bthomee/disable-cache | 2025-08-13 08:03:40 -04:00
Bart | a4a1c4eecf | Merge branch 'develop' into bthomee/disable-cache | 2025-07-03 15:43:50 -04:00
Bart | fca6a8768f | Merge branch 'develop' into bthomee/disable-cache | 2025-06-02 12:02:43 -04:00
Bart | d96c4164b9 | Merge branch 'develop' into bthomee/disable-cache | 2025-05-22 09:18:07 -04:00
Bart Thomee | 965fc75e8a | Reserve vector size | 2025-05-20 10:07:12 -04:00
Bart Thomee | 2fa1c711d3 | Removed unused config values | 2025-05-20 09:50:13 -04:00
Bart Thomee | 4650e7d2c6 | Removed unused caches from SHAMapStoreImp | 2025-05-20 09:49:55 -04:00
Bart Thomee | a213127852 | Remove cache from SHAMapStoreImp | 2025-05-19 16:59:43 -04:00
Bart Thomee | 6e7537dada | Remove cache from DatabaseNodeImp | 2025-05-19 16:51:32 -04:00
Bart Thomee | 0777f7c64b | Merge branch 'develop' into bthomee/disable-cache | 2025-05-19 16:37:11 -04:00
Bart Thomee | 39bfcaf95c | Merge branch 'develop' into bthomee/disable-cache | 2025-05-17 18:26:07 -04:00
Bart Thomee | 61c9a19868 | Merge branch 'develop' into bthomee/disable-cache | 2025-05-07 11:02:43 -04:00
Bart Thomee | d01851bc5a | Only disable the database cache | 2025-04-01 13:24:18 -04:00
Bart Thomee | d1703842e7 | Fully disable cache | 2025-04-01 11:41:20 -04:00
Bart Thomee | 8d31b1739d | TEST: Disable tagged cache to measure performance | 2025-03-28 13:21:19 -04:00
1050 changed files with 5263 additions and 245309 deletions

View File

@@ -33,6 +33,5 @@ slack_app: false
ignore:
- "src/test/"
- "src/tests/"
- "include/xrpl/beast/test/"
- "include/xrpl/beast/unit_test/"

.github/CODEOWNERS
View File

@@ -1,2 +1,8 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

View File

@@ -4,10 +4,6 @@ description: "Install Conan dependencies, optionally forcing a rebuild of all de
# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
verbosity:
description: "The build verbosity."
required: false
default: "verbose"
build_dir:
description: "The directory where to build."
required: true
@@ -24,21 +20,14 @@ runs:
steps:
- name: Install Conan dependencies
shell: bash
env:
BUILD_DIR: ${{ inputs.build_dir }}
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
BUILD_TYPE: ${{ inputs.build_type }}
VERBOSITY: ${{ inputs.verbosity }}
run: |
echo 'Installing dependencies.'
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
mkdir -p ${{ inputs.build_dir }}
cd ${{ inputs.build_dir }}
conan install \
--output-folder . \
--build="${BUILD_OPTION}" \
--options:host='&:tests=True' \
--options:host='&:xrpld=True' \
--settings:all build_type="${BUILD_TYPE}" \
--conf:all tools.build:verbosity="${VERBOSITY}" \
--conf:all tools.compilation:verbosity="${VERBOSITY}" \
--build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
--options:host '&:tests=True' \
--options:host '&:xrpld=True' \
--settings:all build_type=${{ inputs.build_type }} \
..

.github/actions/build-test/action.yml
View File

@@ -0,0 +1,96 @@
# This action builds and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test
description: "Build and test the binary."
# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
build_dir:
description: "The directory where to build."
required: true
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: false
default: "false"
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
default: ""
cmake_target:
description: "The CMake target to build."
required: true
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false
default: ""
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
runs:
using: composite
steps:
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Configuring CMake.'
cmake \
-G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
${{ inputs.cmake_args }} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Building binary.'
cmake \
--build . \
--config ${{ inputs.build_type }} \
--parallel $(nproc) \
--target ${{ inputs.cmake_target }}
- name: Check linking
if: ${{ inputs.os == 'linux' }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Checking linking.'
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verify voidstar
if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
shell: bash
working-directory: ${{ inputs.build_dir }}
run: |
echo 'Verifying presence of instrumentation.'
./rippled --version | grep libvoidstar
- name: Test the binary
if: ${{ inputs.build_only == 'false' }}
shell: bash
working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
run: |
echo 'Testing binary.'
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
- name: Upload coverage report
if: ${{ inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ inputs.codecov_token }}
verbose: true

View File

@@ -1,43 +0,0 @@
name: Print build environment
description: "Print environment and some tooling versions"
runs:
using: composite
steps:
- name: Check configuration (Windows)
if: ${{ runner.os == 'Windows' }}
shell: bash
run: |
echo 'Checking environment variables.'
set
echo 'Checking CMake version.'
cmake --version
echo 'Checking Conan version.'
conan --version
- name: Check configuration (Linux and macOS)
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
shell: bash
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking CMake version.'
cmake --version
echo 'Checking compiler version.'
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
echo 'Checking Conan version.'
conan --version
echo 'Checking Ninja version.'
ninja --version
echo 'Checking nproc version.'
nproc --version

View File

@@ -35,12 +35,9 @@ runs:
- name: Set up Conan remote
shell: bash
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
run: |
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
echo 'Listing Conan remotes.'
conan remote list

View File

@@ -72,15 +72,15 @@ It generates many files of [results](results):
desired as described above. In a perfect repo, this file will be
empty.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../workflows/check-levelization.yml) to validate
that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
between modules where there are no loops as they actually exist, as
opposed to how they are desired as described above.
This file is committed to the repo, and is used by the [levelization
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
Github workflow](../../workflows/check-levelization.yml) to validate
that nothing changed.
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
- [`levelization.yml`](../../workflows/check-levelization.yml)
Github Actions workflow to test that levelization loops haven't
changed. Unfortunately, if changes are detected, it can't tell if
they are improvements or not, so if you have resolved any issues or

View File

@@ -7,6 +7,9 @@ Loop: test.jtx test.unit_test
Loop: xrpld.app xrpld.core
xrpld.app > xrpld.core
Loop: xrpld.app xrpld.ledger
xrpld.app > xrpld.ledger
Loop: xrpld.app xrpld.overlay
xrpld.overlay > xrpld.app

View File

@@ -2,10 +2,6 @@ libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.ledger > xrpl.basics
libxrpl.ledger > xrpl.json
libxrpl.ledger > xrpl.ledger
libxrpl.ledger > xrpl.protocol
libxrpl.net > xrpl.basics
libxrpl.net > xrpl.net
libxrpl.protocol > xrpl.basics
@@ -25,11 +21,11 @@ test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.ledger
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
@@ -48,8 +44,8 @@ test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.consensus > xrpl.ledger
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
@@ -67,9 +63,9 @@ test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.ledger
test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
@@ -79,7 +75,7 @@ test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpl.ledger
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
@@ -138,10 +134,7 @@ test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.net
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
xrpl.net > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
@@ -158,7 +151,6 @@ xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.ledger
xrpld.app > xrpl.net
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
@@ -171,6 +163,9 @@ xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
@@ -191,9 +186,9 @@ xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource

View File

@@ -74,14 +74,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
continue
# RHEL:
# - 9 using GCC 12: Debug and Unity on linux/amd64.
# - 10 using Clang: Release and no Unity on linux/amd64.
# - 9.4 using GCC 12: Debug and Unity on linux/amd64.
# - 9.6 using Clang: Release and no Unity on linux/amd64.
if os['distro_name'] == 'rhel':
skip = True
if os['distro_version'] == '9':
if os['distro_version'] == '9.4':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
elif os['distro_version'] == '10':
elif os['distro_version'] == '9.6':
if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
skip = False
if skip:
@@ -162,7 +162,7 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
'config_name': config_name,
'cmake_args': cmake_args,
'cmake_target': cmake_target,
'build_only': build_only,
'build_only': 'true' if build_only else 'false',
'build_type': build_type,
'os': os,
'architecture': architecture,

View File

@@ -14,169 +14,139 @@
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "6948666"
"compiler_version": "12"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "6948666"
"compiler_version": "13"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "6948666"
"compiler_version": "14"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "gcc",
"compiler_version": "15",
"image_sha": "6948666"
"compiler_version": "15"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "6948666"
"compiler_version": "16"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "6948666"
"compiler_version": "17"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "6948666"
"compiler_version": "18"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "6948666"
"compiler_version": "19"
},
{
"distro_name": "debian",
"distro_version": "bookworm",
"compiler_name": "clang",
"compiler_version": "20",
"image_sha": "6948666"
"compiler_version": "20"
},
{
"distro_name": "rhel",
"distro_version": "8",
"distro_version": "9.4",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "10e69b4"
"compiler_version": "12"
},
{
"distro_name": "rhel",
"distro_version": "8",
"distro_version": "9.4",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro_name": "rhel",
"distro_version": "9.6",
"compiler_name": "gcc",
"compiler_version": "13"
},
{
"distro_name": "rhel",
"distro_version": "9.6",
"compiler_name": "gcc",
"compiler_version": "14"
},
{
"distro_name": "rhel",
"distro_version": "9.4",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "10e69b4"
"compiler_version": "any"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "9",
"distro_version": "9.6",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "10e69b4"
},
{
"distro_name": "rhel",
"distro_version": "10",
"compiler_name": "clang",
"compiler_version": "any",
"image_sha": "10e69b4"
"compiler_version": "any"
},
{
"distro_name": "ubuntu",
"distro_version": "jammy",
"compiler_name": "gcc",
"compiler_version": "12",
"image_sha": "6948666"
"compiler_version": "12"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "13",
"image_sha": "6948666"
"compiler_version": "13"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "gcc",
"compiler_version": "14",
"image_sha": "6948666"
"compiler_version": "14"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "16",
"image_sha": "6948666"
"compiler_version": "16"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "17",
"image_sha": "6948666"
"compiler_version": "17"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "18",
"image_sha": "6948666"
"compiler_version": "18"
},
{
"distro_name": "ubuntu",
"distro_version": "noble",
"compiler_name": "clang",
"compiler_version": "19",
"image_sha": "6948666"
"compiler_version": "19"
}
],
"build_type": ["Debug", "Release"],

View File

@@ -10,8 +10,7 @@
"distro_name": "macos",
"distro_version": "",
"compiler_name": "",
"compiler_version": "",
"image_sha": ""
"compiler_version": ""
}
],
"build_type": ["Debug", "Release"],

View File

@@ -10,8 +10,7 @@
"distro_name": "windows",
"distro_version": "",
"compiler_name": "",
"compiler_version": "",
"image_sha": ""
"compiler_version": ""
}
],
"build_type": ["Debug", "Release"],

.github/workflows/build-test.yml
View File

@@ -0,0 +1,146 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test
# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
secrets:
codecov_token:
description: "The Codecov token to use for uploading coverage reports."
required: false
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}
# Build and test the binary.
build-test:
needs:
- generate-matrix
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
runs-on: ${{ matrix.architecture.runner }}
container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
steps:
- name: Check strategy matrix
run: |
echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
echo 'Architecture platform: ${{ matrix.architecture.platform }}'
echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
echo 'Build type: ${{ matrix.build_type }}'
echo 'Build only: ${{ matrix.build_only }}'
echo 'CMake arguments: ${{ matrix.cmake_args }}'
echo 'CMake target: ${{ matrix.cmake_target }}'
echo 'Config name: ${{ matrix.config_name }}'
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false
- name: Check configuration (Windows)
if: ${{ inputs.os == 'windows' }}
run: |
echo 'Checking environment variables.'
set
echo 'Checking CMake version.'
cmake --version
echo 'Checking Conan version.'
conan --version
- name: Check configuration (Linux and MacOS)
if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
run: |
echo 'Checking path.'
echo ${PATH} | tr ':' '\n'
echo 'Checking environment variables.'
env | sort
echo 'Checking CMake version.'
cmake --version
echo 'Checking compiler version.'
${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version
echo 'Checking Conan version.'
conan --version
echo 'Checking Ninja version.'
ninja --version
echo 'Checking nproc version.'
nproc --version
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ matrix.build_type }}
force_build: ${{ inputs.dependencies_force_build }}
- name: Build and test binary
uses: ./.github/actions/build-test
with:
build_dir: ${{ inputs.build_dir }}
build_only: ${{ matrix.build_only }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
codecov_token: ${{ secrets.codecov_token }}
os: ${{ inputs.os }}

View File

@@ -40,52 +40,47 @@ jobs:
upload:
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Generate outputs
id: generate
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo 'Generating user and channel.'
echo "user=clio" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
echo 'Extracting version.'
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
- name: Calculate conan reference
id: conan_ref
run: |
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
- name: Set up Conan
uses: ./.github/actions/setup-conan
with:
conan_remote_name: ${{ inputs.conan_remote_name }}
conan_remote_url: ${{ inputs.conan_remote_url }}
- name: Log into Conan remote
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
- name: Upload package
env:
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
run: |
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.conan_ref.outputs.conan_ref }}
outputs:
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
notify:
needs: upload
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
steps:
- name: Notify Clio
env:
GH_TOKEN: ${{ secrets.clio_notify_token }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
-F "client_payload[pr_url]=${PR_URL}"
-F "client_payload[pr_url]=${{ github.event.pull_request.html_url }}"

View File

@@ -50,8 +50,8 @@ jobs:
files: |
# These paths are unique to `on-pr.yml`.
.github/scripts/levelization/**
.github/workflows/reusable-check-levelization.yml
.github/workflows/reusable-notify-clio.yml
.github/workflows/check-levelization.yml
.github/workflows/notify-clio.yml
.github/workflows/on-pr.yml
# Keep the paths below in sync with those in `on-trigger.yml`.
@@ -59,11 +59,8 @@ jobs:
.github/actions/build-test/**
.github/actions/setup-conan/**
.github/scripts/strategy-matrix/**
.github/workflows/reusable-build.yml
.github/workflows/reusable-build-test-config.yml
.github/workflows/reusable-build-test.yml
.github/workflows/build-test.yml
.github/workflows/reusable-strategy-matrix.yml
.github/workflows/reusable-test.yml
.codecov.yml
cmake/**
conan/**
@@ -96,27 +93,26 @@ jobs:
check-levelization:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-check-levelization.yml
uses: ./.github/workflows/check-levelization.yml
build-test:
needs: should-run
if: ${{ needs.should-run.outputs.go == 'true' }}
uses: ./.github/workflows/reusable-build-test.yml
uses: ./.github/workflows/build-test.yml
strategy:
fail-fast: false
matrix:
os: [linux, macos, windows]
with:
os: ${{ matrix.os }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
codecov_token: ${{ secrets.CODECOV_TOKEN }}
notify-clio:
needs:
- should-run
- build-test
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
uses: ./.github/workflows/reusable-notify-clio.yml
uses: ./.github/workflows/notify-clio.yml
secrets:
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}

View File

@@ -9,12 +9,12 @@ name: Trigger
on:
push:
branches:
- "develop"
- "release*"
- "master"
- develop
- release
- master
paths:
# These paths are unique to `on-trigger.yml`.
- ".github/workflows/reusable-check-missing-commits.yml"
- ".github/workflows/check-missing-commits.yml"
- ".github/workflows/on-trigger.yml"
- ".github/workflows/publish-docs.yml"
@@ -23,11 +23,8 @@ on:
- ".github/actions/build-test/**"
- ".github/actions/setup-conan/**"
- ".github/scripts/strategy-matrix/**"
- ".github/workflows/reusable-build.yml"
- ".github/workflows/reusable-build-test-config.yml"
- ".github/workflows/reusable-build-test.yml"
- ".github/workflows/build-test.yml"
- ".github/workflows/reusable-strategy-matrix.yml"
- ".github/workflows/reusable-test.yml"
- ".codecov.yml"
- "cmake/**"
- "conan/**"
@@ -46,8 +43,22 @@ on:
schedule:
- cron: "32 6 * * 1-5"
# Run when manually triggered via the GitHub UI or API.
# Run when manually triggered via the GitHub UI or API. If `force_upload` is
# true, then the dependencies that were missing (`force_rebuild` is false) or
# rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
# dependencies if needed.
workflow_dispatch:
inputs:
dependencies_force_build:
description: "Force building of all dependencies."
required: false
type: boolean
default: false
dependencies_force_upload:
description: "Force uploading of all dependencies."
required: false
type: boolean
default: false
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -60,16 +71,15 @@ defaults:
jobs:
check-missing-commits:
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
uses: ./.github/workflows/reusable-check-missing-commits.yml
uses: ./.github/workflows/check-missing-commits.yml
build-test:
uses: ./.github/workflows/reusable-build-test.yml
uses: ./.github/workflows/build-test.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
os: [linux, macos, windows]
with:
os: ${{ matrix.os }}
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
strategy_matrix: "minimal"
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
codecov_token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -7,9 +7,8 @@ on:
workflow_dispatch:
jobs:
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
run-hooks:
uses: XRPLF/actions/.github/workflows/pre-commit.yml@a8d7472b450eb53a1e5228f64552e5974457a21a
uses: XRPLF/actions/.github/workflows/pre-commit.yml@af1b0f0d764cda2e5435f5ac97b240d4bd4d95d3
with:
runs_on: ubuntu-latest
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit" }'

View File

@@ -27,7 +27,7 @@ env:
jobs:
publish:
runs-on: ubuntu-latest
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
container: ghcr.io/xrplf/ci/tools-rippled-documentation
permissions:
contents: write
steps:
@@ -48,8 +48,8 @@ jobs:
doxygen --version
- name: Build documentation
run: |
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
mkdir -p ${{ env.BUILD_DIR }}
cd ${{ env.BUILD_DIR }}
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel $(nproc)
- name: Publish documentation

View File

@@ -1,69 +0,0 @@
name: Build and test configuration
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: true
type: string
build_only:
description: 'Whether to only build or to build and test the code ("true", "false").'
required: true
type: boolean
build_type:
description: 'The build type to use ("Debug", "Release").'
type: string
required: true
cmake_args:
description: "Additional arguments to pass to CMake."
required: false
type: string
default: ""
cmake_target:
description: "The CMake target to build."
type: string
required: true
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The configuration string (used for naming artifacts and such)."
required: true
type: string
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
jobs:
build:
uses: ./.github/workflows/reusable-build.yml
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ inputs.build_type }}
cmake_args: ${{ inputs.cmake_args }}
cmake_target: ${{ inputs.cmake_target }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
test:
needs: build
uses: ./.github/workflows/reusable-test.yml
with:
run_tests: ${{ !inputs.build_only }}
verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
runs_on: ${{ inputs.runs_on }}
image: ${{ inputs.image }}
config_name: ${{ inputs.config_name }}

View File

@@ -1,58 +0,0 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test
# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: false
type: string
default: ".build"
os:
description: 'The operating system to use for the build ("linux", "macos", "windows").'
required: true
type: string
strategy_matrix:
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
required: false
type: string
default: "minimal"
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
os: ${{ inputs.os }}
strategy_matrix: ${{ inputs.strategy_matrix }}
# Build and test the binary for each configuration.
build-test-config:
needs:
- generate-matrix
uses: ./.github/workflows/reusable-build-test-config.yml
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
with:
build_dir: ${{ inputs.build_dir }}
build_only: ${{ matrix.build_only }}
build_type: ${{ matrix.build_type }}
cmake_args: ${{ matrix.cmake_args }}
cmake_target: ${{ matrix.cmake_target }}
runs_on: ${{ toJSON(matrix.architecture.runner) }}
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
config_name: ${{ matrix.config_name }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,122 +0,0 @@
name: Build rippled
on:
workflow_call:
inputs:
build_dir:
description: "The directory where to build."
required: true
type: string
build_type:
description: 'The build type to use ("Debug", "Release").'
required: true
type: string
cmake_args:
description: "Additional arguments to pass to CMake."
required: true
type: string
cmake_target:
description: "The CMake target to build."
required: true
type: string
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The name of the configuration."
required: true
type: string
secrets:
CODECOV_TOKEN:
description: "The Codecov token to use for uploading coverage reports."
required: true
defaults:
run:
shell: bash
jobs:
build:
name: Build ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 60
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Prepare runner
uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
with:
disable_ccache: false
- name: Print build environment
uses: ./.github/actions/print-env
- name: Setup Conan
uses: ./.github/actions/setup-conan
- name: Build dependencies
uses: ./.github/actions/build-deps
with:
build_dir: ${{ inputs.build_dir }}
build_type: ${{ inputs.build_type }}
- name: Configure CMake
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_ARGS: ${{ inputs.cmake_args }}
run: |
cmake \
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
${CMAKE_ARGS} \
..
- name: Build the binary
shell: bash
working-directory: ${{ inputs.build_dir }}
env:
BUILD_TYPE: ${{ inputs.build_type }}
CMAKE_TARGET: ${{ inputs.cmake_target }}
run: |
cmake \
--build . \
--config "${BUILD_TYPE}" \
--parallel $(nproc) \
--target "${CMAKE_TARGET}"
- name: Upload rippled artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: rippled-${{ inputs.config_name }}
path: ${{ inputs.build_dir }}/${{ runner.os == 'Windows' && inputs.build_type || '' }}/rippled${{ runner.os == 'Windows' && '.exe' || '' }}
retention-days: 3
if-no-files-found: error
- name: Upload coverage report
if: ${{ inputs.cmake_target == 'coverage' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
disable_search: true
disable_telem: true
fail_ci_if_error: true
files: ${{ inputs.build_dir }}/coverage.xml
plugins: noop
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -35,7 +35,4 @@ jobs:
- name: Generate strategy matrix
working-directory: .github/scripts/strategy-matrix
id: generate
env:
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
run: ./generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }} >> "${GITHUB_OUTPUT}"

View File

@@ -1,70 +0,0 @@
name: Test rippled
on:
workflow_call:
inputs:
verify_voidstar:
description: "Whether to verify the presence of voidstar instrumentation."
required: true
type: boolean
run_tests:
description: "Whether to run unit tests"
required: true
type: boolean
runs_on:
description: Runner to run the job on as a JSON string
required: true
type: string
image:
description: "The image to run in (leave empty to run natively)"
required: true
type: string
config_name:
description: "The name of the configuration."
required: true
type: string
jobs:
test:
name: Test ${{ inputs.config_name }}
runs-on: ${{ fromJSON(inputs.runs_on) }}
container: ${{ inputs.image != '' && inputs.image || null }}
timeout-minutes: 30
steps:
- name: Download rippled artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: rippled-${{ inputs.config_name }}
- name: Make binary executable (Linux and macOS)
shell: bash
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
run: |
chmod +x ./rippled
- name: Check linking (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: Verifying presence of instrumentation
if: ${{ inputs.verify_voidstar }}
shell: bash
run: |
./rippled --version | grep libvoidstar
- name: Test the binary
if: ${{ inputs.run_tests }}
shell: bash
run: |
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure

View File

@@ -24,10 +24,13 @@ on:
branches: [develop]
paths:
- .github/workflows/upload-conan-deps.yml
- .github/workflows/reusable-strategy-matrix.yml
- .github/actions/build-deps/action.yml
- .github/actions/setup-conan/action.yml
- ".github/scripts/strategy-matrix/**"
- conanfile.py
- conan.lock
@@ -40,13 +43,11 @@ concurrency:
cancel-in-progress: true
jobs:
# Generate the strategy matrix to be used by the following job.
generate-matrix:
uses: ./.github/workflows/reusable-strategy-matrix.yml
with:
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}
# Build and upload the dependencies for each configuration.
run-upload-conan-deps:
needs:
- generate-matrix
@@ -55,7 +56,8 @@ jobs:
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
max-parallel: 10
runs-on: ${{ matrix.architecture.runner }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
steps:
- name: Cleanup workspace
if: ${{ runner.os == 'macOS' }}
@@ -79,16 +81,11 @@ jobs:
build_dir: .build
build_type: ${{ matrix.build_type }}
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
# The verbosity is set to "quiet" for Windows to avoid an excessive amount of logs, while it
# is set to "verbose" otherwise to provide more information during the build process.
verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
- name: Log into Conan remote
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
run: conan remote login ${{ env.CONAN_REMOTE_NAME }} "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
- name: Upload Conan packages
if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
env:
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
run: conan upload "*" -r=${{ env.CONAN_REMOTE_NAME }} --confirm ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}

View File

@@ -39,12 +39,17 @@ found here](./docs/build/environment.md).
- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/), or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
[^1]:
It is possible to build with Conan 1.60+, but the instructions are
significantly different, which is why we are not recommending it.
[^2]:
CMake 4 is not yet supported by all dependencies required by this project.
If you are affected by this issue, follow [conan workaround for cmake
4](#workaround-for-cmake-4)
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:
@@ -277,6 +282,21 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```
#### Workaround for CMake 4
If your system CMake is version 4 rather than 3, you may have to configure your
Conan profile to use CMake version 3 for dependencies by adding the following
two lines to your profile:
```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```
This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.
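For example, assuming the default Conan profile is the one in use, the pin can
be appended like so (a sketch; adapt the path if you use a named profile):
```bash
# Append the CMake 3 pin to the default Conan profile.
cat >> "$(conan config home)/profiles/default" <<'EOF'

[tool_requires]
!cmake/*: cmake/[>=3 <4]
EOF
```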
#### Clang workaround for grpc
If your compiler is clang, version 19 or later, or apple-clang, version 17 or

View File

@@ -99,8 +99,6 @@ add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
add_subdirectory(external/libff)
add_library(libff::ff ALIAS ff)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
# Target names with :: are not allowed in a generator expression.
@@ -122,96 +120,6 @@ endif()
find_package(nudb REQUIRED)
find_package(date REQUIRED)
find_package(xxHash REQUIRED)
find_package(wasm-xrplf REQUIRED)
# Silence noisy warnings just inside libff
# Treat vendored headers as SYSTEM so their warnings don't bubble up.
# Remove it if printing warnings is desired
target_include_directories(ff SYSTEM PRIVATE
${CMAKE_SOURCE_DIR}/external/libff/src
${CMAKE_SOURCE_DIR}/external/libff/depends/xbyak/xbyak
${CMAKE_SOURCE_DIR}/external/libff/depends/ate-pairing/include
)
# Suppress GCC array-bounds false positives (and related) only for libff.
# Remove it if printing warnings is desired
if (CMAKE_CXX_COMPILER_ID MATCHES "GNU")
target_compile_options(ff PRIVATE
-Wno-array-bounds
-Wno-stringop-overflow
-fno-strict-aliasing
)
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
target_compile_options(ff PRIVATE
-Wno-array-bounds
)
endif()
# Silence redundant constexpr data member warnings if using C++17
# Remove it if printing warnings is desired
set(LIBFF_GF_SOURCES
${PROJECT_SOURCE_DIR}/external/libff/libff/algebra/fields/binary/gf32.cpp
${PROJECT_SOURCE_DIR}/external/libff/libff/algebra/fields/binary/gf64.cpp
${PROJECT_SOURCE_DIR}/external/libff/libff/algebra/fields/binary/gf128.cpp
${PROJECT_SOURCE_DIR}/external/libff/libff/algebra/fields/binary/gf192.cpp
${PROJECT_SOURCE_DIR}/external/libff/libff/algebra/fields/binary/gf256.cpp
)
if (CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang")
set_source_files_properties(${LIBFF_GF_SOURCES}
PROPERTIES COMPILE_OPTIONS "-Wno-deprecated;-Wno-redundant-decls")
endif()
# --- Try config-mode first (works if someone provides GMPConfig.cmake) ---
find_package(GMP CONFIG QUIET)
# --- If no config package, synthesize imported targets with find_* and pkg-config ---
if (NOT TARGET GMP::gmp)
find_package(PkgConfig QUIET)
if (PkgConfig_FOUND)
pkg_check_modules(PC_GMP QUIET gmp)
pkg_check_modules(PC_GMPXX QUIET gmpxx)
endif()
# headers
find_path(GMP_INCLUDE_DIR
NAMES gmp.h
HINTS ${PC_GMP_INCLUDE_DIRS} ${PC_GMPXX_INCLUDE_DIRS}
)
# libraries
find_library(GMP_LIBRARY
NAMES gmp
HINTS ${PC_GMP_LIBRARY_DIRS}
)
find_library(GMPXX_LIBRARY
NAMES gmpxx
HINTS ${PC_GMPXX_LIBRARY_DIRS} ${PC_GMP_LIBRARY_DIRS}
)
if (GMP_INCLUDE_DIR AND GMP_LIBRARY)
add_library(GMP::gmp UNKNOWN IMPORTED)
set_target_properties(GMP::gmp PROPERTIES
IMPORTED_LOCATION "${GMP_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${GMP_INCLUDE_DIR}"
)
endif()
if (GMP_INCLUDE_DIR AND GMPXX_LIBRARY)
add_library(GMP::gmpxx UNKNOWN IMPORTED)
set_target_properties(GMP::gmpxx PROPERTIES
IMPORTED_LOCATION "${GMPXX_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${GMP_INCLUDE_DIR}"
INTERFACE_LINK_LIBRARIES GMP::gmp
)
endif()
endif()
# Final guard
if (NOT TARGET GMP::gmp OR NOT TARGET GMP::gmpxx)
message(FATAL_ERROR "GMP/GMPXX not found. Install libgmp-dev (and gmpxx) or set CMAKE_PREFIX_PATH/GMP_DIR.")
endif()
# Link them transitively through ripple_libs
target_link_libraries(ripple_libs INTERFACE GMP::gmp GMP::gmpxx)
target_link_libraries(ripple_libs INTERFACE
ed25519::ed25519
@@ -221,7 +129,6 @@ target_link_libraries(ripple_libs INTERFACE
secp256k1::secp256k1
soci::soci
SQLite::SQLite3
libff::ff
)
# Work around changes to Conan recipe for now.
@@ -246,4 +153,4 @@ include(RippledValidatorKeys)
if(tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif()
endif()

View File

@@ -940,23 +940,7 @@
#
# path Location to store the database
#
# Optional keys
#
# cache_size Size of cache for database records. Default is 16384.
# Setting this value to 0 will use the default value.
#
# cache_age Length of time in minutes to keep database records
# cached. Default is 5 minutes. Setting this value to
# 0 will use the default value.
#
# Note: if neither cache_size nor cache_age is
# specified, the cache for database records will not
# be created. If only one of cache_size or cache_age
# is specified, the cache will be created using the
# default value for the unspecified parameter.
#
# Note: the cache will not be created if online_delete
# is specified.
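#
# Example (illustrative; the values shown are the documented defaults):
#
# cache_size=16384
# cache_age=5
#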
# Optional keys for NuDB and RocksDB:
#
# fast_load Boolean. If set, load the last persisted ledger
# from disk upon process start before syncing to
@@ -964,8 +948,6 @@
# if sufficient IOPS capacity is available.
# Default 0.
#
# Optional keys for NuDB or RocksDB:
#
# earliest_seq The default is 32570 to match the XRP ledger
# network's earliest allowed sequence. Alternate
# networks may set this value. Minimum value of 1.
@@ -975,47 +957,6 @@
# number of ledger records online. Must be greater
# than or equal to ledger_history.
#
# Optional keys for NuDB only:
#
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
# Must be a power of 2 between 4096 and 32768. Default is 4096.
#
# This parameter controls the fundamental storage unit
# size for NuDB's internal data structures. The choice
# of block size can significantly impact performance
# depending on your storage hardware and filesystem:
#
# - 4096 bytes: Optimal for most standard SSDs and
# traditional filesystems (ext4, NTFS, HFS+).
# Provides good balance of performance and storage
# efficiency. Recommended for most deployments.
# Minimizes memory footprint and provides consistent
# low-latency access patterns across diverse hardware.
#
# - 8192-16384 bytes: May improve performance on
# high-end NVMe SSDs and copy-on-write filesystems
# like ZFS or Btrfs that benefit from larger block
# alignment. Can reduce metadata overhead for large
# databases. Offers better sequential throughput and
# reduced I/O operations at the cost of higher memory
# usage per operation.
#
# - 32768 bytes (32K): Maximum supported block size
# for high-performance scenarios with very fast
# storage. May increase memory usage and reduce
# efficiency for smaller databases. Best suited for
# enterprise environments with abundant RAM.
#
# Performance testing is recommended before deploying
# any non-default block size in production environments.
#
# Note: This setting cannot be changed after database
# creation without rebuilding the entire database.
# Choose carefully based on your hardware and expected
# database size.
#
# Example: nudb_block_size=4096
#
# These keys modify the behavior of online_delete, and thus are only
# relevant if online_delete is defined and non-zero:
#
@@ -1290,39 +1231,6 @@
# Example:
# owner_reserve = 2000000 # 2 XRP
#
# extension_compute_limit = <gas>
#
# The extension compute limit is the maximum amount of gas that can be
# consumed by a single transaction. The gas limit is used to prevent
# transactions from consuming too many resources.
#
# If this parameter is unspecified, rippled will use an internal
# default. Don't change this without understanding the consequences.
#
# Example:
# extension_compute_limit = 1000000 # 1 million gas
#
# extension_size_limit = <bytes>
#
# The extension size limit is the maximum size of a WASM extension in
# bytes. The size limit is used to prevent extensions from consuming
# too many resources.
#
# If this parameter is unspecified, rippled will use an internal
# default. Don't change this without understanding the consequences.
#
# Example:
# extension_size_limit = 100000 # 100 kb
#
# gas_price = <drops>
#
# The gas price is the conversion between WASM gas and its price in drops.
#
# If this parameter is unspecified, rippled will use an internal
# default. Don't change this without understanding the consequences.
#
# Example:
# gas_price = 1000000 # 1 drop per gas
#-------------------------------------------------------------------------------
#
# 9. Misc Settings
@@ -1545,7 +1453,6 @@ secure_gateway = 127.0.0.1
[node_db]
type=NuDB
path=/var/lib/rippled/db/nudb
nudb_block_size=4096
online_delete=512
advisory_delete=0

View File

@@ -65,14 +65,8 @@ target_link_libraries(xrpl.imports.main
xrpl.libpb
xxHash::xxhash
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
wasm-xrplf::wasm-xrplf
)
if (WIN32)
target_link_libraries(xrpl.imports.main INTERFACE ntdll)
endif()
include(add_module)
include(target_link_modules)
@@ -117,12 +111,6 @@ target_link_libraries(xrpl.libxrpl.net PUBLIC
add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
xrpl.libxrpl.basics
xrpl.libxrpl.json
xrpl.libxrpl.protocol
)
add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
@@ -143,7 +131,6 @@ target_link_modules(xrpl PUBLIC
resource
server
net
ledger
)
# All headers in libxrpl are in modules.

View File

@@ -33,7 +33,7 @@ setup_target_for_coverage_gcovr(
FORMAT ${coverage_format}
EXECUTABLE rippled
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
DEPENDENCIES rippled
)

View File

@@ -18,7 +18,6 @@ install (
xrpl.libxrpl.json
xrpl.libxrpl.protocol
xrpl.libxrpl.resource
xrpl.libxrpl.ledger
xrpl.libxrpl.server
xrpl.libxrpl.net
xrpl.libxrpl
@@ -38,7 +37,7 @@ install(CODE "
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(xrpl \
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
")
install (EXPORT RippleExports
@@ -72,7 +71,7 @@ if (is_root_project AND TARGET rippled)
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
include(create_symbolic_link)
create_symbolic_link(rippled${suffix} \
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
")
endif ()

View File

@@ -1,4 +1,4 @@
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
if (validator_keys)
git_branch (current_branch)
@@ -6,15 +6,17 @@ if (validator_keys)
if (NOT (current_branch STREQUAL "release"))
set (current_branch "master")
endif ()
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
FetchContent_Declare (
validator_keys
validator_keys_src
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
GIT_TAG "${current_branch}"
)
FetchContent_MakeAvailable(validator_keys)
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
FetchContent_GetProperties (validator_keys_src)
if (NOT validator_keys_src_POPULATED)
message (STATUS "Pausing to download ValidatorKeys...")
FetchContent_Populate (validator_keys_src)
endif ()
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
endif ()

View File

@@ -1,25 +1,22 @@
{
"version": "0.5",
"requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1733936244.862",
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
"wasm-xrplf/2.4.1-xrplf#dc67c558e283593ef0edd7eb00e9fa0d%1759862247.891",
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
"openssl/3.5.2#0c5a5e15ae569f45dff57adcf1770cf7%1756234259.61",
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1743433196.251",
"libsodium/1.0.20#d2baa92ed999abe295ff63e2ee25b4f3%1743063880.072",
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1722218217.276",
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
"gmp/6.3.0#76a423d206f8aedd6bf8fc4e271467d2%1756240296.179",
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
@@ -28,7 +25,7 @@
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
],
"build_requires": [
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1733936244.862",
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",

View File

@@ -1,6 +1,9 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs.
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ os.cpu_count() - 1 }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
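# For example, on a 16-core machine os.cpu_count() - 1 gives 15 build jobs,
# while int(16 * 4/5) gives 12, leaving roughly a fifth of the cores free.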

View File

@@ -29,3 +29,6 @@ tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}
[tool_requires]
!cmake/*: cmake/[>=3 <4]

View File

@@ -2,7 +2,6 @@ from conan import ConanFile, __version__ as conan_version
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re
class Xrpl(ConanFile):
name = 'xrpl'
@@ -28,10 +27,9 @@ class Xrpl(ConanFile):
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/3.5.4',
'openssl/3.5.2',
'soci/4.0.3',
'zlib/1.3.1',
'wasm-xrplf/2.4.1-xrplf',
]
test_requires = [
@@ -113,9 +111,6 @@ class Xrpl(ConanFile):
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
self.requires('gmp/6.3.0', force=True)
self.requires('libsodium/1.0.20', force=True)
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
@@ -138,7 +133,6 @@ class Xrpl(ConanFile):
self.folders.generators = 'build/generators'
generators = 'CMakeDeps'
def generate(self):
tc = CMakeToolchain(self)
tc.variables['tests'] = self.options.tests
@@ -196,7 +190,6 @@ class Xrpl(ConanFile):
'protobuf::libprotobuf',
'soci::soci',
'sqlite3::sqlite',
'wasm-xrplf::wasm-xrplf',
'xxhash::xxhash',
'zlib::zlib',
]
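For readers unfamiliar with the `force=True` overrides kept in `requirements()` above, here is a minimal, hypothetical sketch of the pattern; the package names and the `libfoo` dependency are illustrative and not taken from this change:

```python
from conan import ConanFile


class Example(ConanFile):
    # Hypothetical recipe illustrating forced version overrides; the real
    # recipe above pins lz4, protobuf and sqlite3 the same way.
    name = "example"
    version = "0.1.0"

    def requirements(self):
        self.requires("libfoo/1.0")  # hypothetical direct dependency
        # Override whatever zlib version the dependency graph pulls in,
        # resolving potential version conflicts in favour of this pin.
        self.requires("zlib/1.3.1", force=True)
```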

View File

@@ -1,10 +0,0 @@
SCIPR Lab:
Eli Ben-Sasson
Alessandro Chiesa
Eran Tromer
Madars Virza
Howard Wu
External contributors:
Alexander Chernyakhovsky (Google Inc.)
Aleksejs Popovs

View File

@@ -1,52 +0,0 @@
## v0.3.0
This update introduces new field and curve API's, and enforces they are used consistently across the library. Furthermore it makes it possible to use fields, without having to initialize elliptic curves.
### Breaking Changes
- #23 Remove unused exponent param of curves
- #58 Add a defined API for every field type, and have minor tweaks to all fields to implement it (Thanks @alexander-zw)
- #79 Separate field initialization from curves (Thanks @alexander-zw)
### Features
- #71 Add BLS12-381 (Thanks @yelhousni)
- #80 Add clang-tidy checks to library and CI
- #82 Convert tests to use Google test (Thanks @alexander-zw)
- #83 Make run-clang-tidy return an error when linting fails
- #85 Add more unit tests for fields (Thanks @alexander-zw)
- #86 Add binary fields from [libiop](https://github.com/scipr-lab/libiop)
- #100 Move utils in from [libiop](https://github.com/scipr-lab/libiop)
### Bug fixes
- #75 Get rid of warning for unused constant PI, in complex field
- #78 Reduce prints when inhibit_profiling_info is set
- #79 & #87 Use std::size_t for all code, fix bugs introduced by #58
- #94 & #96 Fix bugs that make libff incompatible with
[libfqfft](https://github.com/scipr-lab/libfqfft) and [libiop](https://github.com/scipr-lab/libiop)
- #103 Fix bugs that make libff incompatible with [libsnark](https://github.com/scipr-lab/libsnark)
and add more tests for the field utils
## v0.2.0
_Special thanks to all downstream projects upstreaming their patches!_
### Breaking Changes
- File structure changed: All field utils are now in `libff/algebra/field_utils/`, `Fp_model` is
now in `libff/algebra/fields/prime_base/`, and all other F_p^n fields in
`libff/algebra/fields/prime_extension/`.
- The function `base_field_char()` of all fields and curves have been renamed to `field_char()`.
- The provided fields used in curves have been moved to separate files so that they can be imported
separately from `[field name]_fields.hpp`. However they are still accessible from the init file.
### Features
- #20 Improve operator+ speed for alt_bn, correct the corresponding docs, and reduce code duplication.
- #50 Add mul_by_cofactor to elliptic curve groups
- #50 Add sage scripts for altbn and mnt curves, to verify cofactors and generators
- #52 Change default procps build flags to work with Mac OS
### Bug fixes
- #19 Fix is_little_endian always returning true
- #20 Fix operator+ not contributing to alt_bn_128 profiling opcount
- #26 Remove unused warnings in release build
- #39 Update Travis Config for newer Ubuntu mirror defaults
- #50 Fix incorrect mnt4 g2 generator
- #54 Fix is_power_of_two for n > 2^32
- #55 Throw informative error for division by zero in div_ceil

View File

@@ -1,236 +0,0 @@
cmake_minimum_required(VERSION 3.5...3.25)
project (libff)
# Default to RelWithDebInfo configuration if no configuration is explicitly specified.
if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Build type on single-configuration generators" FORCE)
endif()
set(
CURVE
"BN128"
CACHE
STRING
"Default curve: one of BLS12_381, ALT_BN128, BN128, EDWARDS, MNT4, MNT6"
)
option(
DEBUG
"Enable debugging mode"
OFF
)
option(
LOWMEM
"Limit the size of multi-exponentiation tables, for low-memory platforms"
OFF
)
option(
MULTICORE
"Enable parallelized execution, using OpenMP"
OFF
)
option(
BINARY_OUTPUT
"In serialization, output raw binary data (instead of decimal), which is smaller and faster."
ON
)
option(
MONTGOMERY_OUTPUT
"Use Montgomery representations for serialization and words representation of Fp elements (faster but not human-readable)"
ON
)
option(
USE_PT_COMPRESSION
"Use point compression"
ON
)
option(
PROFILE_OP_COUNTS
"Collect counts for field and curve operations"
OFF
)
option(
USE_MIXED_ADDITION
"Convert each element of the key pair to affine coordinates"
OFF
)
# This option does not work on macOS, since there is no /proc
option(
WITH_PROCPS
"Use procps for memory profiling"
OFF
)
option(
CPPDEBUG
"Enable debugging of C++ STL (does not imply DEBUG)"
OFF
)
option(
PERFORMANCE
"Enable link-time and aggressive optimizations"
OFF
)
option(
USE_ASM
"Use architecture-specific optimized assembly code"
ON
)
option(
IS_LIBFF_PARENT
"Install submodule dependencies if caller originates from here"
ON
)
if(CMAKE_COMPILER_IS_GNUCXX OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
# Common compilation flags and warning configuration
set(
CMAKE_CXX_FLAGS
"${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wextra -Wfatal-errors"
)
if("${MULTICORE}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp")
endif()
endif()
find_path(GMP_INCLUDE_DIR NAMES gmp.h)
find_library(GMP_LIBRARY gmp)
if(GMP_LIBRARY MATCHES ${CMAKE_SHARED_LIBRARY_SUFFIX})
set(gmp_library_type SHARED)
else()
set(gmp_library_type STATIC)
endif()
message(STATUS "GMP: ${GMP_LIBRARY}, ${GMP_INCLUDE_DIR}")
add_library(GMP::gmp ${gmp_library_type} IMPORTED)
set_target_properties(
GMP::gmp PROPERTIES
IMPORTED_LOCATION ${GMP_LIBRARY}
INTERFACE_INCLUDE_DIRECTORIES ${GMP_INCLUDE_DIR}
)
if("${WITH_PROCPS}")
if(APPLE)
message(WARNING "WITH_PROCPS must be set to OFF for macOS build")
endif()
include(FindPkgConfig)
pkg_check_modules(
PROCPS
REQUIRED
libprocps
)
else()
add_definitions(
-DNO_PROCPS
)
endif()
add_definitions(
-DCURVE_${CURVE}
)
enable_testing()
if(${CURVE} STREQUAL "BN128")
include_directories(depends)
add_definitions(
-DBN_SUPPORT_SNARK=1
)
endif()
if("${DEBUG}")
add_definitions(-DDEBUG=1)
endif()
if("${LOWMEM}")
add_definitions(-DLOWMEM=1)
endif()
if("${MULTICORE}")
add_definitions(-DMULTICORE=1)
endif()
if("${BINARY_OUTPUT}")
add_definitions(-DBINARY_OUTPUT)
endif()
if("${MONTGOMERY_OUTPUT}")
add_definitions(-DMONTGOMERY_OUTPUT)
endif()
if(NOT "${USE_PT_COMPRESSION}")
add_definitions(-DNO_PT_COMPRESSION=1)
endif()
if("${PROFILE_OP_COUNTS}")
add_definitions(-DPROFILE_OP_COUNTS=1)
endif()
if("${USE_MIXED_ADDITION}")
add_definitions(-DUSE_MIXED_ADDITION=1)
endif()
if("${CPPDEBUG}")
add_definitions(-D_GLIBCXX_DEBUG -D_GLIBCXX_DEBUG_PEDANTIC)
endif()
if("${PERFORMANCE}")
add_definitions(-DNDEBUG)
set(
CMAKE_CXX_FLAGS
"${CMAKE_CXX_FLAGS} -flto -fuse-linker-plugin"
)
set(
CMAKE_EXE_LINKER_FLAGS
"${CMAKE_EXE_LINKER_FLAGS} -flto"
)
endif()
if("${USE_ASM}")
add_definitions(-DUSE_ASM)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mpclmul -msse4.1") # Used for binary fields.
endif()
# Configure CCache if available
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
endif(CCACHE_FOUND)
if ("${IS_LIBFF_PARENT}")
find_program(
MARKDOWN
markdown_py
DOC "Path to markdown_py binary"
)
if(MARKDOWN-NOTFOUND)
else()
add_custom_target(
doc
${MARKDOWN} -f ${CMAKE_CURRENT_BINARY_DIR}/README.html -x toc -x extra --noisy ${CMAKE_CURRENT_SOURCE_DIR}/README.md
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Translating from markdown to HTML" VERBATIM
)
endif()
# Add a `make check` target that builds and tests
add_custom_target(check COMMAND ${CMAKE_CTEST_COMMAND})
# Add a `make profile` target that builds and profiles
add_custom_target(
profile
COMMAND ${CMAKE_COMMAND}
-E echo 'Built target finished'
)
add_subdirectory(depends)
# Add a 'make clang-tidy' target that runs clang-tidy using checks specified in .clang-tidy
include(clang-tidy.cmake)
endif()
add_subdirectory(libff)

View File

@@ -1,72 +0,0 @@
# Contributing
Thank you for considering making contributions to libff!
Contributing to this repo can be done in several forms, such as participating in discussion or proposing code changes.
To ensure a smooth workflow for all contributors, the following general procedure for contributing has been established:
1) Either open or find an issue you'd like to help with
2) Participate in thoughtful discussion on that issue
3) If you would like to contribute:
* If the issue is a feature proposal, ensure that the proposal has been accepted
* Ensure that nobody else has already begun working on this issue.
If they have, please try to contact them to collaborate
* If nobody has been assigned for the issue and you would like to work on it, make a comment on the issue to inform the community of your intentions to begin work. (So we can avoid duplication of efforts)
* We suggest using standard Github best practices for contributing: fork the repo, branch from the HEAD of develop, make some commits on your branch, and submit a PR from the branch to develop.
More detail on this is below
* Be sure to include a relevant change log entry in the Pending section of CHANGELOG.md (see file for log format)
Note that for very small or clear problems (such as typos), or well isolated improvements, it is not required to an open issue to submit a PR. But be aware that for more complex problems/features touching multiple parts of the codebase, if a PR is opened before an adequate design discussion has taken place in a github issue, that PR runs a larger likelihood of being rejected.
Looking for a good place to start contributing? How about checking out some ["good first issues"](https://github.com/scipr-lab/libff/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
## Branch Structure
Libff has its default branch as `develop`, which is where PRs are merged into. The `master` branch should only contain code that is part of a release. Releases will be periodically made. All other branches should be assumed to be miscellaneous feature development branches.
All downstream users should be using tagged versions of the library.
## How to work on a fork
Please skip this section if you're familiar with contributing to open-source github projects.
First, fork the repo from the github UI, and clone your fork locally (we denote
by `<path-to-libff>` the path to your `libff` fork on your machine).
After cloning your fork on your machine, it may be useful to add the upstream
project as a git remote (to periodically update your fork for instance).
You can do so by running the following command:
```bash
# Go to your fork cloned repository (replacing <path-to-libff> by the appropriate path)
cd <path-to-libff>
# Add the upstream project to your remotes
git remote add upstream git@github.com:scipr-lab/libff.git
# (Optional) Check the addition by inspecting your remotes
git remote -vv
```
Then the way you make code contributions is to first think of a branch name that describes your change.
Then do the following:
```bash
# Make sure your "develop" branch is up to date with the upstream repository
git pull upstream develop
# Create a branch for your contribution (replacing <your-branch-name> by the appropriate value)
git checkout -b <your-branch-name>
```
and then work as normal on that branch, and PR'ing to upstream `develop` when you're done =)
## Updating documentation
All PRs should aim to leave the code more documented than it started with.
Please don't assume that its easy to infer what the code is doing,
as that is usually not the case for these complex protocols unless one has been working with it recently.
(Even if you understand the paper!)
Its often very useful to describe what is the high level view of what a code block is doing,
and either refer to the relevant section of a paper or include a short proof/argument for why it makes sense before the actual logic.
## Performance improvements
All performance improvements should be accompanied with benchmarks improving, or otherwise have it be clear that things have improved.
For some areas of the codebase, performance roughly follows the number of field multiplications, but there are also many areas where
low level system effects such as cache locality and superscalar operations become important for performance.
Thus performance can often become very non-intuitive / diverge from minimizing the number of arithmetic operations.

BIN
external/libff/LICENSE vendored

Binary file not shown.

View File

@@ -1,137 +0,0 @@
<h1 align="center">libff</h1>
<h4 align="center">C++ library for Finite Fields and Elliptic Curves</h4>
___libff___ is a C++ library for finite fields and elliptic curves. The library is developed by [SCIPR Lab] and contributors (see [AUTHORS] file) and is released under the MIT License (see [LICENSE] file).
## Table of contents
- [Directory structure](#directory-structure)
- [Elliptic curve choices](#elliptic-curve-choices)
- [Build guide](#build-guide)
## Directory structure
The directory structure is as follows:
* [__libff__](libff): C++ source code, containing the following modules:
* [__algebra__](libff/algebra): fields and elliptic curve groups
* [__common__](libff/common): miscellaneous utilities
* [__depends__](depends): dependency libraries
## Elliptic curve choices
The libsnark library currently provides three options:
* `edwards`:
an instantiation based on an Edwards curve, providing 80 bits of security.
* `bn128`:
an instantiation based on a Barreto-Naehrig curve, providing 128
bits of security. The underlying curve implementation is
\[ate-pairing], which has incorporated our patch that changes the
BN curve to one suitable for SNARK applications.
* This implementation uses dynamically-generated machine code for the curve
arithmetic. Some modern systems disallow execution of code on the heap, and
will thus block this implementation.
For example, on Fedora 20 at its default settings, you will get the error
`zmInit ERR:can't protect` when running this code. To solve this,
run `sudo setsebool -P allow_execheap 1` to allow execution,
or use `make CURVE=ALT_BN128` instead.
* `alt_bn128`:
an alternative to `bn128`, somewhat slower but avoids dynamic code generation.
Note that `bn128` requires an x86-64 CPU while the other curve choices
should be architecture-independent.
## Build guide
The library has the following dependencies:
* [Boost](http://www.boost.org/)
* [CMake](http://cmake.org/)
* [GMP](http://gmplib.org/)
* [libsodium](https://libsodium.gitbook.io/doc/)
* [libprocps](http://packages.ubuntu.com/trusty/libprocps-dev) (turned off by default)
The library has been tested on Linux, but it is compatible with Windows and MacOS.
### Installation
On Ubuntu 14.04 LTS:
```
sudo apt-get install build-essential git libboost-all-dev cmake libgmp3-dev libssl-dev libprocps3-dev pkg-config libsodium-dev
```
On MacOS, all of the libraries from the previous section can be installed with `brew`, except for `libprocps`, which is turned off by default.
Fetch dependencies from their GitHub repos:
```
git submodule init && git submodule update
```
### Compilation
To compile, starting at the project root directory, create the build directory and Makefile:
```
mkdir build && cd build
cmake ..
```
If you are on macOS, change the cmake command to be
```
cmake .. -DOPENSSL_ROOT_DIR=$(brew --prefix openssl)
```
Other build flags include:
| Flag | Value | Description |
| ---- | ----- | ----------- |
| MAKE_INSTALL_PREFIX | (your path) | Specifies the desired install location. |
| CMAKE_BUILD_TYPE | Debug | Enables asserts. Note that tests now use gtest instead of asserts. |
| WITH_PROCPS | ON | Enables `libprocps`, which is by default turned off since it is not supported on some systems such as MacOS. |
Then, to compile and install the library, run this within the build directory:
```
make
make install
```
This will install `libff.a` into `/install/path/lib`; so your application should be linked using `-L/install/path/lib -lff`. It also installs the requisite headers into `/install/path/include`; so your application should be compiled using `-I/install/path/include`.
## Testing
To build and execute the tests for this library, run:
```
make check
```
## Code formatting and linting
To run clang-tidy on this library, specify the variable `USE_CLANG_TIDY` (eg. `cmake .. -D USE_CLANG_TIDY=ON`).
Then, run:
```
make clang-tidy
```
One can specify which clang-tidy checks to run and which files to run clang-tidy on using the `.clang-tidy` file in the root directory of the project.
## Profile
To compile the multi-exponentiation profiler in this library, run:
```
make profile
```
The resulting profiler is named `multiexp_profile` and can be found in the `libff` folder under the build directory.
[SCIPR Lab]: http://www.scipr-lab.org/ (Succinct Computational Integrity and Privacy Research Lab)
[LICENSE]: LICENSE (LICENSE file in top directory of libff distribution)
[AUTHORS]: AUTHORS (AUTHORS file in top directory of libff distribution)

View File

@@ -1,30 +0,0 @@
option(
USE_CLANG_TIDY
"Use clang-tidy if the program is found."
OFF
)
if(USE_CLANG_TIDY)
find_program(CLANG_TIDY clang-tidy)
if(CLANG_TIDY)
file(DOWNLOAD
https://raw.githubusercontent.com/llvm-mirror/clang-tools-extra/master/clang-tidy/tool/run-clang-tidy.py
${PROJECT_BINARY_DIR}/run-clang-tidy.py
)
find_program(RUN_CLANG_TIDY run-clang-tidy.py)
if(RUN_CLANG_TIDY)
message("Using clang-tidy. Creating target... To run, use: make clang-tidy")
add_custom_target(
clang-tidy
COMMAND python3 run-clang-tidy.py ../libff/algebra ../libff/common -quiet 2>&1
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)
else()
message(
FATAL_ERROR
"run-clang-tidy.py not found. (Download and place in PATH). Aborting...")
endif()
else()
message(FATAL_ERROR "clang-tidy not found. Aborting...")
endif()
endif()

View File

@@ -1,13 +0,0 @@
add_subdirectory(gtest EXCLUDE_FROM_ALL)
if(${CURVE} STREQUAL "BN128")
include_directories(ate-pairing/include)
include_directories(xbyak)
add_library(
zm
STATIC
ate-pairing/src/zm.cpp
ate-pairing/src/zm2.cpp
)
endif()

View File

@@ -1,7 +0,0 @@
*~
\#*\#
.\#*
*.omc
.omakedb*
lib/
CVS

View File

@@ -1,10 +0,0 @@
all:
$(MAKE) -C src
$(MAKE) -C test
clean:
$(MAKE) -C src clean
$(MAKE) -C test clean
check:
$(MAKE) -C test check

View File

@@ -1,38 +0,0 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Express 2012 for Windows Desktop
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "zmlib", "proj\11\zmlib\zmlib.vcxproj", "{2F9B80B9-D6A5-4534-94A3-7B42F5623193}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "sample", "proj\11\sample\sample.vcxproj", "{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}"
ProjectSection(ProjectDependencies) = postProject
{2F9B80B9-D6A5-4534-94A3-7B42F5623193} = {2F9B80B9-D6A5-4534-94A3-7B42F5623193}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_bn", "proj\11\test_bn\test_bn.vcxproj", "{F4B8F9CD-2E24-4374-9BF6-6886347E861B}"
ProjectSection(ProjectDependencies) = postProject
{2F9B80B9-D6A5-4534-94A3-7B42F5623193} = {2F9B80B9-D6A5-4534-94A3-7B42F5623193}
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|x64 = Debug|x64
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{2F9B80B9-D6A5-4534-94A3-7B42F5623193}.Debug|x64.ActiveCfg = Debug|x64
{2F9B80B9-D6A5-4534-94A3-7B42F5623193}.Debug|x64.Build.0 = Debug|x64
{2F9B80B9-D6A5-4534-94A3-7B42F5623193}.Release|x64.ActiveCfg = Release|x64
{2F9B80B9-D6A5-4534-94A3-7B42F5623193}.Release|x64.Build.0 = Release|x64
{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}.Debug|x64.ActiveCfg = Debug|x64
{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}.Debug|x64.Build.0 = Debug|x64
{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}.Release|x64.ActiveCfg = Release|x64
{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}.Release|x64.Build.0 = Release|x64
{F4B8F9CD-2E24-4374-9BF6-6886347E861B}.Debug|x64.ActiveCfg = Debug|x64
{F4B8F9CD-2E24-4374-9BF6-6886347E861B}.Debug|x64.Build.0 = Debug|x64
{F4B8F9CD-2E24-4374-9BF6-6886347E861B}.Release|x64.ActiveCfg = Release|x64
{F4B8F9CD-2E24-4374-9BF6-6886347E861B}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal

View File

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="shift_jis"?>
<VisualStudioPropertySheet
ProjectType="Visual C++"
Version="8.00"
Name="ate"
>
<Tool
Name="VCCLCompilerTool"
AdditionalIncludeDirectories="$(SolutionDir)../xbyak;$(SolutionDir)src;$(SolutionDir)include;$(SolutionDir)test"
PreprocessorDefinitions="WIN32;NOMINMAX;WIN32_LEAN_AND_MEAN"
WarningLevel="4"
DisableSpecificWarnings="4996"
/>
</VisualStudioPropertySheet>

View File

@@ -1,55 +0,0 @@
# common definition for Makefile
# for GNU c++
#CCACHE=$(shell eval ls /usr/local/bin/ccache 2>/dev/null)
#CXX = g++
#CC = gcc
#LD = g++
CP = cp -f
AR = ar r
MKDIR=mkdir -p
RM=rm -f
CFLAGS = -fPIC -O3 -fomit-frame-pointer -DNDEBUG -msse2 -mfpmath=sse -march=native
CFLAGS_WARN=-Wall -Wextra -Wformat=2 -Wcast-qual -Wcast-align -Wwrite-strings -Wfloat-equal -Wpointer-arith #-Wswitch-enum -Wstrict-aliasing=2
CFLAGS_ALWAYS = -D_FILE_OFFSET_BITS=64 -DMIE_ATE_USE_GMP
LDFLAGS = -lm -lzm $(LIB_DIR) -lgmp -lgmpxx
AS = nasm
AFLAGS = -f elf -D__unix__
ifeq ($(SUPPORT_SNARK),1)
CFLAGS += -DBN_SUPPORT_SNARK
endif
ifneq ($(VUINT_BIT_LEN),)
CFLAGS += -D"MIE_ZM_VUINT_BIT_LEN=$(VUINT_BIT_LEN)"
endif
# for only 64-bit
BIT=-m64
#BIT=-m32
#ifeq ($(shell uname -s),x86_64)
#BIT=-m64
#endif
#ifeq ($(shell uname -s),Darwin)
#BIT=-m64
#endif
ifeq ($(shell uname -s),Cygwin)
# install mingw64-x86_64-gcc-g++
CXX=x86_64-w64-mingw32-g++
LD=x86_64-w64-mingw32-g++
AR=x86_64-w64-mingw32-ar r
#LDFLAGS+=-L/usr/x86_64-w64-mingw32/sys-root/mingw/lib
endif
ifeq ($(DBG),on)
CFLAGS += -O0 -g3 -UNDEBUG
LDFLAGS += -g3
endif
.SUFFIXES: .cpp
.cpp.o:
$(CXX) -c $< -o $@ $(CFLAGS) $(CFLAGS_WARN) $(CFLAGS_ALWAYS) $(INC_DIR) $(BIT)
.c.o:
$(CC) -c $< -o $@ $(CFLAGS) $(CFLAGS_WARN) $(CFLAGS_ALWAYS) $(INC_DIR) $(BIT)
INC_DIR+= -I../src -I../../xbyak -I../include
LIB_DIR+= -L../lib

View File

@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<OutDir>$(SolutionDir)bin\</OutDir>
</PropertyGroup>
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>$(SolutionDir)../xbyak;$(SolutionDir)include</AdditionalIncludeDirectories>
</ClCompile>
</ItemDefinitionGroup>
<ItemDefinitionGroup>
<ClCompile>
<WarningLevel>Level4</WarningLevel>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeaderFile />
<PrecompiledHeaderOutputFile />
<PreprocessorDefinitions>_MBCS;%(PreprocessorDefinitions);NOMINMAX</PreprocessorDefinitions>
<OpenMPSupport>false</OpenMPSupport>
</ClCompile>
<Link>
<AdditionalLibraryDirectories>$(SolutionDir)lib\</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
<ItemGroup />
</Project>

View File

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<TargetName>$(ProjectName)d</TargetName>
</PropertyGroup>
<ItemDefinitionGroup>
<ClCompile>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
</ClCompile>
</ItemDefinitionGroup>
<ItemGroup />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<LocalDebuggerWorkingDirectory>$(SolutionDir)bin</LocalDebuggerWorkingDirectory>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
</Project>

File diff suppressed because it is too large

View File

@@ -1,169 +0,0 @@
#pragma once
/**
@file
@brief measure exec time of function
@author MITSUNARI Shigeo
*/
#if defined(_MSC_VER) && (MSC_VER <= 1500)
#include <cybozu/inttype.hpp>
#else
#include <stdint.h>
#endif
#include <stdio.h>
#if defined(_M_IX86) || defined(_M_X64) || defined(__i386__) || defined(__x86_64__)
#define CYBOZU_BENCH_USE_RDTSC
#endif
#ifdef CYBOZU_BENCH_USE_RDTSC
#ifdef _MSC_VER
#include <intrin.h>
#endif
#else
#include <cybozu/time.hpp>
#endif
namespace cybozu {
#ifdef CYBOZU_BENCH_USE_RDTSC
class CpuClock {
public:
static inline uint64_t getRdtsc()
{
#ifdef _MSC_VER
return __rdtsc();
#else
unsigned int eax, edx;
__asm__ volatile("rdtsc" : "=a"(eax), "=d"(edx));
return ((uint64_t)edx << 32) | eax;
#endif
}
CpuClock()
: clock_(0)
, count_(0)
{
}
void begin()
{
clock_ -= getRdtsc();
}
void end()
{
clock_ += getRdtsc();
count_++;
}
int getCount() const { return count_; }
uint64_t getClock() const { return clock_; }
void clear() { count_ = 0; clock_ = 0; }
void put(const char *msg = 0, int N = 1) const
{
double t = getClock() / double(getCount()) / N;
if (msg && *msg) printf("%s ", msg);
if (t > 1e6) {
printf("%7.3fMclk", t * 1e-6);
} else if (t > 1e3) {
printf("%7.3fKclk", t * 1e-3);
} else {
printf("%6.2f clk", t);
}
if (msg && *msg) printf("\n");
}
// adhoc constatns for CYBOZU_BENCH
static const int loopN1 = 1000;
static const int loopN2 = 1000000;
static const uint64_t maxClk = (uint64_t)3e8;
private:
uint64_t clock_;
int count_;
};
#else
class CpuClock {
cybozu::Time t_;
uint64_t clock_;
int count_;
public:
CpuClock() : clock_(0), count_(0) { t_.setTime(0, 0); }
void begin()
{
if (count_ == 0) t_.setCurrentTime(); // start
}
/*
@note QQQ ; this is not same api as rdtsc version
*/
void end()
{
cybozu::Time cur(true);
int diffSec = (int)(cur.getTime() - t_.getTime());
int diffMsec = cur.getMsec() - t_.getMsec();
const int diff = diffSec * 1000 + diffMsec;
clock_ = diff;
count_++;
}
int getCount() const { return count_; }
uint64_t getClock() const { return clock_; }
void clear() { t_.setTime(0, 0); clock_ = 0; count_ = 0; }
void put(const char *msg = 0, int N = 1) const
{
double t = getClock() / double(getCount()) / N;
if (msg && *msg) printf("%s ", msg);
if (t > 1) {
printf("%6.2fmsec", t);
} else if (t > 1e-3) {
printf("%6.2fusec", t * 1e3);
} else {
printf("%6.2fnsec", t * 1e6);
}
if (msg && *msg) printf("\n");
}
// adhoc constatns for CYBOZU_BENCH
static const int loopN1 = 1000000;
static const int loopN2 = 1000;
static const uint64_t maxClk = (uint64_t)500;
};
#endif
namespace bench {
static CpuClock g_clk;
#ifdef __GNUC__
#define CYBOZU_UNUSED __attribute__((unused))
#else
#define CYBOZU_UNUSED
#endif
static int CYBOZU_UNUSED g_loopNum;
} // cybozu::bench
/*
loop counter is automatically determined
CYBOZU_BENCH(<msg>, <func>, <param1>, <param2>, ...);
if msg == "" then only set g_clk, g_loopNum
*/
#define CYBOZU_BENCH(msg, func, ...) \
{ \
const uint64_t maxClk = cybozu::CpuClock::maxClk; \
cybozu::CpuClock clk; \
for (int i = 0; i < cybozu::CpuClock::loopN2; i++) { \
clk.begin(); \
for (int j = 0; j < cybozu::CpuClock::loopN1; j++) { func(__VA_ARGS__); } \
clk.end(); \
if (clk.getClock() > maxClk) break; \
} \
if (msg && *msg) clk.put(msg, cybozu::CpuClock::loopN1); \
cybozu::bench::g_clk = clk; cybozu::bench::g_loopNum = cybozu::CpuClock::loopN1; \
}
/*
loop counter N is given
CYBOZU_BENCH_C(<msg>, <counter>, <func>, <param1>, <param2>, ...);
if msg == "" then only set g_clk, g_loopNum
*/
#define CYBOZU_BENCH_C(msg, _N, func, ...) \
{ \
cybozu::CpuClock clk; \
clk.begin(); \
for (int j = 0; j < _N; j++) { func(__VA_ARGS__); } \
clk.end(); \
if (msg && *msg) clk.put(msg, _N); \
cybozu::bench::g_clk = clk; cybozu::bench::g_loopNum = _N; \
}
} // cybozu

View File

@@ -1,121 +0,0 @@
#pragma once
/**
@file
@brief int type definition and macros
Copyright (C) 2008 Cybozu Labs, Inc., all rights reserved.
*/
#if defined(_MSC_VER) && (MSC_VER <= 1500) && !defined(CYBOZU_DEFINED_INTXX)
#define CYBOZU_DEFINED_INTXX
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
typedef unsigned int uint32_t;
typedef int int32_t;
typedef unsigned short uint16_t;
typedef short int16_t;
typedef unsigned char uint8_t;
typedef signed char int8_t;
#else
#include <stdint.h>
#endif
#ifdef _MSC_VER
#ifndef CYBOZU_DEFINED_SSIZE_T
#define CYBOZU_DEFINED_SSIZE_T
#ifdef _WIN64
typedef int64_t ssize_t;
#else
typedef int32_t ssize_t;
#endif
#endif
#else
#include <unistd.h> // for ssize_t
#endif
#ifndef CYBOZU_ALIGN
#ifdef _MSC_VER
#define CYBOZU_ALIGN(x) __declspec(align(x))
#else
#define CYBOZU_ALIGN(x) __attribute__((aligned(x)))
#endif
#endif
#ifndef CYBOZU_FORCE_INLINE
#ifdef _MSC_VER
#define CYBOZU_FORCE_INLINE __forceinline
#else
#define CYBOZU_FORCE_INLINE __attribute__((always_inline))
#endif
#endif
#ifndef CYBOZU_ALLOCA
#ifdef _MSC_VER
#include <malloc.h>
#define CYBOZU_ALLOCA(x) _malloca(x)
#else
#define CYBOZU_ALLOCA_(x) __builtin_alloca(x)
#endif
#endif
#ifndef CYBOZU_NUM_OF_ARRAY
#define CYBOZU_NUM_OF_ARRAY(x) (sizeof(x) / sizeof(*x))
#endif
#ifndef CYBOZU_SNPRINTF
#if defined(_MSC_VER) && (_MSC_VER < 1900)
#define CYBOZU_SNPRINTF(x, len, ...) (void)_snprintf_s(x, len, len - 1, __VA_ARGS__)
#else
#define CYBOZU_SNPRINTF(x, len, ...) (void)snprintf(x, len, __VA_ARGS__)
#endif
#endif
#define CYBOZU_CPP_VERSION_CPP03 0
#define CYBOZU_CPP_VERSION_TR1 1
#define CYBOZU_CPP_VERSION_CPP11 2
#if (__cplusplus >= 201103) || (_MSC_VER >= 1500) || defined(__GXX_EXPERIMENTAL_CXX0X__)
#if defined(_MSC_VER) && (_MSC_VER <= 1600)
#define CYBOZU_CPP_VERSION CYBOZU_CPP_VERSION_TR1
#else
#define CYBOZU_CPP_VERSION CYBOZU_CPP_VERSION_CPP11
#endif
#elif (__GNUC__ >= 4 && __GNUC_MINOR__ >= 5) || (__clang_major__ >= 3)
#define CYBOZU_CPP_VERSION CYBOZU_CPP_VERSION_TR1
#else
#define CYBOZU_CPP_VERSION CYBOZU_CPP_VERSION_CPP03
#endif
#if (CYBOZU_CPP_VERSION == CYBOZU_CPP_VERSION_TR1)
#define CYBOZU_NAMESPACE_STD std::tr1
#define CYBOZU_NAMESPACE_TR1_BEGIN namespace tr1 {
#define CYBOZU_NAMESPACE_TR1_END }
#else
#define CYBOZU_NAMESPACE_STD std
#define CYBOZU_NAMESPACE_TR1_BEGIN
#define CYBOZU_NAMESPACE_TR1_END
#endif
#ifndef CYBOZU_OS_BIT
#if defined(_WIN64) || defined(__x86_64__)
#define CYBOZU_OS_BIT 64
#else
#define CYBOZU_OS_BIT 32
#endif
#endif
#ifndef CYBOZU_ENDIAN
#define CYBOZU_ENDIAN_UNKNOWN 0
#define CYBOZU_ENDIAN_LITTLE 1
#define CYBOZU_ENDIAN_BIG 2
#if defined(_M_IX86) || defined(_M_AMD64) || defined(__x86_64__) || defined(__i386__)
#define CYBOZU_ENDIAN CYBOZU_ENDIAN_LITTLE
#else
#define CYBOZU_ENDIAN CYBOZU_ENDIAN_UNKNOWN
#endif
#endif
namespace cybozu {
template<class T>
void disable_warning_unused_variable(const T&) { }
template<class T, class S>
T cast(const S* ptr) { return static_cast<T>(static_cast<const void*>(ptr)); }
template<class T, class S>
T cast(S* ptr) { return static_cast<T>(static_cast<void*>(ptr)); }
} // cybozu

View File

@@ -1,2 +0,0 @@
see
https://github.com/herumi/cybozulib/

View File

@@ -1,371 +0,0 @@
#pragma once
/**
@file
@brief unit test class
Copyright (C) 2008 Cybozu Labs, Inc., all rights reserved.
*/
#include <stdio.h>
#include <string.h>
#include <string>
#include <list>
#include <iostream>
#include <utility>
#if defined(_MSC_VER) && (MSC_VER <= 1500)
#include <cybozu/inttype.hpp>
#else
#include <stdint.h>
#endif
namespace cybozu { namespace test {
class AutoRun {
typedef void (*Func)();
typedef std::list<std::pair<const char*, Func> > UnitTestList;
public:
AutoRun()
: init_(0)
, term_(0)
, okCount_(0)
, ngCount_(0)
, exceptionCount_(0)
{
}
void setup(Func init, Func term)
{
init_ = init;
term_ = term;
}
void append(const char *name, Func func)
{
list_.push_back(std::make_pair(name, func));
}
void set(bool isOK)
{
if (isOK) {
okCount_++;
} else {
ngCount_++;
}
}
std::string getBaseName(const std::string& name) const
{
#ifdef _WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
size_t pos = name.find_last_of(sep);
std::string ret = name.substr(pos + 1);
pos = ret.find('.');
return ret.substr(0, pos);
}
int run(int, char *argv[])
{
std::string msg;
try {
if (init_) init_();
for (UnitTestList::const_iterator i = list_.begin(), ie = list_.end(); i != ie; ++i) {
std::cout << "ctest:module=" << i->first << std::endl;
try {
(i->second)();
} catch (std::exception& e) {
exceptionCount_++;
std::cout << "ctest: " << i->first << " is stopped by exception " << e.what() << std::endl;
} catch (...) {
exceptionCount_++;
std::cout << "ctest: " << i->first << " is stopped by unknown exception" << std::endl;
}
}
if (term_) term_();
} catch (std::exception& e) {
msg = std::string("ctest:err:") + e.what();
} catch (...) {
msg = "ctest:err: catch unknown exception";
}
fflush(stdout);
if (msg.empty()) {
std::cout << "ctest:name=" << getBaseName(*argv)
<< ", module=" << list_.size()
<< ", total=" << (okCount_ + ngCount_ + exceptionCount_)
<< ", ok=" << okCount_
<< ", ng=" << ngCount_
<< ", exception=" << exceptionCount_ << std::endl;
return 0;
} else {
std::cout << msg << std::endl;
return 1;
}
}
static inline AutoRun& getInstance()
{
static AutoRun instance;
return instance;
}
private:
Func init_;
Func term_;
int okCount_;
int ngCount_;
int exceptionCount_;
UnitTestList list_;
};
static AutoRun& autoRun = AutoRun::getInstance();
inline void test(bool ret, const std::string& msg, const std::string& param, const char *file, int line)
{
autoRun.set(ret);
if (!ret) {
printf("%s(%d):ctest:%s(%s);\n", file, line, msg.c_str(), param.c_str());
}
}
template<typename T, typename U>
bool isEqual(const T& lhs, const U& rhs)
{
return lhs == rhs;
}
// avoid warning of comparision of integers of different signs
inline bool isEqual(size_t lhs, int rhs)
{
return lhs == size_t(rhs);
}
inline bool isEqual(int lhs, size_t rhs)
{
return size_t(lhs) == rhs;
}
inline bool isEqual(const char *lhs, const char *rhs)
{
return strcmp(lhs, rhs) == 0;
}
inline bool isEqual(char *lhs, const char *rhs)
{
return strcmp(lhs, rhs) == 0;
}
inline bool isEqual(const char *lhs, char *rhs)
{
return strcmp(lhs, rhs) == 0;
}
inline bool isEqual(char *lhs, char *rhs)
{
return strcmp(lhs, rhs) == 0;
}
// avoid to compare float directly
inline bool isEqual(float lhs, float rhs)
{
union fi {
float f;
uint32_t i;
} lfi, rfi;
lfi.f = lhs;
rfi.f = rhs;
return lfi.i == rfi.i;
}
// avoid to compare double directly
inline bool isEqual(double lhs, double rhs)
{
union di {
double d;
uint64_t i;
} ldi, rdi;
ldi.d = lhs;
rdi.d = rhs;
return ldi.i == rdi.i;
}
} } // cybozu::test
#ifndef CYBOZU_TEST_DISABLE_AUTO_RUN
int main(int argc, char *argv[])
{
return cybozu::test::autoRun.run(argc, argv);
}
#endif
/**
alert if !x
@param x [in]
*/
#define CYBOZU_TEST_ASSERT(x) cybozu::test::test(!!(x), "CYBOZU_TEST_ASSERT", #x, __FILE__, __LINE__)
/**
alert if x != y
@param x [in]
@param y [in]
*/
#define CYBOZU_TEST_EQUAL(x, y) { \
bool eq = cybozu::test::isEqual(x, y); \
cybozu::test::test(eq, "CYBOZU_TEST_EQUAL", #x ", " #y, __FILE__, __LINE__); \
if (!eq) { \
std::cout << "ctest: lhs=" << (x) << std::endl; \
std::cout << "ctest: rhs=" << (y) << std::endl; \
} \
}
/**
alert if fabs(x, y) >= eps
@param x [in]
@param y [in]
*/
#define CYBOZU_TEST_NEAR(x, y, eps) { \
bool isNear = fabs((x) - (y)) < eps; \
cybozu::test::test(isNear, "CYBOZU_TEST_NEAR", #x ", " #y, __FILE__, __LINE__); \
if (!isNear) { \
std::cout << "ctest: lhs=" << (x) << std::endl; \
std::cout << "ctest: rhs=" << (y) << std::endl; \
} \
}
#define CYBOZU_TEST_EQUAL_POINTER(x, y) { \
bool eq = x == y; \
cybozu::test::test(eq, "CYBOZU_TEST_EQUAL_POINTER", #x ", " #y, __FILE__, __LINE__); \
if (!eq) { \
std::cout << "ctest: lhs=" << static_cast<const void*>(x) << std::endl; \
std::cout << "ctest: rhs=" << static_cast<const void*>(y) << std::endl; \
} \
}
/**
alert if x[] != y[]
@param x [in]
@param y [in]
@param n [in]
*/
#define CYBOZU_TEST_EQUAL_ARRAY(x, y, n) { \
for (size_t i = 0, ie = (size_t)(n); i < ie; i++) { \
bool eq = cybozu::test::isEqual(x, y); \
cybozu::test::test(eq, "CYBOZU_TEST_EQUAL_ARRAY", #x ", " #y, __FILE__, __LINE__); \
if (!eq) { \
std::cout << "ctest: i=" << i << std::endl; \
std::cout << "ctest: lhs=" << (x) << std::endl; \
std::cout << "ctest: rhs=" << (y) << std::endl; \
} \
} \
}
/**
always alert
@param msg [in]
*/
#define CYBOZU_TEST_FAIL(msg) cybozu::test::test(false, "CYBOZU_TEST_FAIL", msg, __FILE__, __LINE__)
/**
verify message in exception
*/
#define CYBOZU_TEST_EXCEPTION_MESSAGE(statement, Exception, msg) \
{ \
int ret = 0; \
std::string errMsg; \
try { \
statement; \
ret = 1; \
} catch (const Exception& e) { \
errMsg = e.what(); \
if (errMsg.find(msg) == std::string::npos) { \
ret = 2; \
} \
} catch (...) { \
ret = 3; \
} \
if (ret) { \
cybozu::test::test(false, "CYBOZU_TEST_EXCEPTION_MESSAGE", #statement ", " #Exception ", " #msg, __FILE__, __LINE__); \
if (ret == 1) { \
std::cout << "ctest: no exception" << std::endl; \
} else if (ret == 2) { \
std::cout << "ctest: bad exception msg:" << errMsg << std::endl; \
} else { \
std::cout << "ctest: unexpected exception" << std::endl; \
} \
} else { \
cybozu::test::autoRun.set(true); \
} \
}
#define CYBOZU_TEST_EXCEPTION(statement, Exception) \
{ \
int ret = 0; \
try { \
statement; \
ret = 1; \
} catch (const Exception&) { \
} catch (...) { \
ret = 2; \
} \
if (ret) { \
cybozu::test::test(false, "CYBOZU_TEST_EXCEPTION", #statement ", " #Exception, __FILE__, __LINE__); \
if (ret == 1) { \
std::cout << "ctest: no exception" << std::endl; \
} else { \
std::cout << "ctest: unexpected exception" << std::endl; \
} \
} else { \
cybozu::test::autoRun.set(true); \
} \
}
/**
verify statement does not throw
*/
#define CYBOZU_TEST_NO_EXCEPTION(statement) \
try { \
statement; \
cybozu::test::autoRun.set(true); \
} catch (...) { \
cybozu::test::test(false, "CYBOZU_TEST_NO_EXCEPTION", #statement, __FILE__, __LINE__); \
}
/**
append auto unit test
@param name [in] module name
*/
#define CYBOZU_TEST_AUTO(name) \
void cybozu_test_ ## name(); \
struct cybozu_test_local_ ## name { \
cybozu_test_local_ ## name() \
{ \
cybozu::test::autoRun.append(#name, cybozu_test_ ## name); \
} \
} cybozu_test_local_instance_ ## name; \
void cybozu_test_ ## name()
/**
append auto unit test with fixture
@param name [in] module name
*/
#define CYBOZU_TEST_AUTO_WITH_FIXTURE(name, Fixture) \
void cybozu_test_ ## name(); \
void cybozu_test_real_ ## name() \
{ \
Fixture f; \
cybozu_test_ ## name(); \
} \
struct cybozu_test_local_ ## name { \
cybozu_test_local_ ## name() \
{ \
cybozu::test::autoRun.append(#name, cybozu_test_real_ ## name); \
} \
} cybozu_test_local_instance_ ## name; \
void cybozu_test_ ## name()
/**
setup fixture
@param Fixture [in] class name of fixture
@note cstr of Fixture is called before test and dstr of Fixture is called after test
*/
#define CYBOZU_TEST_SETUP_FIXTURE(Fixture) \
Fixture *cybozu_test_local_fixture; \
void cybozu_test_local_init() \
{ \
cybozu_test_local_fixture = new Fixture(); \
} \
void cybozu_test_local_term() \
{ \
delete cybozu_test_local_fixture; \
} \
struct cybozu_test_local_fixture_setup_ { \
cybozu_test_local_fixture_setup_() \
{ \
cybozu::test::autoRun.setup(cybozu_test_local_init, cybozu_test_local_term); \
} \
} cybozu_test_local_fixture_setup_instance_;

File diff suppressed because it is too large

View File

@@ -1,432 +0,0 @@
#pragma once
/**
Fp : finite field with characteristic 254bit prime
t = - 2^62 - 2^55 + 2^0
p = 36*t*t*t*t + 36*t*t*t + 24*t*t + 6*t + 1
*/
#include "zm.h"
#ifdef MIE_ATE_USE_GMP
#include <gmpxx.h>
#endif
namespace mie {
class Fp : public local::addsubmul<Fp,
local::comparable<Fp,
local::hasNegative<Fp,
local::inversible<Fp> > > > {
public:
typedef mie::Unit value_type;
/*
double size of Fp
*/
enum {
N = 32 / sizeof(Unit)
};
Fp()
{
}
MIE_FORCE_INLINE Fp(int x)
{
set(x);
}
MIE_FORCE_INLINE explicit Fp(const std::string& str)
{
set(str);
}
MIE_FORCE_INLINE explicit Fp(const mie::Unit *x)
{
std::copy(x, x + N, v_);
}
Fp(const mie::Vuint& rhs)
{
set(rhs);
}
void set(int x)
{
if (x == 0) {
clear();
} else if (x == 1) {
const mie::Vuint& r = getMontgomeryR();
assert(r.size() == N);
std::copy(&r[0], &r[0] + N, v_);
} else if (x > 0) {
v_[0] = x;
std::fill(v_ + 1, v_ + N, 0);
mul(*this, *this, montgomeryR2_);
} else {
v_[0] = -x;
std::fill(v_ + 1, v_ + N, 0);
mul(*this, *this, montgomeryR2_);
neg(*this, *this);
}
}
void set(const std::string& str)
{
set(mie::Vuint(str));
}
void set(const mie::Vuint& x)
{
assert(x < getModulo());
mie::Vuint y(x);
// count++;std::cout << "count=" << count << ", x=" << x << std::endl;
y *= getMontgomeryR();
y %= getModulo();
setDirect(*this, y);
}
static inline int compare(const Fp& x, const Fp& y)
{
return mie::local::PrimitiveFunction::compare(&x[0], N, &y[0], N);
}
static void (*add)(Fp& out, const Fp& x, const Fp& y);
// add without mod
static void (*addNC)(Fp& out, const Fp& x, const Fp& y);
static void (*subNC)(Fp& out, const Fp& x, const Fp& y);
static void (*shr1)(Fp& out, const Fp& x);
static void (*shr2)(Fp& out, const Fp& x);
static void (*sub)(Fp& out, const Fp& x, const Fp& y);
static void (*neg)(Fp& out, const Fp& x);
static void (*mul)(Fp& out, const Fp& x, const Fp& y);
static int (*preInv)(Fp& r, const Fp& x);
/*
z = 3z + 2x
*/
static inline void _3z_add_2xC(Fp& z, const Fp& x)
{
addNC(z, z, x);
addNC(z, z, z);
addNC(z, z, x);
fast_modp(z);
}
/*
z = 2z + 3x
*/
static inline void _2z_add_3x(Fp& z, const Fp& x)
{
addNC(z, x, z);
addNC(z, z, z);
addNC(z, z, x);
fast_modp(z);
}
inline friend std::ostream& operator<<(std::ostream& os, const Fp& x)
{
return os << x.toString(os.flags() & std::ios_base::hex ? 16 : 10);
}
inline friend std::istream& operator>>(std::istream& is, Fp& x)
{
std::string str;
mie::local::getDigits(is, str);
x.set(str);
return is;
}
MIE_FORCE_INLINE bool isZero() const
{
Unit t = 0;
for (size_t i = 0; i < N; i++) {
t |= v_[i];
}
return t == 0;
}
MIE_FORCE_INLINE void clear()
{
std::fill(v_, v_ + N, 0);
}
static inline void fromMont(Fp& y, const Fp& x)
{
mul(y, x, one_);
}
static inline void toMont(Fp& y, const Fp& x)
{
mul(y, x, montgomeryR2_);
}
// return real low value
Unit getLow() const
{
Fp t;
fromMont(t, *this);
return t.v_[0];
}
bool isOdd() const
{
return (getLow() & 1) != 0;
}
mie::Vuint get() const
{
Fp t;
fromMont(t, *this);
mie::Vuint ret(t.v_, N);
return ret;
}
static inline void inv(Fp& out, const Fp& x)
{
#ifdef MIE_USE_X64ASM
Fp r;
int k = preInv(r, x);
#else
static const Fp p(&p_[0]);
Fp u, v, r, s;
u = p;
v = x;
r.clear();
s.clear(); s[0] = 1; // s is real 1
int k = 0;
while (!v.isZero()) {
if ((u[0] & 1) == 0) {
shr1(u, u);
addNC(s, s, s);
} else if ((v[0] & 1) == 0) {
shr1(v, v);
addNC(r, r, r);
} else if (v >= u) {
subNC(v, v, u);
addNC(s, s, r);
shr1(v, v);
addNC(r, r, r);
} else {
subNC(u, u, v);
addNC(r, r, s);
shr1(u, u);
addNC(s, s, s);
}
k++;
}
if (r >= p) {
subNC(r, r, p);
}
assert(!r.isZero());
subNC(r, p, r);
#endif
/*
xr = 2^k
R = 2^256
get r2^(-k)R^2 = r 2^(512 - k)
*/
mul(out, r, invTbl_[k]);
}
void inverse()
{
inv(*this, *this);
}
static inline void divBy2(Fp &z, const Fp &x)
{
unsigned int i = x[0] & 0x1;
shr1(z, x);
addNC(z, z, halfTbl_[i]);
}
static inline void divBy4(Fp &z, const Fp &x)
{
unsigned int i = x[0] & 0x3;
shr2(z, x);
addNC(z, z, quarterTbl_[i]);
}
/* z <- z mod p for z in [0, 6p] */
static inline void fast_modp(Fp &z)
{
uint64_t t = z.v_[3] >> 61;
z -= getDirectP((int)t);
}
template<class T>
static MIE_FORCE_INLINE void setDirect(Fp& out, const T& in)
{
const size_t n = in.size();
// assert(n <= N);
if (n < N) {
std::copy(&in[0], &in[0] + n, out.v_);
std::fill(out.v_ + n, out.v_ + N, 0);
} else {
// ignore in[i] for i >= N
std::copy(&in[0], &in[0] + N, out.v_);
}
}
std::string toString(int base = 10) const { return get().toString(base); }
MIE_FORCE_INLINE const Unit& operator[](size_t i) const { return v_[i]; }
MIE_FORCE_INLINE Unit& operator[](size_t i) { return v_[i]; }
MIE_FORCE_INLINE size_t size() const { return N; }
static void setModulo(const mie::Vuint& p, int mode, bool useMulx = true, bool definedBN_SUPPORT_SNARK =
#ifdef BN_SUPPORT_SNARK
true
#else
false
#endif
);
static inline const mie::Vuint& getModulo() { return p_; }
static const Fp& getDirectP(int n); /* n = 0..6 */
static inline const mie::Vuint& getMontgomeryR() { return montgomeryR_; }
private:
MIE_ALIGN(16) Unit v_[N];
static mie::Vuint p_;
static mie::Fp invTbl_[512];
public:
static mie::Fp *halfTbl_; // [2] = [0, 1/2 mod p]
private:
static mie::Fp *quarterTbl_; // [4] = [0, 1/4, 2/4, 3/4]
static mie::Vuint montgomeryR_; // 1 = 1r
static mie::Vuint p_add1_div4_; // (p + 1) / 4
static mie::Fp montgomeryR2_; // m(x, r^2) = xr ; x -> xr
static mie::Fp one_; // 1
// m(xr, r^(-2)r) = xr^(-1) ; xr -> xr^(-1)
static void setTablesForDiv(const mie::Vuint& p);
public:
static inline void square(Fp& out, const Fp& x) { mul(out, x, x); }
#ifdef MIE_ATE_USE_GMP
static void toMpz(mpz_class& y, const Fp& x)
{
mpz_import(y.get_mpz_t(), N, -1, sizeof(Unit), 0, 0, x.v_);
}
static void fromMpz(Fp& y, const mpz_class& x)
{
size_t size;
mpz_export(y.v_, &size, -1, sizeof(Unit), 0, 0, x.get_mpz_t());
for (size_t i = size; i < N; i++) {
y.v_[i] = 0;
}
}
#endif
static bool squareRoot(Fp& y, const Fp& x)
{
Fp t;
t = mie::power(x, p_add1_div4_);
if (t * t != x) return false;
y = t;
return true;
}
struct Dbl : public local::addsubmul<Dbl,
local::comparable<Dbl,
local::hasNegative<Dbl> > > {
enum {
SIZE = sizeof(Unit) * N * 2
};
static MIE_FORCE_INLINE void setDirect(Dbl &out, const mie::Vuint &in)
{
const size_t n = in.size();
if (n < N * 2) {
std::copy(&in[0], &in[0] + n, out.v_);
std::fill(out.v_ + n, out.v_ + N * 2, 0);
} else {
// ignore in[i] for i >= N * 2
std::copy(&in[0], &in[0] + N * 2, out.v_);
}
}
static MIE_FORCE_INLINE void setDirect(Dbl &out, const std::string &in)
{
mie::Vuint t(in);
setDirect(out, t);
}
template<class T>
void setDirect(const T &in) { setDirect(*this, in); }
MIE_FORCE_INLINE void clear()
{
std::fill(v_, v_ + N * 2, 0);
}
Unit *ptr() { return v_; }
const Unit *const_ptr() const { return v_; }
mie::Vuint getDirect() const { return mie::Vuint(v_, N * 2); }
MIE_FORCE_INLINE const Unit& operator[](size_t i) const { return v_[i]; }
MIE_FORCE_INLINE Unit& operator[](size_t i) { return v_[i]; }
MIE_FORCE_INLINE size_t size() const { return N * 2; }
std::string toString(int base = 10) const
{
return ("Dbl(" + getDirect().toString(base) + ")");
}
friend inline std::ostream& operator<<(std::ostream& os, const Dbl& x)
{
return os << x.toString(os.flags() & std::ios_base::hex ? 16 : 10);
}
Dbl() {}
explicit Dbl(const Fp &x)
{
mul(*this, x, montgomeryR2_);
}
explicit Dbl(const std::string &str) { setDirect(*this, str); }
static inline int compare(const Dbl& x, const Dbl& y)
{
return mie::local::PrimitiveFunction::compare(&x[0], N * 2, &y[0], N * 2);
}
typedef void (uni_op)(Dbl &z, const Dbl &x);
typedef void (bin_op)(Dbl &z, const Dbl &x, const Dbl &y);
/*
z = (x + y) mod px
*/
static bin_op *add;
static bin_op *addNC;
static uni_op *neg;
/*
z = (x - y) mod px
*/
static bin_op *sub;
static bin_op *subNC;
static void subOpt1(Dbl &z, const Dbl &x, const Dbl &y)
{
assert(&z != &x);
assert(&z != &y);
addNC(z, x, pNTbl_[1]);
subNC(z, z, y);
}
/*
z = x * y
*/
static void (*mul)(Dbl &z, const Fp &x, const Fp &y);
/*
z = MontgomeryReduction(x)
*/
static void (*mod)(Fp &z, const Dbl &x);
/*
x <- x mod pN
*/
static Dbl *pNTbl_; // [4];
private:
MIE_ALIGN(16) Unit v_[N * 2];
};
};
namespace util {
template<>
struct IntTag<mie::Fp> {
typedef size_t value_type;
static inline value_type getBlock(const mie::Fp&, size_t)
{
err();
return 0;
}
static inline size_t getBlockSize(const mie::Fp&)
{
err();
return 0;
}
static inline void err()
{
printf("Use mie::Vuint intead of Fp for the 3rd parameter for ScalarMulti\n");
exit(1);
}
};
} // mie::util
} // mie

View File

@@ -1,95 +0,0 @@
import java.io.*;
import mcl.bn254.*;
public class BN254Test {
static {
System.loadLibrary("bn254_if_wrap");
}
public static void main(String argv[]) {
try {
BN254.SystemInit();
Fp aa = new Fp("12723517038133731887338407189719511622662176727675373276651903807414909099441");
Fp ab = new Fp("4168783608814932154536427934509895782246573715297911553964171371032945126671");
Fp ba = new Fp("13891744915211034074451795021214165905772212241412891944830863846330766296736");
Fp bb = new Fp("7937318970632701341203597196594272556916396164729705624521405069090520231616");
Ec1 g1 = new Ec1(new Fp(-1), new Fp(1));
Ec2 g2 = new Ec2(new Fp2(aa, ab), new Fp2(ba, bb));
System.out.println("g1=" + g1);
System.out.println("g2=" + g2);
assertBool("g1 is on EC", g1.isValid());
assertBool("g2 is on twist EC", g2.isValid());
Mpz r = BN254.GetParamR();
System.out.println("r=" + r);
{
Ec1 t = new Ec1(g1);
t.mul(r);
assertBool("orgder of g1 == r", t.isZero());
}
{
Ec2 t = new Ec2(g2);
t.mul(r);
assertBool("order of g2 == r", t.isZero());
}
Mpz a = new Mpz("123456789012345");
Mpz b = new Mpz("998752342342342342424242421");
// scalar-multiplication sample
{
Mpz c = new Mpz(a);
c.add(b);
Ec1 Pa = new Ec1(g1); Pa.mul(a);
Ec1 Pb = new Ec1(g1); Pb.mul(b);
Ec1 Pc = new Ec1(g1); Pc.mul(c);
Ec1 out = new Ec1(Pa);
out.add(Pb);
assertEqual("check g1 * c = g1 * a + g1 * b", Pc, out);
}
Fp12 e = new Fp12();
// calc e : G2 x G1 -> G3 pairing
e.pairing(g2, g1); // e = e(g2, g1)
System.out.println("e=" + e);
{
Fp12 t = new Fp12(e);
t.power(r);
assertEqual("order of e == r", t, new Fp12(1));
}
Ec2 g2a = new Ec2(g2);
g2a.mul(a);
Fp12 ea1 = new Fp12();
ea1.pairing(g2a, g1);
Fp12 ea2 = new Fp12(e);
ea2.power(a); // ea2 = e^a
assertEqual("e(g2 * a, g1) = e(g2, g1)^a", ea1, ea2);
Ec1 q1 = new Ec1(g1);
q1.mul(new Mpz(12345));
assertBool("q1 is on EC", q1.isValid());
Fp12 e1 = new Fp12();
Fp12 e2 = new Fp12();
e1.pairing(g2, g1); // e1 = e(g2, g1)
e2.pairing(g2, q1); // e2 = e(g2, q1)
Ec1 q2 = new Ec1(g1);
q2.add(q1);
e.pairing(g2, q2); // e = e(g2, q2)
e1.mul(e2);
assertEqual("e = e1 * e2", e, e1);
} catch (RuntimeException e) {
System.out.println("unknown exception :" + e);
}
}
public static void assertBool(String msg, Boolean b) {
if (b) {
System.out.println("OK : " + msg);
} else {
System.out.println("NG : " + msg);
}
}
public static void assertEqual(String msg, Object lhs, Object rhs) {
if (lhs.equals(rhs)) {
System.out.println("OK : " + msg);
} else {
System.out.println("NG : " + msg + ", lhs = " + lhs + ", rhs = " + rhs);
}
}
}

View File

@@ -1,48 +0,0 @@
MODULE_NAME=bn254
JAVA_NAME=BN254
IF_NAME=$(MODULE_NAME)_if
WRAP_CXX=$(IF_NAME)_wrap.cxx
include ../common.mk
LDFLAGS=-lgmp -lgmpxx
ifeq ($(UNAME_S),Darwin)
JAVA_HOME=$(shell /usr/libexec/java_home)
JAVA_INC=$(addprefix -I$(JAVA_HOME)/include,/ /darwin)
LIB_SUF=dylib
else
JAVA_HOME=$(realpath $(dir $(realpath $(shell which javac)))..)
JAVA_INC=-I$(JAVA_HOME)/include
LIB_SUF=so
CFLAGS+=-z noexecstack
LIB+=-lrt
endif
CFLAGS+= -shared -fPIC $(JAVA_INC)
PACKAGE_NAME=mcl.$(MODULE_NAME)
PACKAGE_DIR=$(subst .,/,$(PACKAGE_NAME))
TARGET=../bin/lib$(IF_NAME)_wrap.$(LIB_SUF)
JAVA_EXE=cd ../bin && LD_LIBRARY_PATH=./:$(LD_LIBRARY_PATH) java -classpath ../java
all: $(TARGET)
$(IF_NAME)_wrap.cxx: $(IF_NAME).i $(IF_NAME).hpp
$(MKDIR) $(PACKAGE_DIR)
swig -java -package $(PACKAGE_NAME) -outdir $(PACKAGE_DIR) -c++ -Wall $<
$(TARGET): $(IF_NAME)_wrap.cxx
$(MKDIR) ../bin
$(PRE)$(CXX) $? -o $@ $(CFLAGS) $(LDFLAGS) -I../include ../src/zm2.cpp ../src/zm.cpp -I../../xbyak
%.class: %.java
javac $<
$(JAVA_NAME)Test.class: $(JAVA_NAME)Test.java $(TARGET)
jar:
jar cvf $(MODULE_NAME).jar mcl
test: $(JAVA_NAME)Test.class $(TARGET)
$(JAVA_EXE) $(JAVA_NAME)Test
clean:
rm -rf *.class $(TARGET) $(PACKAGE_DIR)/*.class $(IF_NAME)_wrap.cxx

View File

@@ -1,282 +0,0 @@
#pragma once
/**
@file
@brief api for Java
@author herumi
@note modified new BSD license
http://opensource.org/licenses/BSD-3-Clause
*/
#ifndef MIE_ATE_USE_GMP
#define MIE_ATE_USE_GMP
#endif
#include "bn.h"
inline void SystemInit() throw(std::exception)
{
::bn::Param::init();
}
class Fp2;
class Fp12;
class Ec1;
class Ec2;
class Mpz {
mpz_class self_;
friend class Fp;
friend class Fp2;
friend class Fp12;
friend class Ec1;
friend class Ec2;
public:
Mpz() {}
Mpz(const Mpz& x) : self_(x.self_) {}
Mpz(int x) throw(std::exception) : self_(x) {}
Mpz(const std::string& str) throw(std::exception)
{
set(str);
}
void set(int x) throw(std::exception) { self_ = x; }
void set(const std::string& str) throw(std::exception)
{
self_.set_str(str, 0);
}
std::string toString() const throw(std::exception)
{
return self_.get_str();
}
bool equals(const Mpz& rhs) const { return self_ == rhs.self_; }
int compareTo(const Mpz& rhs) const { return mpz_cmp(self_.get_mpz_t(), rhs.self_.get_mpz_t()); }
void add(const Mpz& rhs) throw(std::exception) { self_ += rhs.self_; }
void sub(const Mpz& rhs) throw(std::exception) { self_ -= rhs.self_; }
void mul(const Mpz& rhs) throw(std::exception) { self_ *= rhs.self_; }
void mod(const Mpz& rhs) throw(std::exception) { self_ %= rhs.self_; }
};
class Fp {
::bn::Fp self_;
friend class Fp2;
friend class Ec1;
public:
Fp() {}
Fp(const Fp& x) : self_(x.self_) {}
Fp(int x) : self_(x) {}
Fp(const std::string& str) throw(std::exception)
{
self_.set(str);
}
void set(int x) { self_ = x; }
void set(const std::string& str) throw(std::exception)
{
self_.set(str);
}
std::string toString() const throw(std::exception)
{
return self_.toString();
}
bool equals(const Fp& rhs) const { return self_ == rhs.self_; }
void add(const Fp& rhs) throw(std::exception) { self_ += rhs.self_; }
void sub(const Fp& rhs) throw(std::exception) { self_ -= rhs.self_; }
void mul(const Fp& rhs) throw(std::exception) { self_ *= rhs.self_; }
void power(const Mpz& x)
{
self_ = mie::power(self_, x.self_);
}
};
class Fp2 {
::bn::Fp2 self_;
friend class Ec2;
public:
Fp2() {}
Fp2(const Fp2& x) : self_(x.self_) {}
Fp2(int a) : self_(a) {}
Fp2(int a, int b) : self_(a, b) {}
Fp2(const Fp& a, const Fp& b) throw(std::exception)
: self_(a.self_, b.self_)
{
}
Fp2(const std::string& a, const std::string& b) throw(std::exception)
: self_(Fp(a).self_, Fp(b).self_)
{
}
Fp& getA() { return *reinterpret_cast<Fp*>(&self_.a_); }
Fp& getB() { return *reinterpret_cast<Fp*>(&self_.b_); }
void set(const std::string& str) throw(std::exception)
{
self_.set(str);
}
std::string toString() const throw(std::exception)
{
return self_.toString();
}
bool equals(const Fp2& rhs) const { return self_ == rhs.self_; }
void add(const Fp2& rhs) throw(std::exception) { self_ += rhs.self_; }
void sub(const Fp2& rhs) throw(std::exception) { self_ -= rhs.self_; }
void mul(const Fp2& rhs) throw(std::exception) { self_ *= rhs.self_; }
void power(const Mpz& x)
{
self_ = mie::power(self_, x.self_);
}
};
class Fp12 {
::bn::Fp12 self_;
public:
Fp12() {}
Fp12(const Fp12& x) : self_(x.self_) {}
Fp12(int x) : self_(x) {}
void set(const std::string& str) throw(std::exception)
{
std::istringstream iss(str);
iss >> self_;
}
std::string toString() const throw(std::exception)
{
std::ostringstream oss;
oss << self_;
return oss.str();
}
bool equals(const Fp12& rhs) const { return self_ == rhs.self_; }
void add(const Fp12& rhs) throw(std::exception) { self_ += rhs.self_; }
void sub(const Fp12& rhs) throw(std::exception) { self_ -= rhs.self_; }
void mul(const Fp12& rhs) throw(std::exception) { self_ *= rhs.self_; }
void pairing(const Ec2& ec2, const Ec1& ec1);
void power(const Mpz& x)
{
self_ = mie::power(self_, x.self_);
}
};
class Ec1 {
::bn::Ec1 self_;
friend class Fp12;
public:
Ec1() { self_.clear(); }
Ec1(const Ec1& x) : self_(x.self_) {}
Ec1(const Fp& x, const Fp& y) throw(std::exception)
{
set(x, y);
}
Ec1(const Fp& x, const Fp& y, const Fp& z) throw(std::exception)
{
set(x, y, z);
}
bool isValid() const { return self_.isValid(); }
void set(const Fp& x, const Fp& y) throw(std::exception)
{
self_.set(x.self_, y.self_);
}
void set(const Fp& x, const Fp& y, const Fp& z) throw(std::exception)
{
self_.set(x.self_, y.self_, z.self_);
}
void set(const std::string& str) throw(std::exception)
{
std::istringstream iss(str);
iss >> self_;
}
std::string toString() const throw(std::exception)
{
std::ostringstream oss;
oss << self_;
return oss.str();
}
bool equals(const Ec1& rhs) const { return self_ == rhs.self_; }
bool isZero() const { return self_.isZero(); }
void clear() { self_.clear(); }
void dbl() { ::bn::Ec1::dbl(self_, self_); }
void neg() { ::bn::Ec1::neg(self_, self_); }
void add(const Ec1& rhs) { ::bn::Ec1::add(self_, self_, rhs.self_); }
void sub(const Ec1& rhs) { ::bn::Ec1::sub(self_, self_, rhs.self_); }
void mul(const Mpz& rhs) { ::bn::Ec1::mul(self_, self_, rhs.self_); }
Fp& getX() { return *reinterpret_cast<Fp*>(&self_.p[0]); }
Fp& getY() { return *reinterpret_cast<Fp*>(&self_.p[1]); }
Fp& getZ() { return *reinterpret_cast<Fp*>(&self_.p[2]); }
};
class Ec2 {
::bn::Ec2 self_;
friend class Fp12;
public:
Ec2() {}
Ec2(const Ec2& x) : self_(x.self_) {}
Ec2(const Fp2& x, const Fp2& y) throw(std::exception)
{
set(x, y);
}
Ec2(const Fp2& x, const Fp2& y, const Fp2& z) throw(std::exception)
{
set(x, y, z);
}
bool isValid() const { return self_.isValid(); }
void set(const Fp2& x, const Fp2& y) throw(std::exception)
{
self_.set(x.self_, y.self_);
}
void set(const Fp2& x, const Fp2& y, const Fp2& z) throw(std::exception)
{
self_.set(x.self_, y.self_, z.self_);
}
void set(const std::string& str) throw(std::exception)
{
std::istringstream iss(str);
iss >> self_;
}
std::string toString() const throw(std::exception)
{
std::ostringstream oss;
oss << self_;
return oss.str();
}
bool equals(const Ec2& rhs) const { return self_ == rhs.self_; }
bool isZero() const { return self_.isZero(); }
void clear() { self_.clear(); }
void dbl() { ::bn::Ec2::dbl(self_, self_); }
void neg() { ::bn::Ec2::neg(self_, self_); }
void add(const Ec2& rhs) { ::bn::Ec2::add(self_, self_, rhs.self_); }
void sub(const Ec2& rhs) { ::bn::Ec2::sub(self_, self_, rhs.self_); }
void mul(const Mpz& rhs) { ::bn::Ec2::mul(self_, self_, rhs.self_); }
Fp2& getX() { return *reinterpret_cast<Fp2*>(&self_.p[0]); }
Fp2& getY() { return *reinterpret_cast<Fp2*>(&self_.p[1]); }
Fp2& getZ() { return *reinterpret_cast<Fp2*>(&self_.p[2]); }
};
void Fp12::pairing(const Ec2& ec2, const Ec1& ec1)
{
::bn::opt_atePairing(self_, ec2.self_, ec1.self_);
}
inline const Mpz& GetParamR()
{
static Mpz r("16798108731015832284940804142231733909759579603404752749028378864165570215949");
return r;
}
#ifdef _MSC_VER
#if _MSC_VER == 1900
#ifdef _DEBUG
#pragma comment(lib, "14/mpird.lib")
#pragma comment(lib, "14/mpirxxd.lib")
#else
#pragma comment(lib, "14/mpir.lib")
#pragma comment(lib, "14/mpirxx.lib")
#endif
#elif _MSC_VER == 1800
#ifdef _DEBUG
#pragma comment(lib, "12/mpird.lib")
#pragma comment(lib, "12/mpirxxd.lib")
#else
#pragma comment(lib, "12/mpir.lib")
#pragma comment(lib, "12/mpirxx.lib")
#endif
#else
#ifdef _DEBUG
#pragma comment(lib, "mpird.lib")
#pragma comment(lib, "mpirxxd.lib")
#else
#pragma comment(lib, "mpir.lib")
#pragma comment(lib, "mpirxx.lib")
#endif
#endif
#endif


@@ -1,59 +0,0 @@
%module BN254
%include "std_string.i"
%include "std_except.i"
%{
#include "bn254_if.hpp"
%}
%typemap(javacode) Mpz %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%typemap(javacode) Fp %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%typemap(javacode) Fp2 %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%typemap(javacode) Fp12 %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%typemap(javacode) Ec1 %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%typemap(javacode) Ec2 %{
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof $javaclassname)) return false;
return equals(($javaclassname)obj);
}
%}
%include "bn254_if.hpp"


@@ -1,126 +0,0 @@
# Java class files (under construction)
## Build
* Install [swig](http://www.swig.org/) and Java.
### Windows
* Set SWIG to the path to swig in make_wrap.bat.
* Set JAVA_DIR to the path to Java in set-java-path.bat.
* Use the following commands:
```
> cd java
> make_wrap.bat
```
* bin/bn254_if_wrap.dll is a DLL for Java.
### Linux
* Set JAVA_INC to the path to the Java headers in the Makefile.
* Run the following command:
> make test
* bin/libbn254_if_wrap.so is a shared library for Java.
## API and Class
### Setup
* First, call these functions:
```
> System.loadLibrary("bn254_if_wrap");
> BN254.SystemInit();
```
### class Mpz
* A wrapper class for GMP's mpz_class.
* Mpz(int), Mpz(String)
* void set(int x)
* Set x to this.
* void set(String x)
* Set x to this.
* void add(Mpz x)
* Set (this + x) to this.
* void sub(Mpz x)
* Set (this - x) to this.
* void mul(Mpz x)
* Set (this * x) to this.
* void mod(Mpz x)
* Set (this % x) to this.
### class Fp, Fp2, Fp12
* Wrapper classes for bn::Fp, bn::Fp2, and bn::Fp12.
#### common method
* Fp(int), Fp(String)
* void set(int x)
* Set x to this.
* void set(String x)
* Set x to this.
* The format of Fp is "123", "0xabc"
* The format of Fp2 is "[a,b]" ; a, b are the format of Fp
* The format of Fp12 is "[[[a0,b0],[a1,b1],[a2,b2]], [[a3,b3],[a4,b4],[a5,b5]]]"
* void add(Fp x)
* Set (this + x) to this.
* void sub(Fp x)
* Set (this - x) to this.
* void mul(Fp x)
* Set (this * x) to this.
* void power(Mpz x)
* Set (this ^ x) to this.
#### Fp2
* Fp2(int a, int b)
* Set (a, b) to this.
* Fp2(Fp a, Fp b)
* Set (a, b) to this.
* Fp2(String a, String b)
* Set (a, b) to this.
* Fp getA()
* Return the reference to a where this = (a, b).
* Fp getB()
* Return the reference to b where this = (a, b).
#### Fp12
* pairing(Ec2 ec2, Ec1 ec1)
* Set opt_ate_pairing(ec2, ec1) to this.
### Ec1, Ec2
* Wrapper classes for bn::Ec1 and bn::Ec2.
* Ec1(Fp x, Fp y)
* Set (x, y, 1) to this.
* Ec1(Fp x, Fp y, Fp z)
* Set (x:y:z) to this.
* Ec1(String x)
* Set x to this.
* The format of Ec1 is "x_y" or "0" ; x, y are the format of Fp. "0" is the infinity point.
* The format of Ec2 is "x_y" or "0" ; x, y are the format of Fp2.
* Boolean isValid()
* Is (x:y:z) on the curve?
* Boolean isZero()
* Is this equal to the infinity point?
* void clear()
* Set this to the infinity point.
* dbl()
* Set (this * 2) to this.
* neg()
* Set (-this) to this.
* add(Ec1 x)
* Set (this + x) to this.
* sub(Ec1 x)
* Set (this - x) to this.
* mul(Mpz& x)
* Set (this * x) to this.
* Fp getX()
* Return the value of x.
* Fp getY()
* Return the value of y.
* Fp getZ()
* Return the value of z.
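### Example
Putting the classes above together, here is a minimal end-to-end sketch (not part of the distribution): the `mcl.bn254` package name follows make_wrap.bat, the `PairingSketch` class name is made up for illustration, and the generator coordinates are copied from the library's test code (java_api.cpp). It checks the bilinearity e(g2, g1 * a) = e(g2, g1)^a.
```
import mcl.bn254.*;

public class PairingSketch {
    public static void main(String[] args) {
        // Load bn254_if_wrap.dll / libbn254_if_wrap.so and initialize the library.
        System.loadLibrary("bn254_if_wrap");
        BN254.SystemInit();

        // Generators of G1 and G2 (values taken from java_api.cpp).
        Ec1 g1 = new Ec1(new Fp(-1), new Fp(1));
        Ec2 g2 = new Ec2(
            new Fp2(new Fp("12723517038133731887338407189719511622662176727675373276651903807414909099441"),
                    new Fp("4168783608814932154536427934509895782246573715297911553964171371032945126671")),
            new Fp2(new Fp("13891744915211034074451795021214165905772212241412891944830863846330766296736"),
                    new Fp("7937318970632701341203597196594272556916396164729705624521405069090520231616")));

        // e = e(g2, g1)
        Fp12 e = new Fp12();
        e.pairing(g2, g1);

        // lhs = e(g2, g1 * a)
        Mpz a = new Mpz("123456789012345");
        Ec1 g1a = new Ec1(g1);
        g1a.mul(a);
        Fp12 lhs = new Fp12();
        lhs.pairing(g2, g1a);

        // rhs = e(g2, g1)^a
        Fp12 rhs = new Fp12(e);
        rhs.power(a);

        System.out.println("bilinear: " + lhs.equals(rhs));
    }
}
```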


@@ -1,19 +0,0 @@
@echo off
call set-java-path.bat
set JAVA_INCLUDE=%JAVA_DIR%\include
set SWIG=..\..\swig\swigwin-3.0.2\swig.exe
set PACKAGE_NAME=mcl.bn254
set PACKAGE_DIR=%PACKAGE_NAME:.=\%
echo [[run swig]]
mkdir %PACKAGE_DIR%
echo %SWIG% -java -package %PACKAGE_NAME% -outdir %PACKAGE_DIR% -c++ -Wall bn254_if.i
%SWIG% -java -package %PACKAGE_NAME% -outdir %PACKAGE_DIR% -c++ -Wall bn254_if.i
echo [[make dll]]
mkdir ..\bin
cl /MD /DNOMINMAX /DNDEBUG /LD /Ox /EHsc bn254_if_wrap.cxx ../src/zm.cpp ../src/zm2.cpp -I%JAVA_INCLUDE% -I%JAVA_INCLUDE%\win32 -I../include -I../../cybozulib_ext/mpir/include -I../../xbyak /link /LIBPATH:../../cybozulib_ext/mpir/lib /OUT:../bin/bn254_if_wrap.dll
call run-bn254.bat
echo [[make jar]]
%JAVA_DIR%\bin\jar cvf bn254.jar mcl


@@ -1,8 +0,0 @@
@echo off
echo [[compile BN254Test.java]]
%JAVA_DIR%\bin\javac BN254Test.java
echo [[run BN254Test]]
pushd ..\bin
%JAVA_DIR%\bin\java -classpath ..\java BN254Test %1 %2 %3 %4 %5 %6
popd


@@ -1,8 +0,0 @@
@echo off
if "%JAVA_HOME%"=="" (
set JAVA_DIR=c:/p/Java/jdk
) else (
set JAVA_DIR=%JAVA_HOME%
)
echo JAVA_DIR=%JAVA_DIR%
rem set PATH=%PATH%;%JAVA_DIR%\bin


@@ -1,71 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}</ProjectGuid>
<RootNamespace>sample</RootNamespace>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)debug.props" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)release.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<TargetName>sample</TargetName>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<TargetName>sample</TargetName>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<Optimization>Disabled</Optimization>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<Link>
<AdditionalDependencies>kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);zmlib.lib</AdditionalDependencies>
<SubSystem>Console</SubSystem>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="$(SolutionDir)test\sample.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>


@@ -1,71 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{F4B8F9CD-2E24-4374-9BF6-6886347E861B}</ProjectGuid>
<RootNamespace>test_bn</RootNamespace>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)debug.props" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)release.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<TargetName>test_bn</TargetName>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<TargetName>test_bn</TargetName>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<Optimization>Disabled</Optimization>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<Link>
<AdditionalDependencies>kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);zmlib.lib</AdditionalDependencies>
<SubSystem>Console</SubSystem>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="$(SolutionDir)test\bn.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>


@@ -1,80 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{2F9B80B9-D6A5-4534-94A3-7B42F5623193}</ProjectGuid>
<RootNamespace>zmlib</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v110</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)debug.props" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(SolutionDir)common.props" />
<Import Project="$(SolutionDir)release.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<OutDir>$(SolutionDir)lib\</OutDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<OutDir>$(SolutionDir)lib\</OutDir>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<Optimization>Disabled</Optimization>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<SDLCheck>true</SDLCheck>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="$(SolutionDir)src\zm.cpp" />
<ClCompile Include="$(SolutionDir)src\zm2.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>


@@ -1,245 +0,0 @@
High-Speed Software Implementation of the Optimal Ate Pairing over Barreto-Naehrig Curves
=============
This library provides functionality to compute the optimal ate pairing over Barreto-Naehrig (BN) curves.
It is released under the [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause).
Now I'm developing a new pairing library [mcl](https://github.com/herumi/mcl/), which is more portable and supports larger primes than this library though it is a little slower.
History
-------------
* 2015/May/15: add [java api](java/java.md)
* 2014/Jun/15: support a BN curve for SNARKs, incorporating code from [libsnark](https://github.com/scipr-lab/libsnark)
* 2013/Jun/02: support `mulx` on [Haswell](http://en.wikipedia.org/wiki/Haswell_%28microarchitecture%29)
* 2013/Mar/08: add elliptic curve class
* 2012/Jan/30: rewrite ate pairing according to [Faster explicit formulas for computing pairings over ordinary curves](http://www.patricklonga.bravehost.com/speed_pairing.html)
* 2010/Sep/8: change twist xi from u + 12 to u
* 2010/Jul/15: use cyclotomic squaring for final exponentiation
* 2010/Jun/18: first release
Overview
-------------
The following two BN curves are supported:
1. a BN curve over the 254-bit prime p = 36z^4 + 36z^3 + 24z^2 + 6z + 1 where z = -(2^62 + 2^55 + 1); and
2. a BN curve over a 254-bit prime p such that n := p + 1 - t has high 2-adicity.
By default, the first curve (which we call CurveFp254BNb) is used; when the flag `SUPPORT_SNARK` is set, the second curve (which we call CurveSNARK) is used instead.
* __CurveFp254BNb__
The value of z was first found by [\[NASKM\]](http://dx.doi.org/10.1007/978-3-540-85538-5_13).
The curve instantiated by this z was investigated by [\[PSNB\]](http://eprint.iacr.org/2010/429) with a view to efficient implementation.
Our library implements the fast algorithm proposed by [\[AKLGL\]](http://eprint.iacr.org/2010/526) for this curve.
The performance of this library is competitive with the state-of-the-art implementation reported in [\[ABLR\]](http://sac2013.irmacs.sfu.ca/slides/s1.pdf).
* __CurveSNARK__
Support for the second curve builds on code provided by [SCIPR Lab](http://www.scipr-lab.org/) in [libsnark](https://github.com/scipr-lab/libsnark). The curve was specifically selected for speeding up __Succinct Non-interactive ARguments of Knowledge__ (SNARKs), which benefit from its high 2-adicity (see [\[BCGTV13\]](http://eprint.iacr.org/2013/507) and [\[BCTV14\]](http://eprint.iacr.org/2013/879)).
Pairing computations on the first curve are more efficient, and the performance numbers reported below (and in our papers) are achieved using this curve (which is preferred for applications that do not benefit from high 2-adicity).
Note that the old parameters in \[BDMOHT\] are no longer used.
Parameters
-------------
The curve equation for a BN curve is:
E/Fp: y^2 = x^3 + b .
The two supported BN curves have the following parameters:
1. b = 2 and p = 16798108731015832284940804142231733909889187121439069848933715426072753864723; and
2. b = 3 and p = 21888242871839275222246405745257275088696311157297823662689037894645226208583.
As usual,
* the cyclic group G1 (aka Ec1) is instantiated as E(Fp)[n] where n := p + 1 - t;
* the cyclic group G2 (aka Ec2) is instantiated as the inverse image of E'(Fp^2)[n] under a twisting isomorphism from E' to E; and
* the pairing e: G1 x G2 -> Fp12 is the optimal ate pairing.
The field Fp12 is constructed via the following tower:
* Fp2 = Fp[u] / (u^2 + 1)
* Fp6 = Fp2[v] / (v^3 - Xi) where Xi = u + 1
* Fp12 = Fp6[w] / (w^2 - v)
Requirements
-------------
* OS: 64-bit Windows; 64-bit Linux; Mac OS X
* CPU: x64 Intel; AMD processor
* C++ compiler: Visual Studio 2012; gcc 4.4.1 or later
Build instructions
-------------
### Windows
    > git clone git://github.com/herumi/xbyak.git
    > git clone git://github.com/herumi/ate-pairing.git
    > git clone git://github.com/herumi/cybozulib-ext.git ; compiled binary of mpir
Open `ate/ate.sln` and compile `test_bn` with Release mode. The produced binary is `ate/x64/Release/test_bn.exe`.
### Cygwin
Install `mingw64-x86_64-gcc-g++` (run Cygwin setup and search `mingw64`). Then use the following commands:
    PATH=/usr/x86_64-w64-mingw32/sys-root/mingw/bin/:$PATH
    make -j
    test/bn.exe
Note that `test/bn.exe` uses `mulx` if possible; if you do not want to use it, run the executable as `test/bn.exe -mulx 0`. (This allows you to verify the difference with/without mulx on Haswell.)
### Linux
Use the following commands:
    $ git clone git://github.com/herumi/xbyak.git
    $ git clone git://github.com/herumi/ate-pairing.git
    $ cd ate-pairing
    $ make -j
    $ test/bn
The library [xbyak](https://github.com/herumi/xbyak) is a x86/x86-64 JIT assembler for C++, developed for efficient pairing implementations. (See also [this webpage](http://homepage1.nifty.com/herumi/soft/xbyak_e.html).) Note that binaries other than `test/bn` are used for testing purposes only.
* This implementation uses dynamically-generated code, so you will get the error
`zmInit ERR:can't protect` if execution of code on the heap is disallowed by
some modern systems.
For example, on Fedora 20, run `sudo setsebool -P allow_execheap 1` to allow heap execution.
By default, the first BN curve is used. If instead you want to use the second BN curve (specialized to SNARKs), modify the fourth line above to:
    $ make -j SUPPORT_SNARK=1
* REMARK. If you use a library (libzm.a) built with `SUPPORT_SNARK=1`, you must define the `BN_SUPPORT_SNARK` macro when compiling your own code.
Usage
-------------
See the function `sample2()` in [sample.cpp](https://github.com/herumi/ate-pairing/blob/master/test/sample.cpp). Also, you can use `mpz_class` for scalar multiplication of points on the elliptic curves
if `MIE_ATE_USE_GMP` is defined. For instance:
    using namespace bn;
    Param::init();
    const Ec2 g2(...);
    const Ec1 g1(...);
    mpz_class a("123456789");
    mpz_class b("98765432");
    Ec1 g1a = g1 * a;
    Ec2 g2b = g2 * b;
    Fp12 e;
    opt_atePairing(e, g2b, g1a);
Usage for Java
-------------
See [java.md](java/java.md).
Sample code is in [BN254Test.java](java/BN254Test.java).
Operation costs
-------------
Let mu be the cost of an _unreduced multiplication_ producing a double-precision result (i.e., 256-bit int x 256-bit int to 512-bit int), and let r be the cost of a _modular reduction_ of a double-precision integer (i.e., 512-bit int to 256-bit int in Fp). Then, for us,
* Fp::mul = mu + r
* Fp2::mul = 3mu + 2r
* Fp2::square = 2mu + 2r
Next, we compare the costs of our library with the one of [\[AKLGL10\]](http://eprint.iacr.org/2010/526):
Phase | [AKLGL10] | This work
--------------------|---------------|---------------
Miller loop | 6792mu + 3022r| 6785mu + 3022r
Final exponentiation| 3753mu + 2006r| 3526mu + 1932r
Optimal ate pairing |10545mu + 5028r|10311mu + 4954r
Note: [\[Table 2 on p. 17, AKLGL10\]](http://eprint.iacr.org/2010/526) does not contain the cost of (m, r), so we have added the costs of (282m + 6mu + 4r) and (30m + 75mu + 50r) to ML and FE, respectively.
Finally, at the moment, our implementation does not support the algorithm in [PSNB10](https://eprint.iacr.org/2010/429).
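For reference, the last row of the table is just the sum of the two phases: (6792 + 3753)mu + (3022 + 2006)r = 10545mu + 5028r for [AKLGL10], and (6785 + 3526)mu + (3022 + 1932)r = 10311mu + 4954r for this work.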
Benchmark
-------------
The cost of a pairing is __1.17M__ clock cycles on a Core i7 4700MQ (Haswell) 2.4GHz processor with TurboBoost disabled. Below, we also include clock cycle counts on a Core i7 2600 3.4GHz, a Xeon X5650 2.6GHz, and a Core i7 4700MQ 2.4GHz.
A formal benchmark is reported in \[ZPMRTH\].
    % sudo sh -c "echo 0 > /sys/devices/system/cpu/cpufreq/boost"
    % cat /sys/devices/system/cpu/cpufreq/boost
    0
operation | i7 2600|Xeon X5650|Haswell|Haswell with mulx
------------|--------|----------|-------|-----------------
TurboBoost |on |on |off |off
| | | |
mu | 50 |60 |42 |38
r | 80 |98 |69 |65
Fp:mul |124 |146 |98 |90
Fp2:mul |360 |412 | |
Fp2:square |288 |335 | |
| | | |
G1::double |1150 |1300 | |
G1::add |2200 |2600 | |
G2::double |2500 |2900 | |
G2::add |5650 |6500 | |
Fp12::square|4500 |5150 | |
Fp12::mul |6150 |7000 | |
| | | |
Miller loop |0.83M |0.97M |0.82M |0.71M
final_exp |0.53M |0.63M |0.51M |0.46M
| | | |
pairing |1.36M |1.60M |1.33M |1.17M
References
-------------
* \[ABLR\] [_The Realm of the Pairings_](http://dx.doi.org/10.1007/978-3-662-43414-7_1) (Invited Talk),
Diego F. Aranha, Paulo S. L. M. Barreto, Patrick Longa, and Jefferson E. Ricardini,
SAC 2013, ([preprint](http://eprint.iacr.org/2013/722), [slide](http://sac2013.irmacs.sfu.ca/slides/s1.pdf))
* \[NASKM\] [_Integer Variable chi-Based Ate Pairing_](http://dx.doi.org/10.1007/978-3-540-85538-5_13), Y. Nogami, M. Akane, Y. Sakemi, H. Kato, and Y. Morikawa,
Pairing 2008
* \[PSNB\] [_A Family of Implementation-Friendly BN Elliptic Curves_](http://dx.doi.org/10.1016/j.jss.2011.03.083),
G.C.C.F. Pereira, M.A. Simplicio Jr, M. Naehrig, P.S.L.M. Barreto, J. Systems and Software 2011, ([preprint](http://eprint.iacr.org/2010/429))
* \[AKLGL\] [_Faster Explicit Formulas for Computing Pairings over Ordinary Curves_](http://dx.doi.org/10.1007/978-3-642-20465-4_5),
D.F. Aranha, K. Karabina, P. Longa, C.H. Gebotys, J. Lopez,
EUROCRYPTO 2011, ([preprint](http://eprint.iacr.org/2010/526))
* \[BDMOHT\] [_High-Speed Software Implementation of the Optimal Ate Pairing over Barreto-Naehrig Curves_](http://dx.doi.org/10.1007/978-3-642-17455-1_2),
Jean-Luc Beuchat, Jorge Enrique González Díaz, Shigeo Mitsunari, Eiji Okamoto, Francisco Rodríguez-Henríquez, Tadanori Teruya,
Pairing 2010, ([preprint](http://eprint.iacr.org/2010/354))
* [_A Fast Implementation of the Optimal Ate Pairing over BN curve on Intel Haswell Processor_](http://eprint.iacr.org/2013/362),
Shigeo Mitsunari,
IACR ePrint 2013/362
* [_Succinct Non-Interactive Zero Knowledge for a von Neumann Architecture_](http://eprint.iacr.org/2013/879),
Eli Ben-Sasson, Alessandro Chiesa, Eran Tromer, Madars Virza,
USENIX Security 2014
* \[ZPMRTH\] [_Software implementation of an Attribute-Based Encryption scheme_](http://dx.doi.org/10.1109/TC.2014.2329681),
Eric Zavattoni and Luis J. Dominguez Perez and Shigeo Mitsunari and Ana H. Sanchez-Ramirez and Tadanori Teruya and Francisco Rodriguez-Henriquez,
IEEE Transactions on Computers, To appear, ([preprint](https://eprint.iacr.org/2014/401), [project Web page and source code](http://sandia.cs.cinvestav.mx/index.php?n=Site.CPABE))
* [This library's old webpage](http://homepage1.nifty.com/herumi/crypt/ate-pairing.html)
Authors
-------------
* MITSUNARI Shigeo (`herumi@nifty.com`)
* TERUYA Tadanori (`tadanori.teruya@gmail.com`)
Contributors
-------------
* Alessandro Chiesa (`alexch@mit.edu`)
* Madars Virza (`madars@mit.edu`)


@@ -1,29 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup>
<ClCompile>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<Optimization>MaxSpeed</Optimization>
<InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
<IntrinsicFunctions>true</IntrinsicFunctions>
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
<OmitFramePointers>true</OmitFramePointers>
<BufferSecurityCheck>false</BufferSecurityCheck>
<FunctionLevelLinking>true</FunctionLevelLinking>
<EnableEnhancedInstructionSet>AdvancedVectorExtensions</EnableEnhancedInstructionSet>
<FloatingPointModel>Fast</FloatingPointModel>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemGroup />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<LocalDebuggerWorkingDirectory>$(SolutionDir)bin</LocalDebuggerWorkingDirectory>
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
</Project>


@@ -1,24 +0,0 @@
<?xml version="1.0" encoding="shift_jis"?>
<VisualStudioPropertySheet
ProjectType="Visual C++"
Version="8.00"
Name="release"
>
<Tool
Name="VCCLCompilerTool"
Optimization="3"
InlineFunctionExpansion="2"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
PreprocessorDefinitions="NDEBUG;_SECURE_SCL=0"
BufferSecurityCheck="false"
FloatingPointModel="2"
BrowseInformation="1"
/>
<Tool
Name="VCLinkerTool"
GenerateDebugInformation="true"
LinkTimeCodeGeneration="1"
/>
</VisualStudioPropertySheet>


@@ -1 +0,0 @@
*.o


@@ -1,24 +0,0 @@
include ../common.mk
ifeq ($(UNAME_S),Darwin)
LIB_SUF=dylib
else
LIB_SUF=so
endif
TARGET=../lib/libzm.a
all:$(TARGET)
OBJ=zm2.o zm.o
clean:
$(RM) *.o $(TARGET)
../lib/libzm.a: $(OBJ)
$(MKDIR) ../lib
-$(RM) $@
$(AR) $@ $(OBJ)
$(CXX) -fPIC -shared -o ../lib/zm.$(LIB_SUF) $(OBJ)
zm.o: zm.cpp ../include/zm.h
zm2.o: zm2.cpp ../include/zm2.h ../include/bn.h


@@ -1,570 +0,0 @@
#include "zm.h"
#include <cstdio>
using namespace mie;
#ifdef MIE_USE_X64ASM
#define XBYAK_NO_OP_NAMES
#include "xbyak/xbyak.h"
using namespace Xbyak;
#endif
/**
out[] = x[] + y[]
@note the sizeof out >= n
@return size of x[] + y[]
*/
static inline bool in_addN(Unit *out, const Unit *x, const Unit *y, size_t n)
{
assert(n > 0);
Unit c = 0;
for (size_t i = 0; i < n; i++) {
Unit xc = x[i] + c;
if (xc < c) {
// x[i] = Unit(-1) and c = 1
out[i] = y[i];
} else {
xc += y[i];
c = y[i] > xc ? 1 : 0;
out[i] = xc;
}
}
return c != 0;
}
/**
out[] = x[] + y
*/
static inline bool in_add(Unit *out, const Unit *x, size_t n, Unit y)
{
assert(n > 0);
Unit xc = x[0] + y;
Unit c = y > xc ? 1 : 0;
out[0] = xc;
for (size_t i = 1; i < n; i++) {
Unit xc = x[i] + c;
if (xc < c) {
out[i] = 0;
} else {
out[i] = xc;
c = 0;
}
}
return c != 0;
}
/**
out[] = x[] - y[]
*/
static inline bool in_subN(Unit *out, const Unit *x, const Unit *y, size_t n)
{
assert(n > 0);
Unit c = 0;
for (size_t i = 0; i < n; i++) {
Unit yc = y[i] + c;
if (yc < c) {
// y[i] = Unit(-1) and c = 1
out[i] = x[i];
} else {
c = x[i] < yc ? 1 : 0;
out[i] = x[i] - yc;
}
}
return c != 0;
}
/**
out[] = x[] - y
*/
static inline bool in_sub(Unit *out, const Unit *x, size_t n, Unit y)
{
assert(n > 0);
Unit c = x[0] < y ? 1 : 0;
out[0] = x[0] - y;
for (size_t i = 1; i < n; i++) {
if (x[i] < c) {
out[i] = Unit(-1);
} else {
out[i] = x[i] - c;
c = 0;
}
}
return c != 0;
}
/*
[H:L] <= a * b
@return L
*/
static inline Unit mulUnit(Unit *H, Unit a, Unit b)
{
#ifdef MIE_USE_UNIT32
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
uint64_t t = __emulu(a, b);
#else
uint64_t t = uint64_t(a) * b;
#endif
uint32_t L;
split64(H, &L, t);
return L;
#else
#if defined(_WIN64) && !defined(__INTEL_COMPILER)
return _umul128(a, b, H);
#else
typedef __attribute__((mode(TI))) unsigned int uint128;
uint128 t = uint128(a) * b;
*H = uint64_t(t >> 64);
return uint64_t(t);
#endif
#endif
}
/*
out[0..n + 1] = x[0..n] * y
@note accept out == x
*/
static inline void in_mul(Unit *out, const Unit *x, size_t n, Unit y)
{
assert(n > 0);
Unit H = 0;
for (size_t i = 0; i < n; i++) {
Unit t = H;
Unit L = mulUnit(&H, x[i], y);
out[i] = t + L;
if (out[i] < t) {
H++;
}
}
out[n] = H;
}
/*
q = [H:L] / y
r = [H:L] % y
return q
*/
static inline Unit divUnit(Unit *r, Unit H, Unit L, Unit y)
{
#ifdef MIE_USE_UNIT32
uint64_t t = make64(H, L);
uint32_t q = uint32_t(t / y);
*r = Unit(t % y);
return q;
#elif defined(_MSC_VER)
#pragma
fprintf(stderr, "not implemented divUnit\n");
exit(1);
#else
typedef __attribute__((mode(TI))) unsigned int uint128;
uint128 t = (uint128(H) << 64) | L;
uint64_t q = uint64_t(t / y);
*r = Unit(t % y);
return q;
#endif
}
/*
q = x[] / y
@retval r = x[] % y
@note accept q == x
*/
static inline Unit in_div(Unit *q, const Unit *x, size_t xn, Unit y)
{
Unit r = 0;
for (int i = (int)xn - 1; i >= 0; i--) {
q[i] = divUnit(&r, r, x[i], y);
}
return r;
}
static inline Unit in_mod(const Unit *x, size_t xn, Unit y)
{
Unit r = 0;
for (int i = (int)xn - 1; i >= 0; i--) {
divUnit(&r, r, x[i], y);
}
return r;
}
bool (*mie::local::PrimitiveFunction::addN)(Unit *out, const Unit *x, const Unit *y, size_t n) = &in_addN;
bool (*mie::local::PrimitiveFunction::add1)(Unit *out, const Unit *x, size_t n, Unit y) = &in_add;
bool (*mie::local::PrimitiveFunction::subN)(Unit *out, const Unit *x, const Unit *y, size_t n) = &in_subN;
bool (*mie::local::PrimitiveFunction::sub1)(Unit *out, const Unit *x, size_t n, Unit y) = &in_sub;
void (*mie::local::PrimitiveFunction::mul1)(Unit *out, const Unit *x, size_t n, Unit y) = &in_mul;
Unit (*mie::local::PrimitiveFunction::div1)(Unit *q, const Unit *x, size_t n, Unit y) = &in_div;
Unit (*mie::local::PrimitiveFunction::mod1)(const Unit *x, size_t n, Unit y) = &in_mod;
#ifdef MIE_USE_X64ASM
class Code : public Xbyak::CodeGenerator {
void genAddSub(bool isAdd)
{
using namespace Xbyak;
inLocalLabel();
const Reg64& a = rax;
#ifdef XBYAK64_WIN
const Reg64& out = rcx;
const Reg64& x = rdx;
const Reg64& y = r8;
const Reg64& n = r9;
const Reg64& t0 = r10;
const Reg64& t1 = r11;
const Reg64& t2 = rsi;
#else
const Reg64& out = rdi;
const Reg64& x = rsi;
const Reg64& y = rdx;
const Reg64& n = rcx;
const Reg64& t0 = r8;
const Reg64& t1 = r9;
const Reg64& t2 = r10;
#endif
cmp(n, 4);
jge(".main", T_NEAR);
cmp(n, 1);
jne("@f");
// n == 1
mov(a, ptr [x]);
if (isAdd) {
add(a, ptr [y]);
} else {
sub(a, ptr [y]);
}
mov(ptr [out], a);
mov(a, 0);
setc(al);
ret();
L("@@");
cmp(n, 2);
jne("@f");
// n == 2
mov(a , ptr [x + 8 * 0]);
mov(t0, ptr [x + 8 * 1]);
if (isAdd) {
add(a , ptr [y + 8 * 0]);
adc(t0, ptr [y + 8 * 1]);
} else {
sub(a , ptr [y + 8 * 0]);
sbb(t0, ptr [y + 8 * 1]);
}
mov(ptr [out + 8 * 0], a);
mov(ptr [out + 8 * 1], t0);
mov(a, 0);
setc(al);
ret();
L("@@");
// n == 3
mov(a , ptr [x + 8 * 0]);
mov(t0, ptr [x + 8 * 1]);
mov(t1, ptr [x + 8 * 2]);
if (isAdd) {
add(a , ptr [y + 8 * 0]);
adc(t0, ptr [y + 8 * 1]);
adc(t1, ptr [y + 8 * 2]);
} else {
sub(a , ptr [y + 8 * 0]);
sbb(t0, ptr [y + 8 * 1]);
sbb(t1, ptr [y + 8 * 2]);
}
mov(ptr [out + 8 * 0], a);
mov(ptr [out + 8 * 1], t0);
mov(ptr [out + 8 * 2], t1);
mov(a, 0);
setc(al);
ret();
L(".main"); // n >= 4
#ifdef XBYAK64_WIN
mov(ptr [rsp + 8 * 1], t2);
#endif
mov(a, n);
shr(n, 2);
and_(a, 3);
jz(".lp");
cmp(a, 1);
jne("@f");
// 4x + 1
mov(a, ptr [x + 8 * 0]);
if (isAdd) {
add(a, ptr [y + 8 * 0]);
} else {
sub(a, ptr [y + 8 * 0]);
}
mov(ptr [out + 8 * 0], a);
lea(x, ptr [x + 8]);
lea(y, ptr [y + 8]);
lea(out, ptr [out + 8]);
jmp(".lp");
L("@@");
cmp(a, 2);
jne("@f");
// 4x + 2
mov(a , ptr [x + 8 * 0]);
mov(t0, ptr [x + 8 * 1]);
if (isAdd) {
add(a , ptr [y + 8 * 0]);
adc(t0, ptr [y + 8 * 1]);
} else {
sub(a , ptr [y + 8 * 0]);
sbb(t0, ptr [y + 8 * 1]);
}
mov(ptr [out + 8 * 0], a);
mov(ptr [out + 8 * 1], t0);
lea(x, ptr [x + 8 * 2]);
lea(y, ptr [y + 8 * 2]);
lea(out, ptr [out + 8 * 2]);
jmp(".lp");
L("@@");
// 4x + 3
mov(a , ptr [x + 8 * 0]);
mov(t0, ptr [x + 8 * 1]);
mov(t1, ptr [x + 8 * 2]);
if (isAdd) {
add(a , ptr [y + 8 * 0]);
adc(t0, ptr [y + 8 * 1]);
adc(t1, ptr [y + 8 * 2]);
} else {
sub(a , ptr [y + 8 * 0]);
sbb(t0, ptr [y + 8 * 1]);
sbb(t1, ptr [y + 8 * 2]);
}
mov(ptr [out + 8 * 0], a);
mov(ptr [out + 8 * 1], t0);
mov(ptr [out + 8 * 2], t1);
lea(x, ptr [x + 8 * 3]);
lea(y, ptr [y + 8 * 3]);
lea(out, ptr [out + 8 * 3]);
align(16);
L(".lp");
mov(a , ptr [x + 8 * 0]);
mov(t0, ptr [x + 8 * 1]);
mov(t1, ptr [x + 8 * 2]);
mov(t2, ptr [x + 8 * 3]);
if (isAdd) {
adc(a , ptr [y + 8 * 0]);
adc(t0, ptr [y + 8 * 1]);
adc(t1, ptr [y + 8 * 2]);
adc(t2, ptr [y + 8 * 3]);
} else {
sbb(a , ptr [y + 8 * 0]);
sbb(t0, ptr [y + 8 * 1]);
sbb(t1, ptr [y + 8 * 2]);
sbb(t2, ptr [y + 8 * 3]);
}
mov(ptr [out + 8 * 0], a);
mov(ptr [out + 8 * 1], t0);
mov(ptr [out + 8 * 2], t1);
mov(ptr [out + 8 * 3], t2);
lea(x, ptr [x + 8 * 4]);
lea(y, ptr [y + 8 * 4]);
lea(out, ptr [out + 8 * 4]);
dec(n);
jnz(".lp");
L(".exit");
mov(a, 0);
setc(al);
#ifdef XBYAK64_WIN
mov(t2, ptr [rsp + 8 * 1]);
#endif
ret();
outLocalLabel();
}
// add1(Unit *out, const Unit *x, size_t n, Unit y);
void genAddSub1(bool isAdd)
{
using namespace Xbyak;
inLocalLabel();
const Reg64& a = rax;
const Reg64& c = rcx;
#ifdef XBYAK64_WIN
mov(r10, c);
mov(c, r8); // n
const Reg64& out = r10;
const Reg64& x = rdx;
const Reg64& y = r9;
const Reg64& t = r11;
#else
mov(r10, c);
mov(c, rdx); // n
const Reg64& out = rdi;
const Reg64& x = rsi;
const Reg64& y = r10;
const Reg64& t = r8;
#endif
lea(out, ptr [out + c * 8]);
lea(x, ptr [x + c * 8]);
xor_(a, a);
neg(c);
mov(t, ptr [x + c * 8]);
if (isAdd) {
add(t, y);
} else {
sub(t, y);
}
mov(ptr [out + c * 8], t);
inc(c);
// faster on Core i3
jz(".exit");
L(".lp");
mov(t, ptr [x + c * 8]);
if (isAdd) {
adc(t, a);
} else {
sbb(t, a);
}
mov(ptr [out + c * 8], t);
inc(c);
jnz(".lp");
L(".exit");
setc(al);
ret();
outLocalLabel();
}
void genMul()
{
using namespace Xbyak;
inLocalLabel();
// void in_mul(Unit *out, const Unit *x, size_t n, Unit y)
const Reg64& a = rax;
const Reg64& d = rdx;
const Reg64& t = r11;
mov(r10, rdx);
#ifdef XBYAK64_WIN
const Reg64& out = rcx;
const Reg64& x = r10; // rdx
const Reg64& n = r8;
const Reg64& y = r9;
#else
const Reg64& out = rdi;
const Reg64& x = rsi;
const Reg64& n = r10; // rdx
const Reg64& y = rcx;
#endif
const int s = (int)sizeof(Unit);
xor_(d, d);
L(".lp");
mov(t, d);
mov(a, ptr [x]);
mul(y); // [d:a] = [x] * y
add(t, a);
adc(d, 0);
mov(ptr [out], t);
add(x, s);
add(out, s);
sub(n, 1);
jnz(".lp");
mov(ptr [out], d);
ret();
outLocalLabel();
}
void genDiv()
{
using namespace Xbyak;
inLocalLabel();
// Unit in_div(Unit *q, const Unit *x, size_t xn, Unit y)
const Reg64& a = rax;
const Reg64& d = rdx;
mov(r10, rdx);
#ifdef XBYAK64_WIN
const Reg64& q = rcx;
const Reg64& x = r10; // rdx
const Reg64& n = r8;
const Reg64& y = r9;
#else
const Reg64& q = rdi;
const Reg64& x = rsi;
const Reg64& n = r10; // rdx
const Reg64& y = rcx;
#endif
const int s = (int)sizeof(Unit);
lea(x, ptr [x + n * s - s]); // x = &x[xn - 1]
lea(q, ptr [q + n * s - s]); // q = &q[xn - 1]
xor_(d, d); // r = 0
L(".lp");
mov(a, ptr [x]);
div(y); // [d:a] / y = a ... d ; q = a, r = d
mov(ptr [q], a);
sub(x, s);
sub(q, s);
sub(n, 1);
jnz(".lp");
mov(a, d);
ret();
outLocalLabel();
}
void genMod()
{
using namespace Xbyak;
inLocalLabel();
// Unit mod1(const Unit *x, size_t n, Unit y);
const Reg64& a = rax;
const Reg64& d = rdx;
mov(r10, rdx);
#ifdef XBYAK64_WIN
const Reg64& x = rcx;
const Reg64& n = r10; // rdx
const Reg64& y = r8;
#else
const Reg64& x = rdi;
const Reg64& n = rsi;
const Reg64& y = r10; // rdx
#endif
const int s = (int)sizeof(Unit);
lea(x, ptr [x + n * s - s]); // x = &x[xn - 1]
xor_(d, d); // r = 0
L(".lp");
mov(a, ptr [x]);
div(y); // [d:a] / y = a ... d ; q = a, r = d
sub(x, s);
sub(n, 1);
jnz(".lp");
mov(a, d);
ret();
outLocalLabel();
}
public:
Code()
{
mie::local::PrimitiveFunction::addN = getCurr<bool (*)(Unit *, const Unit *, const Unit *, size_t)>();
genAddSub(true);
align(16);
mie::local::PrimitiveFunction::add1 = getCurr<bool (*)(Unit *, const Unit *, size_t, Unit)>();
genAddSub1(true);
align(16);
mie::local::PrimitiveFunction::subN = getCurr<bool (*)(Unit *, const Unit *, const Unit *, size_t)>();
genAddSub(false);
align(16);
mie::local::PrimitiveFunction::mul1 = getCurr<void (*)(Unit *, const Unit *, size_t, Unit)>();
genMul();
align(16);
mie::local::PrimitiveFunction::div1 = getCurr<Unit (*)(Unit *, const Unit *, size_t, Unit)>();
genDiv();
align(16);
mie::local::PrimitiveFunction::mod1 = getCurr<Unit (*)(const Unit *, size_t, Unit)>();
genMod();
}
};
#endif
void mie::zmInit()
{
#ifdef MIE_USE_X64ASM
static bool isInit = false;
if (isInit) return;
isInit = true;
try {
static Code code;
} catch (std::exception& e) {
fprintf(stderr, "zmInit ERR:%s\n", e.what());
exit(1);
}
#endif
}

File diff suppressed because it is too large


@@ -1,4 +0,0 @@
*.o
bench_test
bn
test_zm


@@ -1,54 +0,0 @@
include ../common.mk
HEADER= ../include/zm.h ../include/bn.h ../include/zm2.h test_point.hpp
TARGET=test_zm bn ../lib/libzm.a
ifeq ($(BIT),-m64)
TARGET += bench_test sample loop_test java_api
endif
all:$(TARGET)
.SUFFIXES: .cpp
test_zm: test_zm.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
bn: bn.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
sample: sample.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
java_api: java_api.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
loop_test: loop_test.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
bench_test: bench.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
../lib/libzm.a: $(HEADER) ../src/zm2.cpp ../src/zm.cpp
$(MAKE) -C ../src ../lib/libzm.a
clean:
$(RM) *.o $(TARGET)
$(MAKE) -C ../src clean
test: $(TARGET)
./test_zm && ./bn
test_zm.o: test_zm.cpp $(HEADER)
bn.o: bn.cpp $(HEADER)
bench.o: bench.cpp $(HEADER)
sample.o: sample.cpp $(HEADER)
java_api.o: java_api.cpp $(HEADER) ../java/bn254_if.hpp
loop_test.o: loop_test.cpp $(HEADER)
minitest.o: minitest.cpp $(HEADER)
minitest: minitest.o ../lib/libzm.a
$(CXX) -o $@ $< $(LDFLAGS) $(BIT)
check: minitest
./minitest


@@ -1,163 +0,0 @@
#include "zm.h"
#ifndef XBYAK_NO_OP_NAMES
#define XBYAK_NO_OP_NAMES
#endif
#include <xbyak/xbyak.h>
#include <xbyak/xbyak_util.h>
#include <stdio.h>
#include <stdlib.h>
#include <memory.h>
#include <iostream>
#define NUM_OF_ARRAY(x) (sizeof(x) / sizeof(*x))
using namespace Xbyak;
const int innerN = 1;
struct Code : CodeGenerator {
void makeBench(int N, int mode)
{
#ifdef XBYAK64_WIN
const Reg64& pz = rcx;
const Reg64& px = rdx;
const Reg64& py = r8;
#else
const Reg64& pz = rdi;
const Reg64& px = rsi;
const Reg64& py = rdx;
#endif
mov(r10, pz);
mov(r9, px);
mov(r8, py);
push(r12);
push(r13);
mov(ecx, N);
L(".lp");
for (int i = 0; i < innerN; i++) {
switch (mode) {
case 0:
mov(r10, ptr [px]);
mov(r11, ptr [px + 8]);
mov(r12, ptr [px + 16]);
mov(r13, ptr [px + 24]);
add(r10, r10);
adc(r8, r11);
adc(r9, r12);
adc(py, r13);
break;
case 1:
add(r10, ptr [px]);
adc(r8, ptr [px + 8]);
adc(r9, ptr [px + 16]);
adc(py, ptr [px + 24]);
break;
}
}
sub(ecx, 1);
jnz(".lp");
xor_(eax, eax);
pop(r13);
pop(r12);
ret();
}
/*
[t4:t3:t2:t1:t0] <- py[3:2:1:0] * x
*/
void makeMul4x1(const Reg64& t4, const Reg64& t3, const Reg64& t2, const Reg64& t1, const Reg64& t0, const Reg64& py, const Reg64& x)
{
const Reg64& a = rax;
const Reg64& d = rdx;
mov(a, ptr [py]);
mul(x);
mov(t0, a);
mov(t1, d);
mov(a, ptr [py + 8]);
mul(x);
xor_(t2, t2);
add(t1, a);
adc(t2, d);
mov(a, ptr [py + 16]);
mul(x);
xor_(t3, t3);
add(t2, a);
adc(t3, d);
mov(a, ptr [py + 24]);
mul(x);
xor_(t4, t4);
add(t3, a);
adc(t4, d);
}
};
mie::Vuint Put(const uint64_t *x, size_t n)
{
mie::Vuint t;
t.set(x, n);
std::cout << t << std::endl;
return t;
}
void bench(int mode)
{
const int N = 100000;
Code code;
code.makeBench(N, mode);
int (*p)(uint64_t*, const uint64_t*, const uint64_t*) = code.getCode<int (*)(uint64_t*, const uint64_t*, const uint64_t*)>();
uint64_t a[4] = { uint64_t(-1), uint64_t(-2), uint64_t(-3), 544443221 };
uint64_t b[4] = { uint64_t(-123), uint64_t(-3), uint64_t(-4), 222222222 };
uint64_t c[5] = { 0, 0, 0, 0, 0 };
const int M = 100;
Xbyak::util::Clock clk;
for (int i = 0; i < M; i++) {
clk.begin();
p(c, a, b);
clk.end();
}
printf("%.2fclk\n", clk.getClock() / double(M) / double(N) / innerN);
}
struct Call : Xbyak::CodeGenerator {
Call(const void **p)
{
const void *f = (const void *)getCurr();
sub();
align(16);
*p = (const void*)getCurr();
mov(eax, 3);
call(f);
ret();
}
void sub()
{
add(eax, eax);
ret();
}
};
int main(int argc, char *argv[])
{
argc--, argv++;
/*
Core i7
add : 8.0clk
mul1: 10.7clk
mul2: 17.5clk
*/
try {
puts("test0");
bench(0);
puts("test1");
bench(1);
int (*f)();
Call call((const void**)&f);
printf("%d\n", f());
} catch (std::exception& e) {
fprintf(stderr, "ExpCode ERR:%s\n", e.what());
}
}


@@ -1,194 +0,0 @@
<?xml version="1.0" encoding="shift_jis"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="9.00"
Name="bench"
ProjectGUID="{D3016C82-693F-4C7A-9BA7-5C658EDE39ED}"
RootNamespace="bench"
Keyword="Win32Proj"
TargetFrameworkVersion="196613"
>
<Platforms>
<Platform
Name="Win32"
/>
<Platform
Name="x64"
/>
</Platforms>
<ToolFiles>
</ToolFiles>
<Configurations>
<Configuration
Name="Debug|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\..\ate.vsprops"
CharacterSet="1"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
TargetEnvironment="3"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
UsePrecompiledHeader="0"
WarningLevel="3"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
LinkIncremental="2"
GenerateDebugInformation="true"
SubSystem="1"
TargetMachine="17"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
<Configuration
Name="Release|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\..\release.vsprops;..\..\ate.vsprops"
CharacterSet="1"
WholeProgramOptimization="1"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
TargetEnvironment="3"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="2"
EnableIntrinsicFunctions="true"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="2"
EnableFunctionLevelLinking="true"
UsePrecompiledHeader="0"
WarningLevel="3"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
LinkIncremental="1"
GenerateDebugInformation="true"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="17"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<Filter
Name="source"
Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
>
<File
RelativePath="..\bench.cpp"
>
</File>
</Filter>
<Filter
Name="header"
Filter="h;hpp;hxx;hm;inl;inc;xsd"
UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
>
</Filter>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

File diff suppressed because it is too large


@@ -1,216 +0,0 @@
#include <stdio.h>
#include <memory.h>
#include "xbyak/xbyak_util.h"
extern Xbyak::util::Clock sclk;
#include "bn.h"
#include "bn-multi.h"
#include <iostream>
#include "util.h"
/*
define DEBUG_COUNT in zm2.cpp
m = mul256 ; ~50clk@i7-2600
r = r512 ; ~75clk@i7-2600
Fp::mul = m + r
Fp2::mul = 3m + 2r
Fp2::square = 2m + 2r
EXP = 3526m + 1932r + 18328a
single = (6785m + 3022r + 31752a) * N + EXP = (10311m + 4954r + 50080a) * N
opt_ateMultiPairingJ = (4604m + 2301r + 18858a) * N + EXP + 2268m + 758r + 13215a
opt_atePairingKnownG2Mixed = (3011m + 1125r + 13324a) * N + EXP + 6872m + 3059r + 32073a
*/
#define NUM_OF_ARRAY(x) (sizeof(x) / sizeof(*x))
using namespace bn;
using namespace ecop;
const mie::Vuint& p = Param::p;
const mie::Vuint& r = Param::r;
int g_count_m256;
int g_count_r512;
int g_count_add256;
void setQn(Fp2 Qn[8][3])
{
Qn[0][0]=Fp2(Fp("11517672169287316361176188794386139376898724198982915810069006199968038420191"),Fp("10835361915836038157486987955558731390170808906118754364538491386961053655274"));
Qn[0][1]=Fp2(Fp("1778069034406611548660214861031659277772705043859539974021985347836919469866"),Fp("13681662439753864346331726398353760699598198306861051081413655381467995631936"));
Qn[0][2]=Fp2(Fp("1"),Fp("0"));
Qn[1][0]=Fp2(Fp("11390001546001864402600540835220615821206485289741226204852043657103214655977"),Fp("213486898011631016451079100860803230206220441184056952439876131775418552592"));
Qn[1][1]=Fp2(Fp("5965151828180680581929387432038466302456180976708258052725425526886445195733"),Fp("12702697697694570150381325292928077349338690254734620977056113962521966534889"));
Qn[1][2]=Fp2(Fp("1"),Fp("0"));
Qn[2][0]=Fp2(Fp("317827852388030917234217793035893093021483955938125136386577929112148939582"),Fp("9278896717834347780025960489604748790341527699671814896844481527020420813618"));
Qn[2][1]=Fp2(Fp("4604308811892940474377307346344747500789112678674827642409858197234913520861"),Fp("1070757109492256048282468809751283274011240272126145201336641668031831023352"));
Qn[2][2]=Fp2(Fp("1"),Fp("0"));
Qn[3][0]=Fp2(Fp("15141009173450567653529452777511404553752377160725217686850520933697466610083"),Fp("16629468586199856290567823389372902473847532496895550111471616660495281795606"));
Qn[3][1]=Fp2(Fp("16359294718145568711088295546349667627209864787890529421842732282659027460110"),Fp("4591971789740803293911582763487966490224517350021416286195900260588416606065"));
Qn[3][2]=Fp2(Fp("1"),Fp("0"));
Qn[4][0]=Fp2(Fp("10488487177327651775576239839542683920101411453666780842090527101415731001540"),Fp("2602939798302577793362090318279109373481010951639935585239213420440620860256"));
Qn[4][1]=Fp2(Fp("9863609696075612874186230195435829835812764055187360738836832136664960165343"),Fp("9165168201048188946636094951894632779149730577982455486230427162387564949929"));
Qn[4][2]=Fp2(Fp("1"),Fp("0"));
Qn[5][0]=Fp2(Fp("5292072187326998897140340417009249180463237659578606571855121284655134215860"),Fp("10689661392573589494547928414723411741556083467963111340842112372714719466139"));
Qn[5][1]=Fp2(Fp("10086838226925579349559952786276070806316934197051675278290874521792223694104"),Fp("13667433560602011720585073415592848192740319254215300890305616288368916516198"));
Qn[5][2]=Fp2(Fp("1"),Fp("0"));
Qn[6][0]=Fp2(Fp("3324985573750603904527543011287473872994996187451653207674213275952327637416"),Fp("11720559305674136650678029454774125211095289377686252974953815151770225790606"));
Qn[6][1]=Fp2(Fp("16223977577427149050185260579094435839305544920515759664940268454263915683853"),Fp("11626391287612250863586482028776561221723650762976964634697768152047450671751"));
Qn[6][2]=Fp2(Fp("1"),Fp("0"));
Qn[7][0]=Fp2(Fp("11092045417843901853363139904491331290984929202254355616261729225837031249105"),Fp("16375618814835183841514709507215753591504289965694221131243608575439086917851"));
Qn[7][1]=Fp2(Fp("11975761771951146049662473814541920209508901398043990032621984937015757049692"),Fp("9935470201967315109600907913103514245507786763785970745954141872305198444941"));
Qn[7][2]=Fp2(Fp("1"),Fp("0"));
}
void setPn(Fp Pn[8][3])
{
const char *tbl[24] = {
"15005210600138052046451246658259670676477715213603446606994004703153823368243", "4343577407084448292832047972972645626170560205031956773324523863608486249664", "1",
"15457516573829718488435745408064186554479833481222785826769940346975045420508", "2364160699533534328839631410506819262600701890275670413410946410280478989668", "1",
"15003436906848848651695496307412705267730348433833218834960455592435929249345", "10438564479059694929228270049917726710497181841089954820494133901102091831646", "1",
"14972172091841485293055489162518763468703573114581772138812357514357617047461", "15977643626927900448562211593960826565665302700760283301009127977689582843731", "1",
"15003436851221616142043118946721100720407021250819020617903326552093126299817", "10454848416660761219292910571291338353428640207324163522494422111725972067818", "1",
"15455749699910633167624727292022947052630250106502406644415113459134577275001", "4855673267417630607411742108134371885186924557679407788265067025515460883257", "1",
"15457516574066288719995335683976973092876237104757111162798583482630975970975", "15911868723492233724159130363827027376562792523950861658431560892650986377042", "1",
"15457523448721782653342827839546724569763830282323659028716247514367541280549", "10929099066851842018351837306432073284036718863229651547413986283505570608536", "1",
};
Fp *p = &Pn[0][0]; // flatten Pn[8][3] so the 24 entries of tbl fill it in row-major order
for (int i = 0; i < 24; i++) {
p[i].set(tbl[i]);
}
}
void testPairing()
{
Fp2 Qn[8][3];
Fp Pn[8][3];
Fp6 l_T[8][70];
Fp12 e;
setQn(Qn);
setPn(Pn);
{
Fp6 *p = &l_T[0][0];
for (int i = 0; i < 8 * 70; i++) {
p[i] = i + 1;
}
}
#if 0
double pre = 0;
for (int i = 1; i < 8; i++) {
Xbyak::util::Clock clk;
clk.begin();
const int N = 3000;
for (int j = 0; j < N; j++) {
bn::opt_atePairingKnownG2Mixed(e, i, l_T, 1, Qn, Pn);
}
clk.end();
double t = clk.getClock() / double(N) / 1e3;
printf("pairing %d %7.2fKclk %7.2fKclk\n", i, t, t - pre);
pre = t;
}
#endif
#if 1
{
int c1[9];
int c2[9];
int c3[9];
for (int i = 1; i < 8; i++) {
g_count_m256 = 0;
g_count_r512 = 0;
g_count_add256 = 0;
// bn::opt_ateMultiPairingJ(e, i, Qn, Pn);
bn::opt_atePairingKnownG2Mixed(e, i, l_T, 1, Qn, Pn);
c1[i] = g_count_m256;
c2[i] = g_count_r512;
c3[i] = g_count_add256;
printf("%d m=%d r=%d a=%d\n", i, g_count_m256, g_count_r512, g_count_add256);
}
puts("extra");
for (int i = 1; i < 7; i++) {
printf("%d %d %d\n", c1[i + 1] - c1[i], c2[i + 1] - c2[i], c3[i + 1] - c3[i]);
}
int m = c1[3] - c1[2];
int r = c2[3] - c2[2];
int a = c3[3] - c3[2];
int em;
int er;
int ea;
{
g_count_m256 = 0;
g_count_r512 = 0;
g_count_add256 = 0;
e.final_exp();
em = g_count_m256;
er = g_count_r512;
ea = g_count_add256;
}
int sm;
int sr;
int sa;
{
g_count_m256 = 0;
g_count_r512 = 0;
g_count_add256 = 0;
bn::opt_atePairing(e, Qn[0], Pn[0]);
sm = g_count_m256;
sr = g_count_r512;
sa = g_count_add256;
}
int km;
int kr;
int ka;
{
Fp6 pre[8][70];
memset(pre, 0, sizeof(pre));
PrecomputePairingKnownG2(pre, 8, Qn);
g_count_m256 = 0;
g_count_r512 = 0;
g_count_add256 = 0;
bn::opt_atePairingKnownG2(e, pre[0], Pn[0]);
km = g_count_m256;
kr = g_count_r512;
ka = g_count_add256;
}
printf("(%dm + %dr + %da) * N + EXP + %dm + %dr + %da\n"
, m, r, a
, c1[2] - 2 * m - em, c2[2] - 2 * r - er, c3[2] - 2 * a - ea);
printf("EXP = %dm + %dr + %da\n", em, er, ea);
printf("single U = (%dm + %dr + %da) + EXP\n", sm - em, sr - er, sa - ea);
printf(" U = %dm + %dr + %da\n", sm, sr, sa);
printf(" K = %dm + %dr + %da\n", km, kr, ka);
#if 0
{
Fp12 f;
Fp6 l;
g_count_m256 = 0;
g_count_r512 = 0;
g_count_add256 = 0;
// Fp6::pointDblLineEval(e.a_, Qn[0], Pn[0]); // 25m + 20r + 76a
// e *= e; // 54m + 12r + 316a
// Fp6::pointAddLineEval(e.a_, Qn[0], Qn[0], Pn[0]); // 41m + 26r + 102a
// Fp12::Dbl::mul_Fp2_024(f, l); // 39m + 12r + 190a
// Fp12::square(f); // 36m + 12r + 210a
Fp12::Dbl::mul_Fp2_024_Fp2_024(e, l, l); // 18m + 10r + 88a
printf("op = %dm + %dr + %da\n", g_count_m256, g_count_r512, g_count_add256);
}
#endif
}
#endif
}
int main(int argc, char *argv[]) try
{
argc--, argv++;
Param::init(-1);
testPairing();
} catch (std::exception& e) {
fprintf(stderr, "exception %s\n", e.what());
return 1;
}


@@ -1,134 +0,0 @@
/*
a new pairing API for Java
*/
#include "../java/bn254_if.hpp"
#include <iostream>
#undef PUT
#define PUT(x) std::cout << #x "\t=" << (x).toString() << std::endl
static int errNum = 0;
void assertBool(const char *msg, bool b)
{
if (b) {
printf("%s : ok\n", msg);
} else {
printf("%s : ng\n", msg);
errNum++;
}
}
template<class T, class S>
void assertEqual(const char *msg, const T& a, const S& b)
{
if (a.equals(b)) {
printf("%s : ok\n", msg);
} else {
printf("%s : ng\n", msg);
PUT(a);
PUT(b);
errNum++;
}
}
struct G2 {
const char *aa;
const char *ab;
const char *ba;
const char *bb;
} g2c = {
"12723517038133731887338407189719511622662176727675373276651903807414909099441",
"4168783608814932154536427934509895782246573715297911553964171371032945126671",
"13891744915211034074451795021214165905772212241412891944830863846330766296736",
"7937318970632701341203597196594272556916396164729705624521405069090520231616",
};
int main()
{
SystemInit();
const Ec1 g1(-1, 1);
const Ec2 g2(
Fp2(Fp(g2c.aa), Fp(g2c.ab)),
Fp2(Fp(g2c.ba), Fp(g2c.bb))
);
// check that g2 and g1 are on the curve
assertBool("g1 is on EC", g1.isValid());
assertBool("g2 is on twist EC", g2.isValid());
puts("order of group");
const Mpz& r = GetParamR();
PUT(r);
{
Ec1 t = g1;
t.mul(r);
assertBool("orgder of g1 == r", t.isZero());
}
{
Ec2 t = g2;
t.mul(r);
assertBool("order of g2 == r", t.isZero());
}
const Mpz a("123456789012345");
const Mpz b("998752342342342342424242421");
// scalar-multiplication sample
{
Mpz c = a;
c.add(b);
Ec1 Pa = g1; Pa.mul(a);
Ec1 Pb = g1; Pb.mul(b);
Ec1 Pc = g1; Pc.mul(c);
Ec1 out = Pa;
out.add(Pb);
assertEqual("check g1 * c = g1 * a + g1 * b", Pc, out);
}
Fp12 e;
// calc e : G2 x G1 -> G3 pairing
e.pairing(g2, g1); // e = e(g2, g1)
PUT(e);
{
Fp12 t = e;
t.power(r);
assertEqual("order of e == r", t, Fp12(1));
}
Ec2 g2a = g2;
g2a.mul(a);
Fp12 ea1;
ea1.pairing(g2a, g1);
Fp12 ea2 = e;
ea2.power(a); // ea2 = e^a
assertEqual("e(g2 * a, g1) = e(g2, g1)^a", ea1, ea2);
Ec1 g1b = g1;
g1b.mul(b);
Fp12 eb1;
eb1.pairing(g2, g1b); // eb1 = e(g2, g1b)
Fp12 eb2 = e;
eb2.power(b); // eb2 = e^b
assertEqual("e(g2a, g1 * b) = e(g2, g1)^b", eb1, eb2);
Ec1 q1 = g1;
q1.mul(12345);
assertBool("q1 is on EC", q1.isValid());
Fp12 e1, e2;
e1.pairing(g2, g1); // e1 = e(g2, g1)
e2.pairing(g2, q1); // e2 = e(g2, q1)
Ec1 q2 = g1;
q2.add(q1);
e.pairing(g2, q2); // e = e(g2, q2)
e1.mul(e2);
assertEqual("e = e1 * e2", e, e1);
/*
reduce one copy as the following
*/
g2a = g2;
g2a.mul(a);
g1b = g1;
g1b.mul(b);
Ec2 g2at = g2; g2at.mul(a);
Ec1 g1bt = g1; g1bt.mul(b);
assertEqual("g2a == g2 * a", g2a, g2at);
assertEqual("g1b == g1 * b", g1b, g1bt);
printf("errNum = %d\n", errNum);
}
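
Every assertion in this sample is an instance of the bilinearity of the pairing: e(g2 * a, g1) = e(g2, g1)^a, e(g2, g1 * b) = e(g2, g1)^b, and e(g2, q1 + q2) = e(g2, q1) * e(g2, q2). A minimal sketch of the same check factored into a helper, assuming the Ec1/Ec2/Fp12/Mpz wrappers from bn254_if.hpp above and that SystemInit() has already been called.

#include "../java/bn254_if.hpp"

// Returns true if the pairing behaves bilinearly for one scalar a.
// Only calls that appear in the sample above are used.
bool checkBilinearity(const Ec2& g2, const Ec1& g1, const Mpz& a)
{
    Fp12 e;
    e.pairing(g2, g1);            // e = e(g2, g1)

    Ec2 g2a = g2; g2a.mul(a);     // g2a = g2 * a
    Ec1 g1a = g1; g1a.mul(a);     // g1a = g1 * a

    Fp12 left, right, ea = e;
    left.pairing(g2a, g1);        // e(g2 * a, g1)
    right.pairing(g2, g1a);       // e(g2, g1 * a)
    ea.power(a);                  // e(g2, g1)^a
    return left.equals(ea) && right.equals(ea);
}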

View File

@@ -1,51 +0,0 @@
/*
loop test
*/
#include "bn.h"
#include "test_point.hpp"
using namespace bn;
using namespace ecop;
int main()
{
#ifdef BN_SUPPORT_SNARK
puts("snark");
bn::CurveParam cp = bn::CurveSNARK1;
#else
puts("fp254BNb");
bn::CurveParam cp = bn::CurveFp254BNb;
#endif
// init my library
Param::init();
// prepare a generator
const Point& pt = selectPoint(cp);
const Ec2 g2(
Fp2(Fp(pt.g2.aa), Fp(pt.g2.ab)),
Fp2(Fp(pt.g2.ba), Fp(pt.g2.bb))
);
const Ec1 g1(pt.g1.a, pt.g1.b);
Fp12 e, ea, ea1, ea2;
Ec2 g2a;
Ec1 g1a;
// calc e : G2 x G1 -> G3 pairing
opt_atePairing(e, g2, g1); // e = e(g2, g1)
mie::Vuint a("0x18b48dddfb2f81cc829b4b9acd393ccb1e90909aabe126bcdbe6a96438eaf313");
for (int i = 0; i < 3000; i++) {
ea = power(e, a);
g1a = g1 * a;
g2a = g2 * a;
opt_atePairing(ea1, g2, g1a); // ea1 = e(g2, g1a)
opt_atePairing(ea2, g2a, g1); // ea2 = e(g2a, g1)
if (ea != ea1 || ea != ea2) {
printf("ERR i=%d\n", i);
PUT(a);
PUT(ea);
PUT(ea1);
PUT(ea2);
exit(1);
}
a -= 1;
}
puts("ok");
}

View File

@@ -1,54 +0,0 @@
#include "bn.h"
#include "test_point.hpp"
static int errNum = 0;
const char *expect = "[[[8118772341496577043438385328606447626730215814727396173233264007541007797690,6742571767760762192519140673058087976840103832045324348366170860928670686713],"
" [9727912590495366720378364920530546614235713408261568635512172059018197267630,10180700148605185348549931182990442059136187839792856455707820203302941578832],"
" [5054507763444412917986776641611331046146804026682679569910978464879371792565,6917005519826733659554708445125877487590687705432214234949972860245110398023]],"
" [[10448556317747236258066222816126375978842661908560317699736569642190930635294,1516980358051268127904344653343215863076753141133525905743113718749531324025],"
" [9794836735385959178744195210089532061310424844916928682580569566332541022353,9375574834170998962484906689780052970915033987453510324648351251071086068423],"
" [710778048594563655498360873129325895716179849942646859397874562033386335205,10688745994254573144943003027511098295097561129365638275727908595677791826005]]]";
template<class T, class S>
void verify(const char *msg, const T& a, const S& b)
{
if (a == b) {
printf("%s : ok\n", msg);
} else {
printf("%s : ng\n", msg);
PUT(a);
PUT(b);
errNum++;
}
}
int main()
try
{
// bn::CurveParam cp = bn::CurveSNARK1;
bn::CurveParam cp = bn::CurveFp254BNb;
using namespace bn;
// init my library
Param::init(cp);
const Point& pt = selectPoint(cp);
const Ec2 g2(
Fp2(Fp(pt.g2.aa), Fp(pt.g2.ab)),
Fp2(Fp(pt.g2.ba), Fp(pt.g2.bb))
);
const Ec1 g1(pt.g1.a, pt.g1.b);
PUT(g1);
PUT(g2);
Fp12 e1, e2;
opt_atePairing(e1, g2, g1);
PUT(e1);
{
std::stringstream ss(expect);
ss >> e2;
}
printf("%s\n", e1 == e2 ? "OK" : "NG");
} catch (std::exception& e) {
printf("ERR %s\n", e.what());
}
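
The test above parses a hard-coded decimal string into an Fp12 with operator>> and compares it with the freshly computed pairing value, so the stream operators double as a snapshot format. A small sketch of the corresponding round trip, assuming the same bn.h stream operators used above.

#include "bn.h"
#include <sstream>
using namespace bn;

// Serialize an Fp12 to its decimal text form and parse it back;
// the copy should compare equal to the original.
bool roundTripEqual(const Fp12& e)
{
    std::stringstream ss;
    ss << e;      // write the decimal representation
    Fp12 copy;
    ss >> copy;   // read it back
    return e == copy;
}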

View File

@@ -1,288 +0,0 @@
/*
a tiny sample of optimal ate pairing
*/
#include "bn.h"
#include "test_point.hpp"
static int errNum = 0;
template<class T, class S>
void verify(const char *msg, const T& a, const S& b)
{
if (a == b) {
printf("%s : ok\n", msg);
} else {
printf("%s : ng\n", msg);
PUT(a);
PUT(b);
errNum++;
}
}
void sample1(const bn::CurveParam& cp)
{
using namespace bn;
// init my library
Param::init(cp);
// prepare a generator
const Point& pt = selectPoint(cp);
const Fp2 g2[3] = {
Fp2(Fp(pt.g2.aa), Fp(pt.g2.ab)),
Fp2(Fp(pt.g2.ba), Fp(pt.g2.bb)),
Fp2(1, 0),
};
const Fp g1[3] = { pt.g1.a, pt.g1.b, 1 };
// verify g2 and g1 on curve
verify("g1 is on EC", ecop::isOnECJac3(g1), true);
verify("g2 is on twist EC", ecop::isOnTwistECJac3(g2), true);
puts("order of group");
PUT(Param::r);
PUT(Param::p);
PUT(Param::t);
{
Fp t[3];
ecop::ScalarMult(t, g1, Param::r);
// (x, y, 0) means 0 at Jacobi coordinate
verify("orgder of g1 == r", t[2], 0);
}
{
Fp2 t[3];
ecop::ScalarMult(t, g2, Param::r);
verify("order of g2 == r", t[2], 0);
}
const char *aStr = "123456789012345";
const char *bStr = "998752342342342342424242421";
const mie::Vuint a(aStr);
const mie::Vuint b(bStr);
// scalar-multiplication sample
{
Fp Pa[3];
Fp Pb[3];
Fp Pc[3];
Fp out[3];
const mie::Vuint c = a + b;
ecop::ScalarMult(Pa, g1, a); // Pa = g1 * a
ecop::ScalarMult(Pb, g1, b); // Pb = g1 * b
ecop::ScalarMult(Pc, g1, c); // Pc = g1 * (a + b)
ecop::ECAdd(out, Pa, Pb); // g1 * a + g1 * b
ecop::NormalizeJac(Pc, Pc);
ecop::NormalizeJac(out, out);
std::cout << std::hex;
verify("check g1 * c = g1 * a + g1 * b", Pc[0] == out[0] && Pc[1] == out[1] && Pc[2] == out[2], true);
#ifdef MIE_ATE_USE_GMP
{
mpz_class aa(aStr);
mpz_class bb(bStr);
mpz_class cc = aa + bb;
Fp Paa[3];
Fp Pbb[3];
Fp Pcc[3];
ecop::ScalarMult(Paa, g1, aa); // Pa = g1 * a
ecop::ScalarMult(Pbb, g1, bb); // Pb = g1 * b
ecop::ScalarMult(Pcc, g1, cc); // Pc = g1 * (a + b)
ecop::NormalizeJac(Pcc, Pcc);
verify("gmp Paa == Pa", Paa[0] == Pa[0] && Paa[1] == Pa[1] && Paa[2] == Pa[2], true);
verify("gmp Pbb == Pb", Pbb[0] == Pb[0] && Pbb[1] == Pb[1] && Pbb[2] == Pb[2], true);
verify("gmp Pcc == Pc", Pcc[0] == Pc[0] && Pcc[1] == Pc[1] && Pcc[2] == Pc[2], true);
}
#endif
}
Fp12 e;
// calc e : G2 x G1 -> G3 pairing
opt_atePairingJac<Fp>(e, g2, g1); // e = e(g2, g1)
PUT(e);
{
Fp12 t = power(e, Param::r);
verify("order of e == r", t, 1);
}
Fp2 g2a[3];
ecop::ScalarMult(g2a, g2, a); // g2a = g2 * a
Fp12 ea1;
opt_atePairingJac<Fp>(ea1, g2a, g1); // ea1 = e(g2a, g1)
Fp12 ea2 = power(e, a); // ea2 = e^a
verify("e(g2 * a, g1) = e(g2, g1)^a", ea1, ea2);
Fp g1b[3];
ecop::ScalarMult(g1b, g1, b); // g1b = g1 * b
Fp12 eb1;
opt_atePairingJac<Fp>(eb1, g2, g1b); // eb1 = e(g2, g1b)
Fp12 eb2 = power(e, b); // eb2 = e^b
verify("e(g2a, g1 * b) = e(g2, g1)^b", eb1, eb2);
Fp q1[3];
ecop::ScalarMult(q1, g1, 12345);
verify("q1 is on EC", ecop::isOnECJac3(q1), true);
Fp12 e1, e2;
opt_atePairingJac<Fp>(e1, g2, g1); // e1 = e(g2, g1)
opt_atePairingJac<Fp>(e2, g2, q1); // e2 = e(g2, q1)
Fp q2[3];
ecop::ECAdd(q2, g1, q1); // q2 = g1 + q1
opt_atePairingJac<Fp>(e, g2, q2); // e = e(g2, q2)
verify("e = e1 * e2", e, e1 * e2);
}
void sample2(const bn::CurveParam& cp)
{
using namespace bn;
// init my library
Param::init(cp);
const Point& pt = selectPoint(cp);
const Ec2 g2(
Fp2(Fp(pt.g2.aa), Fp(pt.g2.ab)),
Fp2(Fp(pt.g2.ba), Fp(pt.g2.bb))
);
const Ec1 g1(pt.g1.a, pt.g1.b);
// verify g2 and g1 on curve
verify("g1 is on EC", g1.isValid(), true);
verify("g2 is on twist EC", g2.isValid(), true);
puts("order of group");
PUT(Param::r);
{
Ec1 t = g1 * Param::r;
// Ec1::mul(t, g1, Param::r);
verify("orgder of g1 == r", t.isZero(), true);
}
{
Ec2 t = g2 * Param::r;
// Ec2::mul(t, g2, Param::r);
verify("order of g2 == r", t.isZero(), true);
}
const char *aStr = "123456789012345";
const char *bStr = "998752342342342342424242421";
const mie::Vuint a(aStr);
const mie::Vuint b(bStr);
// scalar-multiplication sample
{
const mie::Vuint c = a + b;
Ec1 Pa = g1 * a;
Ec1 Pb = g1 * b;
Ec1 Pc = g1 * c;
Ec1 out = Pa + Pb;
verify("check g1 * c = g1 * a + g1 * b", Pc, out);
#ifdef MIE_ATE_USE_GMP
{
mpz_class aa(aStr);
mpz_class bb(bStr);
mpz_class cc = aa + bb;
Ec1 Paa = g1 * aa;
Ec1 Pbb = g1 * bb;
Ec1 Pcc = g1 * cc;
verify("gmp Paa == Pa", Paa, Pa);
verify("gmp Pbb == Pb", Pbb, Pb);
verify("gmp Pcc == Pc", Pcc, Pc);
}
#endif
}
Fp12 e;
// calc e : G2 x G1 -> G3 pairing
opt_atePairing(e, g2, g1); // e = e(g2, g1)
PUT(e);
{
Fp12 t = power(e, Param::r);
verify("order of e == r", t, 1);
}
Ec2 g2a = g2 * a;
// Ec2::mul(g2a, g2, a);
Fp12 ea1;
opt_atePairing(ea1, g2a, g1); // ea1 = e(g2a, g1)
Fp12 ea2 = power(e, a); // ea2 = e^a
verify("e(g2 * a, g1) = e(g2, g1)^a", ea1, ea2);
Ec1 g1b = g1 * b;
// Ec1::mul(g1b, g1, b);
Fp12 eb1;
opt_atePairing(eb1, g2, g1b); // eb1 = e(g2, g1b)
Fp12 eb2 = power(e, b); // eb2 = e^b
verify("e(g2a, g1 * b) = e(g2, g1)^b", eb1, eb2);
Ec1 q1 = g1 * 12345;
verify("q1 is on EC", q1.isValid(), true);
Fp12 e1, e2;
opt_atePairing(e1, g2, g1); // e1 = e(g2, g1)
opt_atePairing(e2, g2, q1); // e2 = e(g2, q1)
Ec1 q2 = g1 + q1;
opt_atePairing(e, g2, q2); // e = e(g2, q2)
verify("e = e1 * e2", e, e1 * e2);
/*
reduce one copy as the following
*/
Ec2::mul(g2a, g2, a); // g2a = g2 * a
Ec1::mul(g1b, g1, b);
verify("g2a == g2 * a", g2a, g2 * a);
verify("g1b == g1 * b", g1b, g1 * b);
}
void multi(const bn::CurveParam& cp)
{
using namespace bn;
// init my library
Param::init(cp);
const Point& pt = selectPoint(cp);
const Ec2 g2(
Fp2(Fp(pt.g2.aa), Fp(pt.g2.ab)),
Fp2(Fp(pt.g2.ba), Fp(pt.g2.bb))
);
const Ec1 g1(pt.g1.a, pt.g1.b);
const size_t N = 10;
const int c = 234567;
std::vector<Ec1> g1s;
g1s.resize(N);
for (size_t i = 0; i < N; i++) {
Ec1::mul(g1s[i], g1, c + i);
g1s[i] = g1 * (c + i);
g1s[i].normalize();
}
std::vector<Fp6> Qcoeff;
Fp2 precQ[3];
bn::components::precomputeG2(Qcoeff, precQ, g2.p);
for (size_t i = 0; i < N; i++) {
Fp12 e1;
bn::components::millerLoop(e1, Qcoeff, g1s[i].p);
e1.final_exp();
Fp12 e2;
opt_atePairing(e2, g2, g1s[i]);
if (e1 != e2) {
printf("err multi %d\n", (int)i);
}
}
}
int main(int argc, char *argv[])
{
#ifdef BN_SUPPORT_SNARK
int b = 3;
if (argc >= 2) {
b = atoi(argv[1]);
if (b != 3 && b != 82) {
printf("not support b=%d\n", b);
return 1;
}
}
printf("SNARK b = %d\n", b);
bn::CurveParam cp = bn::CurveSNARK1;
cp.b = b;
#else
if (argc > 1 && argv[1]) {
printf("not support\n");
return 1;
}
bn::CurveParam cp = bn::CurveFp254BNb;
#endif
puts("sample1");
sample1(cp);
puts("sample2");
sample2(cp);
puts("multi");
multi(cp);
printf("errNum = %d\n", errNum);
}
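
multi() above splits opt_atePairing into its parts when the G2 argument is fixed: precomputeG2 caches the line coefficients of g2 once, and each subsequent pairing only needs a Miller loop over those cached coefficients plus a final exponentiation. A minimal sketch of that reuse, assuming Qcoeff was filled by bn::components::precomputeG2 exactly as in multi().

#include "bn.h"
#include <vector>
using namespace bn;

// Pair a fixed G2 point (already expanded into Qcoeff) with one G1 point.
// The expensive line-coefficient computation is shared across all calls.
Fp12 pairWithFixedG2(const std::vector<Fp6>& Qcoeff, const Ec1& P)
{
    Fp12 e;
    bn::components::millerLoop(e, Qcoeff, P.p); // Miller loop over cached lines
    e.final_exp();                              // map into the target group
    return e;
}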

View File

@@ -1,184 +0,0 @@
<?xml version="1.0" encoding="shift_jis"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="9.00"
Name="test_bn"
ProjectGUID="{F4B8F9CD-2E24-4374-9BF6-6886347E861B}"
RootNamespace="test_bn"
TargetFrameworkVersion="131072"
>
<Platforms>
<Platform
Name="x64"
/>
</Platforms>
<ToolFiles>
</ToolFiles>
<Configurations>
<Configuration
Name="Debug|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\ate.vsprops"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
RuntimeLibrary="3"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
GenerateDebugInformation="true"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
<Configuration
Name="Release|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\release.vsprops;..\ate.vsprops"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
AdditionalOptions="/GS-"
Optimization="3"
InlineFunctionExpansion="2"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
WholeProgramOptimization="true"
AdditionalIncludeDirectories="C:\Program Files (x86)\Intel\VTune Amplifier XE\include"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="2"
BufferSecurityCheck="false"
EnableFunctionLevelLinking="false"
FloatingPointModel="2"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
AdditionalLibraryDirectories="C:\Program Files (x86)\Intel\VTune Amplifier XE\lib64"
GenerateDebugInformation="true"
LinkTimeCodeGeneration="1"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<Filter
Name="header"
>
</Filter>
<Filter
Name="source"
>
<File
RelativePath=".\bn.cpp"
>
<FileConfiguration
Name="Release|x64"
>
<Tool
Name="VCCLCompilerTool"
AssemblerOutput="0"
/>
</FileConfiguration>
</File>
</Filter>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

View File

@@ -1,74 +0,0 @@
#pragma once
#include "bn.h"
#define PUT(x) std::cout << #x << "\t=" << (x) << std::endl;
const struct Point {
struct G2 {
const char *aa;
const char *ab;
const char *ba;
const char *bb;
} g2;
struct G1 {
int a;
int b;
} g1;
} g_pointTbl[] = {
#ifdef BN_SUPPORT_SNARK
// SNARK1(b = 3)
{
{
"15267802884793550383558706039165621050290089775961208824303765753922461897946",
"9034493566019742339402378670461897774509967669562610788113215988055021632533",
"644888581738283025171396578091639672120333224302184904896215738366765861164",
"20532875081203448695448744255224543661959516361327385779878476709582931298750",
},
{
1, 2
},
},
// SNARK2(b = 82)
{
{
"7281644703356799059368313064438243279269372005747477888712173236228325795991",
"15160700668152503952980485502602536850541312794041965342451842375663084147486",
"13523979532236795535820810482891703536907572704519492618036353386190612673074",
"15929067770616689398844794432758732907995965312283969374632681891490787470887",
},
{
-1, 9
},
},
#else
// Aranha
{
{
"12723517038133731887338407189719511622662176727675373276651903807414909099441",
"4168783608814932154536427934509895782246573715297911553964171371032945126671",
"13891744915211034074451795021214165905772212241412891944830863846330766296736",
"7937318970632701341203597196594272556916396164729705624521405069090520231616",
},
{
-1, 1
},
},
#endif
};
inline const Point& selectPoint(const bn::CurveParam& cp)
{
#ifdef BN_SUPPORT_SNARK
if (cp.b != 3 && cp.b != 82) {
printf("not support point for b=%d\n", cp.b);
exit(1);
}
return g_pointTbl[cp.b == 3 ? 0 : 1];
#else
if (cp != bn::CurveFp254BNb) {
printf("not support except for CurveFp254BNb");
exit(1);
}
return g_pointTbl[0];
#endif
}

File diff suppressed because it is too large

View File

@@ -1,166 +0,0 @@
<?xml version="1.0" encoding="shift_jis"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="9.00"
Name="test_zm"
ProjectGUID="{C8EC3A26-31ED-4764-A6F1-D5EBD2D09AFF}"
RootNamespace="test_zm"
TargetFrameworkVersion="131072"
>
<Platforms>
<Platform
Name="x64"
/>
</Platforms>
<ToolFiles>
</ToolFiles>
<Configurations>
<Configuration
Name="Debug|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\ate.vsprops"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
RuntimeLibrary="3"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
GenerateDebugInformation="true"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
<Configuration
Name="Release|x64"
OutputDirectory="$(SolutionDir)$(PlatformName)\$(ConfigurationName)"
IntermediateDirectory="$(PlatformName)\$(ConfigurationName)"
ConfigurationType="1"
InheritedPropertySheets="..\release.vsprops;..\ate.vsprops"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="3"
InlineFunctionExpansion="2"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="1"
OmitFramePointers="true"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="2"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<Filter
Name="header"
>
</Filter>
<Filter
Name="source"
>
<File
RelativePath=".\test_zm.cpp"
>
</File>
</Filter>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

View File

@@ -1,25 +0,0 @@
#ifndef MIE_ATE_UTIL_H_
#define MIE_ATE_UTIL_H_
#ifdef _WIN32
#include <time.h>
static inline double GetCurrTime()
{
return clock() / double(CLOCKS_PER_SEC);
}
#else
#include <sys/time.h>
#include <stdio.h>
static inline double GetCurrTime()
{
struct timeval tv;
gettimeofday(&tv, NULL);
return tv.tv_sec + (double) tv.tv_usec * 1e-6;
}
#endif
#endif
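
GetCurrTime() returns wall-clock seconds as a double on both Windows and POSIX, so a caller times a region by differencing two samples. A minimal usage sketch; the workload is a placeholder.

#include <stdio.h>
#include "util.h" // the GetCurrTime() helper above

int main()
{
    double begin = GetCurrTime();
    volatile double sink = 0;
    for (int i = 0; i < 10000000; i++) sink += i * 0.5; // placeholder workload
    double end = GetCurrTime();
    printf("elapsed %.6f sec\n", end - begin);
    return 0;
}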

View File

@@ -1,40 +0,0 @@
# Ignore CI build directory
build/
xcuserdata
cmake-build-debug/
.idea/
bazel-bin
bazel-genfiles
bazel-googletest
bazel-out
bazel-testlogs
# python
*.pyc
# Visual Studio files
*.sdf
*.opensdf
*.VC.opendb
*.suo
*.user
_ReSharper.Caches/
Win32-Debug/
Win32-Release/
x64-Debug/
x64-Release/
# Ignore autoconf / automake files
Makefile.in
aclocal.m4
configure
build-aux/
autom4te.cache/
googletest/m4/libtool.m4
googletest/m4/ltoptions.m4
googletest/m4/ltsugar.m4
googletest/m4/ltversion.m4
googletest/m4/lt~obsolete.m4
# Ignore generated directories.
googlemock/fused-src/
googletest/fused-src/

View File

@@ -1,81 +0,0 @@
# Build matrix / environment variable are explained on:
# http://about.travis-ci.org/docs/user/build-configuration/
# This file can be validated on:
# http://lint.travis-ci.org/
sudo: false
language: cpp
# Define the matrix explicitly, manually expanding the combinations of (os, compiler, env).
# It is more tedious, but grants us far more flexibility.
matrix:
include:
- os: linux
compiler: gcc
sudo : true
install: ./ci/install-linux.sh && ./ci/log-config.sh
script: ./ci/build-linux-bazel.sh
- os: linux
compiler: clang
sudo : true
install: ./ci/install-linux.sh && ./ci/log-config.sh
script: ./ci/build-linux-bazel.sh
- os: linux
group: deprecated-2017Q4
compiler: gcc
install: ./ci/install-linux.sh && ./ci/log-config.sh
script: ./ci/build-linux-autotools.sh
- os: linux
group: deprecated-2017Q4
compiler: gcc
env: BUILD_TYPE=Debug VERBOSE=1 CXX_FLAGS=-std=c++11
- os: linux
group: deprecated-2017Q4
compiler: clang
env: BUILD_TYPE=Debug VERBOSE=1
- os: linux
group: deprecated-2017Q4
compiler: clang
env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
- os: osx
compiler: gcc
env: BUILD_TYPE=Debug VERBOSE=1
if: type != pull_request
- os: osx
compiler: gcc
env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
if: type != pull_request
- os: osx
compiler: clang
env: BUILD_TYPE=Debug VERBOSE=1
if: type != pull_request
- os: osx
compiler: clang
env: BUILD_TYPE=Release VERBOSE=1 CXX_FLAGS=-std=c++11
if: type != pull_request
# These are the install and build (script) phases for the most common entries in the matrix. They could be included
# in each entry in the matrix, but that is just repetitive.
install:
- ./ci/install-${TRAVIS_OS_NAME}.sh
- . ./ci/env-${TRAVIS_OS_NAME}.sh
- ./ci/log-config.sh
script: ./ci/travis.sh
# For sudo=false builds this section installs the necessary dependencies.
addons:
apt:
# List of whitelisted in travis packages for ubuntu-precise can be found here:
# https://github.com/travis-ci/apt-package-whitelist/blob/master/ubuntu-precise
# List of whitelisted in travis apt-sources:
# https://github.com/travis-ci/apt-source-whitelist/blob/master/ubuntu.json
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- g++-4.9
- clang-3.7
notifications:
email: false

View File

@@ -1,175 +0,0 @@
# Copyright 2017 Google Inc.
# All Rights Reserved.
#
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: misterg@google.com (Gennadiy Civil)
#
# Bazel Build for Google C++ Testing Framework(Google Test)
package(default_visibility = ["//visibility:public"])
licenses(["notice"])
config_setting(
name = "windows",
values = { "cpu": "x64_windows" },
)
config_setting(
name = "windows_msvc",
values = {"cpu": "x64_windows_msvc"},
)
config_setting(
name = "has_absl",
values = {"define": "absl=1"},
)
# Google Test including Google Mock
cc_library(
name = "gtest",
srcs = glob(
include = [
"googletest/src/*.cc",
"googletest/src/*.h",
"googletest/include/gtest/**/*.h",
"googlemock/src/*.cc",
"googlemock/include/gmock/**/*.h",
],
exclude = [
"googletest/src/gtest-all.cc",
"googletest/src/gtest_main.cc",
"googlemock/src/gmock-all.cc",
"googlemock/src/gmock_main.cc",
],
),
hdrs =glob([
"googletest/include/gtest/*.h",
"googlemock/include/gmock/*.h",
]),
copts = select(
{
":windows": [],
":windows_msvc": [],
"//conditions:default": ["-pthread"],
},
),
includes = [
"googlemock",
"googlemock/include",
"googletest",
"googletest/include",
],
linkopts = select({
":windows": [],
":windows_msvc": [],
"//conditions:default": [
"-pthread",
],
}),
defines = select ({
":has_absl": [
"GTEST_HAS_ABSL=1",
],
"//conditions:default": [],
}
),
deps = select ({
":has_absl": [
"@com_google_absl//absl/types:optional",
"@com_google_absl//absl/strings"
],
"//conditions:default": [],
}
)
)
cc_library(
name = "gtest_main",
srcs = [
"googlemock/src/gmock_main.cc",
],
deps = [":gtest"],
)
# The following rules build samples of how to use gTest.
cc_library(
name = "gtest_sample_lib",
srcs = [
"googletest/samples/sample1.cc",
"googletest/samples/sample2.cc",
"googletest/samples/sample4.cc",
],
hdrs = [
"googletest/samples/prime_tables.h",
"googletest/samples/sample1.h",
"googletest/samples/sample2.h",
"googletest/samples/sample3-inl.h",
"googletest/samples/sample4.h",
],
)
cc_test(
name = "gtest_samples",
size = "small",
#All Samples except:
#sample9 ( main )
#sample10 (main and takes a command line option and needs to be separate)
srcs = [
"googletest/samples/sample1_unittest.cc",
"googletest/samples/sample2_unittest.cc",
"googletest/samples/sample3_unittest.cc",
"googletest/samples/sample4_unittest.cc",
"googletest/samples/sample5_unittest.cc",
"googletest/samples/sample6_unittest.cc",
"googletest/samples/sample7_unittest.cc",
"googletest/samples/sample8_unittest.cc",
],
deps = [
"gtest_sample_lib",
":gtest_main",
],
)
cc_test(
name = "sample9_unittest",
size = "small",
srcs = ["googletest/samples/sample9_unittest.cc"],
deps = [":gtest"],
)
cc_test(
name = "sample10_unittest",
size = "small",
srcs = ["googletest/samples/sample10_unittest.cc"],
deps = [
":gtest",
],
)

View File

@@ -1,33 +0,0 @@
cmake_minimum_required(VERSION 2.6.4)
if (POLICY CMP0048)
cmake_policy(SET CMP0048 NEW)
endif (POLICY CMP0048)
project( googletest-distribution )
enable_testing()
include(CMakeDependentOption)
if (CMAKE_VERSION VERSION_LESS 2.8.5)
set(CMAKE_INSTALL_BINDIR "bin" CACHE STRING "User executables (bin)")
set(CMAKE_INSTALL_LIBDIR "lib${LIB_SUFFIX}" CACHE STRING "Object code libraries (lib)")
set(CMAKE_INSTALL_INCLUDEDIR "include" CACHE STRING "C header files (include)")
mark_as_advanced(CMAKE_INSTALL_BINDIR CMAKE_INSTALL_LIBDIR CMAKE_INSTALL_INCLUDEDIR)
else()
include(GNUInstallDirs)
endif()
option(BUILD_GTEST "Builds the googletest subproject" OFF)
#Note that googlemock target already builds googletest
option(BUILD_GMOCK "Builds the googlemock subproject" ON)
cmake_dependent_option(INSTALL_GTEST "Enable installation of googletest. (Projects embedding googletest may want to turn this OFF.)" ON "BUILD_GTEST OR BUILD_GMOCK" OFF)
cmake_dependent_option(INSTALL_GMOCK "Enable installation of googlemock. (Projects embedding googlemock may want to turn this OFF.)" ON "BUILD_GMOCK" OFF)
if(BUILD_GMOCK)
add_subdirectory( googlemock )
elseif(BUILD_GTEST)
add_subdirectory( googletest )
endif()

View File

@@ -1,160 +0,0 @@
# How to become a contributor and submit your own code
## Contributor License Agreements
We'd love to accept your patches! Before we can take them, we
have to jump a couple of legal hurdles.
Please fill out either the individual or corporate Contributor License Agreement
(CLA).
* If you are an individual writing original source code and you're sure you
own the intellectual property, then you'll need to sign an
[individual CLA](https://developers.google.com/open-source/cla/individual).
* If you work for a company that wants to allow you to contribute your work,
then you'll need to sign a
[corporate CLA](https://developers.google.com/open-source/cla/corporate).
Follow either of the two links above to access the appropriate CLA and
instructions for how to sign and return it. Once we receive it, we'll be able to
accept your pull requests.
## Contributing A Patch
1. Submit an issue describing your proposed change to the
[issue tracker](https://github.com/google/googletest).
1. Please don't mix more than one logical change per submittal,
because it makes the history hard to follow. If you want to make a
change that doesn't have a corresponding issue in the issue
tracker, please create one.
1. Also, coordinate with team members that are listed on the issue in
question. This ensures that work isn't being duplicated and
communicating your plan early also generally leads to better
patches.
1. If your proposed change is accepted, and you haven't already done so, sign a
Contributor License Agreement (see details above).
1. Fork the desired repo, develop and test your code changes.
1. Ensure that your code adheres to the existing style in the sample to which
you are contributing.
1. Ensure that your code has an appropriate set of unit tests which all pass.
1. Submit a pull request.
If you are a Googler, it is preferable to first create an internal change and
have it reviewed and submitted, and then create an upstreaming pull
request here.
## The Google Test and Google Mock Communities ##
The Google Test community exists primarily through the
[discussion group](http://groups.google.com/group/googletestframework)
and the GitHub repository.
Likewise, the Google Mock community exists primarily through their own
[discussion group](http://groups.google.com/group/googlemock).
You are definitely encouraged to contribute to the
discussion and you can also help us to keep the effectiveness of the
group high by following and promoting the guidelines listed here.
### Please Be Friendly ###
Showing courtesy and respect to others is a vital part of the Google
culture, and we strongly encourage everyone participating in Google
Test development to join us in accepting nothing less. Of course,
being courteous is not the same as failing to constructively disagree
with each other, but it does mean that we should be respectful of each
other when enumerating the 42 technical reasons that a particular
proposal may not be the best choice. There's never a reason to be
antagonistic or dismissive toward anyone who is sincerely trying to
contribute to a discussion.
Sure, C++ testing is serious business and all that, but it's also
a lot of fun. Let's keep it that way. Let's strive to be one of the
friendliest communities in all of open source.
As always, discuss Google Test in the official GoogleTest discussion group.
You don't have to actually submit code in order to sign up. Your participation
itself is a valuable contribution.
## Style
To keep the source consistent, readable, diffable and easy to merge,
we use a fairly rigid coding style, as defined by the [google-styleguide](https://github.com/google/styleguide) project. All patches will be expected
to conform to the style outlined [here](https://google.github.io/styleguide/cppguide.html).
## Requirements for Contributors ##
If you plan to contribute a patch, you need to build Google Test,
Google Mock, and their own tests from a git checkout, which has
further requirements:
* [Python](https://www.python.org/) v2.3 or newer (for running some of
the tests and re-generating certain source files from templates)
* [CMake](https://cmake.org/) v2.6.4 or newer
* [GNU Build System](https://en.wikipedia.org/wiki/GNU_Build_System)
including automake (>= 1.9), autoconf (>= 2.59), and
libtool / libtoolize.
## Developing Google Test ##
This section discusses how to make your own changes to Google Test.
### Testing Google Test Itself ###
To make sure your changes work as intended and don't break existing
functionality, you'll want to compile and run Google Test's own tests.
For that you can use CMake:
mkdir mybuild
cd mybuild
cmake -Dgtest_build_tests=ON ${GTEST_DIR}
Make sure you have Python installed, as some of Google Test's tests
are written in Python. If the cmake command complains about not being
able to find Python (`Could NOT find PythonInterp (missing:
PYTHON_EXECUTABLE)`), try telling it explicitly where your Python
executable can be found:
cmake -DPYTHON_EXECUTABLE=path/to/python -Dgtest_build_tests=ON ${GTEST_DIR}
Next, you can build Google Test and all of its own tests. On \*nix,
this is usually done by 'make'. To run the tests, do
make test
All tests should pass.
### Regenerating Source Files ###
Some of Google Test's source files are generated from templates (not
in the C++ sense) using a script.
For example, the
file include/gtest/internal/gtest-type-util.h.pump is used to generate
gtest-type-util.h in the same directory.
You don't need to worry about regenerating the source files
unless you need to modify them. You would then modify the
corresponding `.pump` files and run the '[pump.py](googletest/scripts/pump.py)'
generator script. See the [Pump Manual](googletest/docs/PumpManual.md).
## Developing Google Mock ##
This section discusses how to make your own changes to Google Mock.
#### Testing Google Mock Itself ####
To make sure your changes work as intended and don't break existing
functionality, you'll want to compile and run Google Mock's own tests.
For that you'll need Autotools. First, make sure you have followed
the instructions above to configure Google Mock.
Then, create a build output directory and enter it. Next,
${GMOCK_DIR}/configure # try --help for more info
Once you have successfully configured Google Mock, the build steps are
standard for GNU-style OSS packages.
make # Standard makefile following GNU conventions
make check # Builds and runs all tests - all should pass.
Note that when building your project against Google Mock, you are building
against Google Test as well. There is no need to configure Google Test
separately.

Binary file not shown.

View File

@@ -1,14 +0,0 @@
## Process this file with automake to produce Makefile.in
ACLOCAL_AMFLAGS = -I m4
AUTOMAKE_OPTIONS = foreign
# Build . before src so that our all-local and clean-local hooks kicks in at
# the right time.
SUBDIRS = googletest googlemock
EXTRA_DIST = \
BUILD.bazel \
CMakeLists.txt \
README.md \
WORKSPACE

Some files were not shown because too many files have changed in this diff