mirror of https://github.com/XRPLF/rippled.git
synced 2026-02-14 02:42:32 +00:00

Compare commits: legleux/bu...a1q123456/ (6 commits)

8b433b6878
4796ed57a4
73e5323859
3d6c575f5c
3a172301ce
6c1a92fe93
.github/scripts/levelization/README.md (vendored, 6 lines changed)
```diff
@@ -70,7 +70,7 @@ that `test` code should _never_ be included in `xrpl` or `xrpld` code.)

 ## Validation

-The [levelization](generate.sh) script takes no parameters,
+The [levelization](generate.py) script takes no parameters,
 reads no environment variables, and can be run from any directory,
 as long as it is in the expected location in the rippled repo.
 It can be run at any time from within a checked out repo, and will
@@ -104,7 +104,7 @@ It generates many files of [results](results):
 Github Actions workflow to test that levelization loops haven't
 changed. Unfortunately, if changes are detected, it can't tell if
 they are improvements or not, so if you have resolved any issues or
-done anything else to improve levelization, run `levelization.sh`,
+done anything else to improve levelization, run `generate.py`,
 and commit the updated results.

 The `loops.txt` and `ordering.txt` files relate the modules
@@ -128,7 +128,7 @@ The committed files hide the detailed values intentionally, to
 prevent false alarms and merging issues, and because it's easy to
 get those details locally.

-1. Run `levelization.sh`
+1. Run `generate.py`
 2. Grep the modules in `paths.txt`.
    - For example, if a cycle is found `A ~= B`, simply `grep -w
      A .github/scripts/levelization/results/paths.txt | grep -w B`
```
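As a concrete version of that cycle-debugging recipe, the same filter can be written in Python; `xrpld.app` and `xrpld.core` are hypothetical stand-ins for the `A` and `B` modules of an actual cycle, not names taken from the results:

```python
# Python equivalent of: grep -w A results/paths.txt | grep -w B
from pathlib import Path

paths = Path(".github/scripts/levelization/results/paths.txt")
for line in paths.read_text().splitlines():
    words = line.split()
    # Word-boundary match, like grep -w: compare whole fields, not substrings.
    if "xrpld.app" in words and "xrpld.core" in words:
        print(line)
```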
.github/scripts/levelization/generate.py (vendored, new file, 369 lines)
```python
#!/usr/bin/env python3

"""
Usage: generate.py
This script takes no parameters, and can be run from any directory,
as long as it is in the expected location in the repo.
"""

import os
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple, Set, Optional


# Compile regex patterns once at module level
INCLUDE_PATTERN = re.compile(r"^\s*#include.*/.*\.h")
INCLUDE_PATH_PATTERN = re.compile(r'[<"]([^>"]+)[>"]')


def dictionary_sort_key(s: str) -> str:
    """
    Create a sort key that mimics 'sort -d' (dictionary order).
    Dictionary order only considers blanks and alphanumeric characters.
    This means punctuation like '.' is ignored during sorting.
    """
    # Keep only alphanumeric characters and spaces
    return "".join(c for c in s if c.isalnum() or c.isspace())


def get_level(file_path: str) -> str:
    """
    Extract the level from a file path (second and third directory components).
    Equivalent to bash: cut -d/ -f 2,3

    Examples:
        src/xrpld/app/main.cpp -> xrpld.app
        src/libxrpl/protocol/STObject.cpp -> libxrpl.protocol
        include/xrpl/basics/base_uint.h -> xrpl.basics
    """
    parts = file_path.split("/")

    # Get fields 2 and 3 (indices 1 and 2 in 0-based indexing)
    if len(parts) >= 3:
        level = f"{parts[1]}/{parts[2]}"
    elif len(parts) >= 2:
        level = f"{parts[1]}/toplevel"
    else:
        level = file_path

    # If the "level" indicates a file, cut off the filename
    if "." in level.split("/")[-1]:  # Avoid Path object creation
        # Use the "toplevel" label as a workaround for `sort`
        # inconsistencies between different utility versions
        level = level.rsplit("/", 1)[0] + "/toplevel"

    return level.replace("/", ".")


def extract_include_level(include_line: str) -> Optional[str]:
    """
    Extract the include path from an #include directive.
    Gets the first two directory components from the include path.
    Equivalent to bash: cut -d/ -f 1,2

    Examples:
        #include <xrpl/basics/base_uint.h> -> xrpl.basics
        #include "xrpld/app/main/Application.h" -> xrpld.app
    """
    # Remove everything before the quote or angle bracket
    match = INCLUDE_PATH_PATTERN.search(include_line)
    if not match:
        return None

    include_path = match.group(1)
    parts = include_path.split("/")

    # Get first two fields (indices 0 and 1)
    if len(parts) >= 2:
        include_level = f"{parts[0]}/{parts[1]}"
    else:
        include_level = include_path

    # If the "includelevel" indicates a file, cut off the filename
    if "." in include_level.split("/")[-1]:  # Avoid Path object creation
        include_level = include_level.rsplit("/", 1)[0] + "/toplevel"

    return include_level.replace("/", ".")


def find_repo_root(start_path: Path, depth_limit: int = 10) -> Path:
    """
    Find the repository root by looking for .git directory or src/include folders.
    Walks up the directory tree from the start path.
    """
    current = start_path.resolve()

    # Walk up the directory tree
    for _ in range(depth_limit):  # Limit search depth to prevent infinite loops
        # Check if this directory has src or include folders
        has_src = (current / "src").exists()
        has_include = (current / "include").exists()

        if has_src or has_include:
            return current

        # Check if this is a git repository root
        if (current / ".git").exists():
            # Check if it has src or include nearby
            if has_src or has_include:
                return current

        # Move up one level
        parent = current.parent
        if parent == current:  # Reached filesystem root
            break
        current = parent

    # If we couldn't find it, raise an error
    raise RuntimeError(
        "Could not find repository root. "
        "Expected to find a directory containing 'src' and/or 'include' folders."
    )


def get_scan_directories(repo_root: Path) -> List[Path]:
    """
    Get the list of directories to scan for include files.
    Returns paths that actually exist.
    """
    directories = []

    for dir_name in ["include", "src"]:
        dir_path = repo_root / dir_name
        if dir_path.exists() and dir_path.is_dir():
            directories.append(dir_path)

    if not directories:
        raise RuntimeError(f"No 'src' or 'include' directories found in {repo_root}")

    return directories


def main():
    # Change to the script's directory
    script_dir = Path(__file__).parent.resolve()
    os.chdir(script_dir)

    # If the shell is interactive, clean up any flotsam before analyzing
    # Match bash behavior: check if PS1 is set (indicates interactive shell)
    # When running a script, PS1 is not set even if stdin/stdout are TTYs
    if os.environ.get("PS1"):
        try:
            subprocess.run(["git", "clean", "-ix"], check=False, timeout=30)
        except (subprocess.TimeoutExpired, KeyboardInterrupt):
            print("Skipping git clean...")
        except Exception:
            # If git clean fails for any reason, just continue
            pass

    # Clean up and create results directory
    results_dir = script_dir / "results"
    if results_dir.exists():
        import shutil

        shutil.rmtree(results_dir)
    results_dir.mkdir()

    # Find the repository root by searching for src and include directories
    try:
        repo_root = find_repo_root(script_dir)
        scan_dirs = get_scan_directories(repo_root)

        print(f"Found repository root: {repo_root}")
        print(f"Scanning directories:")
        for scan_dir in scan_dirs:
            print(f"  - {scan_dir.relative_to(repo_root)}")
    except RuntimeError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)

    print("\nScanning for raw includes...")
    # Find all #include directives
    raw_includes: List[Tuple[str, str]] = []
    rawincludes_file = results_dir / "rawincludes.txt"

    # Write to file as we go to avoid storing everything in memory
    with open(rawincludes_file, "w", buffering=8192) as raw_f:
        for dir_path in scan_dirs:
            print(f"  Scanning {dir_path.relative_to(repo_root)}...")

            for file_path in dir_path.rglob("*"):
                if not file_path.is_file():
                    continue

                try:
                    rel_path_str = str(file_path.relative_to(repo_root))

                    # Read file with larger buffer for better performance
                    with open(
                        file_path,
                        "r",
                        encoding="utf-8",
                        errors="ignore",
                        buffering=8192,
                    ) as f:
                        for line in f:
                            # Quick check before regex
                            if "#include" not in line or "boost" in line:
                                continue

                            if INCLUDE_PATTERN.match(line):
                                line_stripped = line.strip()
                                entry = f"{rel_path_str}:{line_stripped}\n"
                                print(entry, end="")
                                raw_f.write(entry)
                                raw_includes.append((rel_path_str, line_stripped))
                except Exception as e:
                    print(f"Error reading {file_path}: {e}", file=sys.stderr)

    # Build levelization paths and count directly (no need to sort first)
    print("Build levelization paths")
    path_counts: Dict[Tuple[str, str], int] = defaultdict(int)

    for file_path, include_line in raw_includes:
        level = get_level(file_path)
        include_level = extract_include_level(include_line)

        if include_level and level != include_level:
            path_counts[(level, include_level)] += 1

    # Sort and deduplicate paths (using dictionary order like bash 'sort -d')
    print("Sort and deduplicate paths")

    paths_file = results_dir / "paths.txt"
    with open(paths_file, "w") as f:
        # Sort using dictionary order: only alphanumeric and spaces matter
        sorted_items = sorted(
            path_counts.items(),
            key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
        )
        for (level, include_level), count in sorted_items:
            line = f"{count:7} {level} {include_level}\n"
            print(line.rstrip())
            f.write(line)

    # Split into flat-file database
    print("Split into flat-file database")
    includes_dir = results_dir / "includes"
    included_by_dir = results_dir / "included_by"
    includes_dir.mkdir()
    included_by_dir.mkdir()

    # Batch writes by grouping data first to avoid repeated file opens
    includes_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)
    included_by_data: Dict[str, List[Tuple[str, int]]] = defaultdict(list)

    # Process in sorted order to match bash script behavior (dictionary order)
    sorted_items = sorted(
        path_counts.items(),
        key=lambda x: (dictionary_sort_key(x[0][0]), dictionary_sort_key(x[0][1])),
    )
    for (level, include_level), count in sorted_items:
        includes_data[level].append((include_level, count))
        included_by_data[include_level].append((level, count))

    # Write all includes files in sorted order (dictionary order)
    for level in sorted(includes_data.keys(), key=dictionary_sort_key):
        entries = includes_data[level]
        with open(includes_dir / level, "w") as f:
            for include_level, count in entries:
                line = f"{include_level} {count}\n"
                print(line.rstrip())
                f.write(line)

    # Write all included_by files in sorted order (dictionary order)
    for include_level in sorted(included_by_data.keys(), key=dictionary_sort_key):
        entries = included_by_data[include_level]
        with open(included_by_dir / include_level, "w") as f:
            for level, count in entries:
                line = f"{level} {count}\n"
                print(line.rstrip())
                f.write(line)

    # Search for loops
    print("Search for loops")
    loops_file = results_dir / "loops.txt"
    ordering_file = results_dir / "ordering.txt"

    loops_found: Set[Tuple[str, str]] = set()

    # Pre-load all include files into memory to avoid repeated I/O
    # This is the biggest optimization - we were reading files repeatedly in nested loops
    # Use list of tuples to preserve file order
    includes_cache: Dict[str, List[Tuple[str, int]]] = {}
    includes_lookup: Dict[str, Dict[str, int]] = {}  # For fast lookup

    # Note: bash script uses 'for source in *' which uses standard glob sorting,
    # NOT dictionary order. So we use standard sorted() here, not dictionary_sort_key.
    for include_file in sorted(includes_dir.iterdir(), key=lambda p: p.name):
        if not include_file.is_file():
            continue

        includes_cache[include_file.name] = []
        includes_lookup[include_file.name] = {}
        with open(include_file, "r") as f:
            for line in f:
                parts = line.strip().split()
                if len(parts) >= 2:
                    include_name = parts[0]
                    include_count = int(parts[1])
                    includes_cache[include_file.name].append(
                        (include_name, include_count)
                    )
                    includes_lookup[include_file.name][include_name] = include_count

    with open(loops_file, "w", buffering=8192) as loops_f, open(
        ordering_file, "w", buffering=8192
    ) as ordering_f:

        # Use standard sorting to match bash glob expansion 'for source in *'
        for source in sorted(includes_cache.keys()):
            source_includes = includes_cache[source]

            for include, include_freq in source_includes:
                # Check if include file exists and references source
                if include not in includes_lookup:
                    continue

                source_freq = includes_lookup[include].get(source)

                if source_freq is not None:
                    # Found a loop
                    loop_key = tuple(sorted([source, include]))
                    if loop_key in loops_found:
                        continue
                    loops_found.add(loop_key)

                    loops_f.write(f"Loop: {source} {include}\n")

                    # If the counts are close, indicate that the two modules are
                    # on the same level, though they shouldn't be
                    diff = include_freq - source_freq
                    if diff > 3:
                        loops_f.write(f" {source} > {include}\n\n")
                    elif diff < -3:
                        loops_f.write(f" {include} > {source}\n\n")
                    elif source_freq == include_freq:
                        loops_f.write(f" {include} == {source}\n\n")
                    else:
                        loops_f.write(f" {include} ~= {source}\n\n")
                else:
                    ordering_f.write(f"{source} > {include}\n")

    # Print results
    print("\nOrdering:")
    with open(ordering_file, "r") as f:
        print(f.read(), end="")

    print("\nLoops:")
    with open(loops_file, "r") as f:
        print(f.read(), end="")


if __name__ == "__main__":
    main()
```
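Since `get_level` and `extract_include_level` are pure functions, their behavior is easy to spot-check against the docstring examples. A minimal sketch, assuming it is run from the script's directory so that `generate` is importable:

```python
from generate import get_level, extract_include_level, dictionary_sort_key

# Expected outputs come straight from the docstrings above.
assert get_level("src/xrpld/app/main.cpp") == "xrpld.app"
assert get_level("include/xrpl/basics/base_uint.h") == "xrpl.basics"
assert extract_include_level("#include <xrpl/basics/base_uint.h>") == "xrpl.basics"
assert extract_include_level('#include "xrpld/app/main/Application.h"') == "xrpld.app"
# 'sort -d' semantics: punctuation is ignored when comparing keys.
assert dictionary_sort_key("xrpl.basics") == "xrplbasics"
```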
.github/scripts/levelization/generate.sh (vendored, deleted, 130 lines)
```bash
#!/bin/bash

# Usage: generate.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.

pushd $( dirname $0 )

if [ -v PS1 ]
then
  # if the shell is interactive, clean up any flotsam before analyzing
  git clean -ix
fi

# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C

rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
  grep -v boost | tee ${includes}
popd
pushd results

oldifs=${IFS}
IFS=:
mkdir includes
mkdir included_by
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
  level=$( echo ${file} | cut -d/ -f 2,3 )
  # If the "level" indicates a file, cut off the filename
  if [[ "${level##*.}" != "${level}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    level="$( dirname ${level} )/toplevel"
  fi
  level=$( echo ${level} | tr '/' '.' )

  includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
    cut -d/ -f 1,2 )
  if [[ "${includelevel##*.}" != "${includelevel}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    includelevel="$( dirname ${includelevel} )/toplevel"
  fi
  includelevel=$( echo ${includelevel} | tr '/' '.' )

  if [[ "$level" != "$includelevel" ]]
  then
    echo $level $includelevel | tee -a paths.txt
  fi
done
echo Sort and deduplicate paths
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3
IFS=${oldifs}
unset oldifs

echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
  echo ${include} ${count} | tee -a includes/${level}
  echo ${level} ${count} | tee -a included_by/${include}
done
exec 4>&- #close fd 4

loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
exec 4>&1
exec 1>"${loops}"
for source in *
do
  if [[ -f "$source" ]]
  then
    exec 5<"${source}" # open for input
    while read -r -u 5 include includefreq
    do
      if [[ -f $include ]]
      then
        if grep -q -w $source $include
        then
          if grep -q -w "Loop: $include $source" "${loops}"
          then
            continue
          fi
          sourcefreq=$( grep -w $source $include | cut -d\  -f2 )
          echo "Loop: $source $include"
          # If the counts are close, indicate that the two modules are
          # on the same level, though they shouldn't be
          if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
          then
            echo -e " $source > $include\n"
          elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
          then
            echo -e " $include > $source\n"
          elif [[ $sourcefreq -eq $includefreq ]]
          then
            echo -e " $include == $source\n"
          else
            echo -e " $include ~= $source\n"
          fi
        else
          echo "$source > $include" >> "${ordering}"
        fi
      fi
    done
    exec 5>&- #close fd 5
  fi
done
exec 1>&4 #close fd 1
exec 4>&- #close fd 4
cat "${ordering}"
cat "${loops}"
popd
popd
popd
```
```diff
@@ -153,6 +153,7 @@ tests.libxrpl > xrpl.json
tests.libxrpl > xrpl.net
xrpl.core > xrpl.basics
xrpl.core > xrpl.json
xrpl.core > xrpl.ledger
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
```
.github/scripts/strategy-matrix/generate.py (vendored, 61 lines changed)
```diff
@@ -51,22 +51,20 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
         # Only generate a subset of configurations in PRs.
         if not all:
             # Debian:
-            # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
-            #   the reference fee to 500.
-            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
-            #   code coverage (which will be done below).
-            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
-            #   enable voidstar.
-            # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
-            #   set the reference fee to 1000.
-            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
+            # - Bookworm using GCC 13: Release on linux/amd64, set the reference
+            #   fee to 500.
+            # - Bookworm using GCC 15: Debug on linux/amd64, enable code
+            #   coverage (which will be done below).
+            # - Bookworm using Clang 16: Debug on linux/arm64, enable voidstar.
+            # - Bookworm using Clang 17: Release on linux/amd64, set the
+            #   reference fee to 1000.
+            # - Bookworm using Clang 20: Debug on linux/amd64.
             if os["distro_name"] == "debian":
                 skip = True
                 if os["distro_version"] == "bookworm":
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
                         and build_type == "Release"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
@@ -74,14 +72,12 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
                         and build_type == "Debug"
-                        and "-Dunity=OFF" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
                         and build_type == "Debug"
-                        and "-Dunity=OFF" in cmake_args
                         and architecture["platform"] == "linux/arm64"
                     ):
                         cmake_args = f"-Dvoidstar=ON {cmake_args}"
@@ -89,7 +85,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
                         and build_type == "Release"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
@@ -97,7 +92,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
                         and build_type == "Debug"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
@@ -105,15 +99,14 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     continue

             # RHEL:
-            # - 9 using GCC 12: Debug and Unity on linux/amd64.
-            # - 10 using Clang: Release and no Unity on linux/amd64.
+            # - 9 using GCC 12: Debug on linux/amd64.
+            # - 10 using Clang: Release on linux/amd64.
             if os["distro_name"] == "rhel":
                 skip = True
                 if os["distro_version"] == "9":
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
                         and build_type == "Debug"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
@@ -121,7 +114,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
                         and build_type == "Release"
-                        and "-Dunity=OFF" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
@@ -129,17 +121,16 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     continue

             # Ubuntu:
-            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
-            # - Noble using GCC 14: Release and Unity on linux/amd64.
-            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
-            # - Noble using Clang 19: Release and Unity on linux/arm64.
+            # - Jammy using GCC 12: Debug on linux/arm64.
+            # - Noble using GCC 14: Release on linux/amd64.
+            # - Noble using Clang 18: Debug on linux/amd64.
+            # - Noble using Clang 19: Release on linux/arm64.
             if os["distro_name"] == "ubuntu":
                 skip = True
                 if os["distro_version"] == "jammy":
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
                         and build_type == "Debug"
-                        and "-Dunity=OFF" in cmake_args
                         and architecture["platform"] == "linux/arm64"
                     ):
                         skip = False
@@ -147,21 +138,18 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
                         and build_type == "Release"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
                         and build_type == "Debug"
-                        and "-Dunity=OFF" in cmake_args
                         and architecture["platform"] == "linux/amd64"
                     ):
                         skip = False
                     if (
                         f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
                         and build_type == "Release"
-                        and "-Dunity=ON" in cmake_args
                         and architecture["platform"] == "linux/arm64"
                     ):
                         skip = False
@@ -169,20 +157,16 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
                     continue

             # MacOS:
-            # - Debug and no Unity on macos/arm64.
+            # - Debug on macos/arm64.
             if os["distro_name"] == "macos" and not (
-                build_type == "Debug"
-                and "-Dunity=OFF" in cmake_args
-                and architecture["platform"] == "macos/arm64"
+                build_type == "Debug" and architecture["platform"] == "macos/arm64"
             ):
                 continue

             # Windows:
-            # - Release and Unity on windows/amd64.
+            # - Release on windows/amd64.
             if os["distro_name"] == "windows" and not (
-                build_type == "Release"
-                and "-Dunity=ON" in cmake_args
-                and architecture["platform"] == "windows/amd64"
+                build_type == "Release" and architecture["platform"] == "windows/amd64"
             ):
                 continue

@@ -209,18 +193,17 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
         ):
             continue

-        # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
-        # Unity on linux/amd64
+        # Enable code coverage for Debian Bookworm using GCC 15 in Debug on
+        # linux/amd64
         if (
             f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
             and build_type == "Debug"
-            and "-Dunity=OFF" in cmake_args
             and architecture["platform"] == "linux/amd64"
         ):
             cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"

         # Generate a unique name for the configuration, e.g. macos-arm64-debug
-        # or debian-bookworm-gcc-12-amd64-release-unity.
+        # or debian-bookworm-gcc-12-amd64-release.
         config_name = os["distro_name"]
         if (n := os["distro_version"]) != "":
             config_name += f"-{n}"
@@ -234,8 +217,6 @@ def generate_strategy_matrix(all: bool, config: Config) -> list:
         config_name += f"-{build_type.lower()}"
         if "-Dcoverage=ON" in cmake_args:
             config_name += "-coverage"
-        if "-Dunity=ON" in cmake_args:
-            config_name += "-unity"

         # Add the configuration to the list, with the most unique fields first,
         # so that they are easier to identify in the GitHub Actions UI, as long
```
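One step worth making explicit (an observation about this diff, not text from the PR): once the strategy-matrix JSON files below shrink `cmake_args` to a single empty string, the removed `-Dunity=...` membership tests could never match again, so every pinned configuration would have been skipped had they stayed. A minimal illustration:

```python
# After this change the JSON matrices provide a single empty cmake_args value,
# so the removed substring tests could never succeed:
cmake_args = ""                      # the only value left in the matrices
print("-Dunity=ON" in cmake_args)    # False: the old Release pins would never fire
print("-Dunity=OFF" in cmake_args)   # False: the old Debug pins would never fire
```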
.github/scripts/strategy-matrix/linux.json (vendored, 187 lines changed)
```diff
@@ -17,13 +17,196 @@
       "compiler_version": "12",
       "image_sha": "ab4d1f0"
     },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "gcc",
+      "compiler_version": "13",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "gcc",
+      "compiler_version": "15",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "clang",
+      "compiler_version": "16",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "clang",
+      "compiler_version": "17",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "clang",
+      "compiler_version": "18",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "clang",
+      "compiler_version": "19",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "bookworm",
+      "compiler_name": "clang",
+      "compiler_version": "20",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "trixie",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "trixie",
+      "compiler_name": "gcc",
+      "compiler_version": "15",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "trixie",
+      "compiler_name": "clang",
+      "compiler_version": "20",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "debian",
+      "distro_version": "trixie",
+      "compiler_name": "clang",
+      "compiler_version": "21",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "8",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "8",
+      "compiler_name": "clang",
+      "compiler_version": "any",
+      "image_sha": "ab4d1f0"
+    },
     {
       "distro_name": "rhel",
       "distro_version": "9",
       "compiler_name": "gcc",
-      "compiler_version": "12"
+      "compiler_version": "12",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "9",
+      "compiler_name": "gcc",
+      "compiler_version": "13",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "9",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "9",
+      "compiler_name": "clang",
+      "compiler_version": "any",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "10",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "rhel",
+      "distro_version": "10",
+      "compiler_name": "clang",
+      "compiler_version": "any",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "jammy",
+      "compiler_name": "gcc",
+      "compiler_version": "12",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "gcc",
+      "compiler_version": "13",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "gcc",
+      "compiler_version": "14",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "clang",
+      "compiler_version": "16",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "clang",
+      "compiler_version": "17",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "clang",
+      "compiler_version": "18",
+      "image_sha": "ab4d1f0"
+    },
+    {
+      "distro_name": "ubuntu",
+      "distro_version": "noble",
+      "compiler_name": "clang",
+      "compiler_version": "19",
+      "image_sha": "ab4d1f0"
     }
   ],
   "build_type": ["Debug", "Release"],
-  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
+  "cmake_args": [""]
 }
```
.github/scripts/strategy-matrix/macos.json (vendored, 5 lines changed)
```diff
@@ -15,8 +15,5 @@
     }
   ],
   "build_type": ["Debug", "Release"],
-  "cmake_args": [
-    "-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
-    "-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
-  ]
+  "cmake_args": ["-DCMAKE_POLICY_VERSION_MINIMUM=3.5"]
 }
```
.github/scripts/strategy-matrix/windows.json (vendored, 2 lines changed)
```diff
@@ -15,5 +15,5 @@
     }
   ],
   "build_type": ["Debug", "Release"],
-  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
+  "cmake_args": [""]
 }
```
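Dropping the unity axis from all three matrix files halves the raw configuration count before any PR-subset filtering. A back-of-the-envelope sketch, assuming (as the loop variables in generate.py suggest) that the matrix is the cross product of the os, build_type, and cmake_args axes; `num_os` is a hypothetical count, not taken from the files:

```python
num_os = 28                          # hypothetical number of "os" entries
num_build_types = 2                  # ["Debug", "Release"]
old_cmake_args = ["-Dunity=OFF", "-Dunity=ON"]
new_cmake_args = [""]
print(num_os * num_build_types * len(old_cmake_args))  # 112 raw configs before
print(num_os * num_build_types * len(new_cmake_args))  # 56 raw configs after
```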
.github/workflows/on-pr.yml (vendored, 102 lines changed)
```diff
@@ -1,14 +1,11 @@
-# This workflow runs all workflows to check, build, package and test the project on
-# various Linux flavors, as well as on macOS and Windows, on every push to a
+# This workflow runs all workflows to check, build and test the project on
+# various Linux flavors, as well as on MacOS and Windows, on every push to a
 # user branch. However, it will not run if the pull request is a draft unless it
 # has the 'DraftRunCI' label. For commits to PRs that target a release branch,
 # it also uploads the libxrpl recipe to the Conan remote.
 name: PR

 on:
-  push:
-    branches:
-      - legleux/build
   merge_group:
     types:
       - checks_requested
@@ -68,9 +65,6 @@ jobs:
             .github/workflows/reusable-build.yml
             .github/workflows/reusable-build-test-config.yml
             .github/workflows/reusable-build-test.yml
-            .github/workflows/reusable-build-pkg.yml
-            .github/workflows/reusable-pkg.yml
-            .github/workflows/reusable-package.yml
             .github/workflows/reusable-strategy-matrix.yml
             .github/workflows/reusable-test.yml
             .github/workflows/reusable-upload-recipe.yml
@@ -79,7 +73,6 @@ jobs:
            conan/**
            external/**
            include/**
-           pkgs/**
            src/**
            tests/**
            CMakeLists.txt
@@ -104,57 +97,68 @@ jobs:
     outputs:
       go: ${{ steps.go.outputs.go == 'true' }}

-  # check-levelization:
-  #   needs: should-run
-  #   if: ${{ needs.should-run.outputs.go == 'true' }}
-  #   uses: ./.github/workflows/reusable-check-levelization.yml
-
-  # build-test:
-  #   needs: should-run
-  #   if: ${{ needs.should-run.outputs.go == 'true' }}
-  #   uses: ./.github/workflows/reusable-build-test.yml
-  #   strategy:
-  #     matrix:
-  #       os: [linux, macos, windows]
-  #   with:
-  #     os: ${{ matrix.os }}
-  #   secrets:
-  #     CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-
-  build-package:
-    name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
+  check-levelization:
     needs: should-run
     if: ${{ needs.should-run.outputs.go == 'true' }}
-    uses: ./.github/workflows/reusable-build-pkg.yml
-    secrets: inherit
+    uses: ./.github/workflows/reusable-check-levelization.yml
+
+  check-rename:
+    needs: should-run
+    if: ${{ needs.should-run.outputs.go == 'true' }}
+    uses: ./.github/workflows/reusable-check-rename.yml
+
+  build-test:
+    needs: should-run
+    if: ${{ needs.should-run.outputs.go == 'true' }}
+    uses: ./.github/workflows/reusable-build-test.yml
     strategy:
       fail-fast: false
       matrix:
-        # pkg_type: [rpm]
-        pkg_type: [deb, rpm]
-        arch: [amd64]
-        # arch: [amd64, arm64]
+        os: [linux, macos, windows]
     with:
-      pkg_type: ${{ matrix.pkg_type }}
-      arch: ${{ matrix.arch }}
-      # Enable ccache only for events targeting the XRPLF repository, since
-      # other accounts will not have access to our remote cache storage.
-      ccache_enabled: ${{ github.repository_owner == 'XRPLF' }}
+      os: ${{ matrix.os }}
+    secrets:
+      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

-  # notify-clio:
-  #   needs:
-  #     - should-run
-  #     - build-test
-  #   if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
-  #   uses: ./.github/workflows/reusable-notify-clio.yml
-  #   secrets:
-  #     clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
-  #     conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
-  #     conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
+  upload-recipe:
+    needs:
+      - should-run
+      - build-test
+    # Only run when committing to a PR that targets a release branch in the
+    # XRPLF repository.
+    if: ${{ github.repository_owner == 'XRPLF' && needs.should-run.outputs.go == 'true' && startsWith(github.ref, 'refs/heads/release') }}
+    uses: ./.github/workflows/reusable-upload-recipe.yml
+    secrets:
+      remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
+      remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
+
+  notify-clio:
+    needs: upload-recipe
+    runs-on: ubuntu-latest
+    steps:
+      # Notify the Clio repository about the newly proposed release version, so
+      # it can be checked for compatibility before the release is actually made.
+      - name: Notify Clio
+        env:
+          GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
+          PR_URL: ${{ github.event.pull_request.html_url }}
+        run: |
+          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
+            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
+            -F "client_payload[ref]=${{ needs.upload-recipe.outputs.recipe_ref }}" \
+            -F "client_payload[pr_url]=${PR_URL}"

   passed:
     if: failure() || cancelled()
     needs:
-      # - build-test
-      # - check-levelization
-      - build-package
+      - check-levelization
+      - check-rename
+      - build-test
+      - upload-recipe
+      - notify-clio
     runs-on: ubuntu-latest
     steps:
       - name: Fail
```
.github/workflows/on-trigger.new.yml (vendored, deleted, 97 lines)
```yaml
# This workflow runs all workflows to build and test the code on various Linux
# flavors, as well as on MacOS and Windows, on a scheduled basis, on merge into
# the 'develop' or 'release*' branches, or when requested manually. Upon pushes
# to the develop branch it also uploads the libxrpl recipe to the Conan remote.
name: Trigger

on:
  push:
    branches:
      - legleux/linux_packages
      - develop
      - release
      - master
    paths:
      # These paths are unique to `on-trigger.yml`.
      - ".github/workflows/on-trigger.yml"

      # Keep the paths below in sync with those in `on-pr.yml`.
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/actions/generate-version/**"
      - ".github/actions/setup-conan/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/reusable-build.yml"
      - ".github/workflows/reusable-build-test-config.yml"
      - ".github/workflows/reusable-build-test.yml"
      - ".github/workflows/reusable-build-pkg.yml"
      - ".github/workflows/reusable-pkg.yml"
      - ".github/workflows/reusable-package.yml"
      - ".github/workflows/reusable-strategy-matrix.yml"
      - ".github/workflows/reusable-test.yml"
      - ".github/workflows/reusable-upload-recipe.yml"
      - ".codecov.yml"
      - "cmake/**"
      - "conan/**"
      - "external/**"
      - "include/**"
      - "pkgs/**"
      - "src/**"
      - "tests/**"
      - "CMakeLists.txt"
      - "conanfile.py"
      - "conan.lock"

  # Run at 06:32 UTC on every day of the week from Monday through Friday. This
  # will force all dependencies to be rebuilt, which is useful to verify that
  # all dependencies can be built successfully. Only the dependencies that
  # are actually missing from the remote will be uploaded.
  schedule:
    - cron: "32 6 * * 1-5"

  # Run when manually triggered via the GitHub UI or API.
  workflow_dispatch:

concurrency:
  # When a PR is merged into the develop branch it will be assigned a unique
  # group identifier, so execution will continue even if another PR is merged
  # while it is still running. In all other cases the group identifier is shared
  # per branch, so that any in-progress runs are cancelled when a new commit is
  # pushed.
  group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # check-missing-commits:
  #   if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
  #   uses: ./.github/workflows/reusable-check-missing-commits.yml

  # build-test:
  #   uses: ./.github/workflows/reusable-build-test.yml
  #   strategy:
  #     matrix:
  #       os: [linux, macos, windows]
  #   with:
  #     os: ${{ matrix.os }}
  #     strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
  #   secrets:
  #     CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  build-package:
    name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
    uses: ./.github/workflows/reusable-build-pkg.yml
    secrets: inherit
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix:
        # pkg_type: [rpm]
        pkg_type: [deb, rpm]
        arch: [amd64]
        # arch: [amd64, arm64]
    with:
      pkg_type: ${{ matrix.pkg_type }}
      arch: ${{ matrix.arch }}
```
.github/workflows/on-trigger.yml (vendored, 20 lines changed)
```diff
@@ -9,7 +9,6 @@ on:
     branches:
       - "develop"
       - "release*"
-      - "linux_packages_squashed"
     paths:
       # These paths are unique to `on-trigger.yml`.
       - ".github/workflows/on-trigger.yml"
@@ -23,9 +22,6 @@ on:
       - ".github/workflows/reusable-build.yml"
       - ".github/workflows/reusable-build-test-config.yml"
       - ".github/workflows/reusable-build-test.yml"
-      - ".github/workflows/reusable-build-pkg.yml"
-      - ".github/workflows/reusable-pkg.yml"
-      - ".github/workflows/reusable-package.yml"
       - ".github/workflows/reusable-strategy-matrix.yml"
       - ".github/workflows/reusable-test.yml"
       - ".github/workflows/reusable-upload-recipe.yml"
@@ -34,7 +30,6 @@ on:
       - "conan/**"
       - "external/**"
       - "include/**"
-      - "pkgs/**"
       - "src/**"
       - "tests/**"
       - "CMakeLists.txt"
@@ -83,21 +78,6 @@ jobs:
     secrets:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

-  build-package:
-    name: Build ${{ matrix.pkg_type }} ${{ matrix.arch }} packages
-    uses: ./.github/workflows/reusable-build-pkg.yml
-    secrets: inherit
-    strategy:
-      fail-fast: ${{ github.event_name == 'merge_group' }}
-      matrix:
-        # pkg_type: [rpm]
-        pkg_type: [deb, rpm]
-        arch: [amd64]
-        # arch: [amd64, arm64]
-    with:
-      pkg_type: ${{ matrix.pkg_type }}
-      arch: ${{ matrix.arch }}
-
   upload-recipe:
     needs: build-test
     # Only run when pushing to the develop branch in the XRPLF repository.
```
.github/workflows/package-test.yml (vendored, deleted, 34 lines)
```yaml
name: Test rippled

on:
  workflow_call:
    inputs:
      pkg_type:
        description: "Whether to run unit tests"
        required: true
        type: boolean

      arch:
        description: Runner to run the job on as a JSON string
        required: true
        type: string
jobs:
  test:
    name: Test ${{ inputs.pkg_type }}-${{ inputs.arch }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - { pkg: rpm, distro: "rocky:9" }
          - { pkg: deb, distro: "ubuntu:jammy" }
          - { pkg: deb, distro: "debian:trixie" }
    runs-on: ubuntu-latest
    container: ${{ matrix.distro }}
    steps:
      - name: run unittests
        run: |
          ls -lh
      # - name: Download rippled artifact
      #   uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
      #   with:
      #     name: rippled-${{ inputs.config_name }}
```
.github/workflows/reusable-build-pkg.yml (vendored, deleted, 148 lines)
```yaml
on:
  workflow_call:
    inputs:
      pkg_type:
        required: false
        type: string
      arch:
        required: false
        type: string
    # secrets:
    #   GPG_KEY_B64:
    #     description: "The gpg key to sign packages."
    #     required: true
    #   GPG_KEY_PASS_B64:
    #     description: "The gpg key passphrase."
    #     required: true
defaults:
  run:
    shell: bash
jobs:
  build:
    name: Build ${{ inputs.pkg_type }} ${{ inputs.arch }} package
    runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
    container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Build packages
        run: |
          ./pkgs/build.sh
          cat build_vars >> $GITHUB_STEP_SUMMARY

      - uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
          path: |
            *.deb
            *.ddeb
          if-no-files-found: error
        if: inputs.pkg_type == 'deb'

      - uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
          path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
          if-no-files-found: error
        if: inputs.pkg_type == 'rpm'

  test:
    name: Test ${{ inputs.pkg_type }} ${{ inputs.arch }} package
    needs: build
    runs-on: heavy${{ inputs.arch == 'arm64' && '-arm64' || '' }}
    container: ghcr.io/xrplf/ci/${{ inputs.pkg_type == 'rpm' && 'rhel-9' || 'ubuntu-jammy' }}:gcc-12
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.pkg_type }}-${{ inputs.arch }}
      - name: Running tests
        run: echo "Running tests..."

  sign:
    name: Sign ${{ inputs.pkg_type }} ${{ inputs.arch }} package
    needs: build
    runs-on: ubuntu-latest
    container: ghcr.io/astral-sh/uv:python3.13-bookworm-slim

    steps:
      - name: Install gpg & rpm
        run: apt-get update && apt-get install -y gpg rpm

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.pkg_type }}-${{ inputs.arch }}

      - name: Sign
        env:
          PYTHONUNBUFFERED: 1
          GPG_KEY_B64: ${{ secrets.GPG_KEY_B64 }}
          GPG_KEY_PASS_B64: ${{ secrets.GPG_KEY_PASS_B64 }}
        run: |
          if [ "${{ inputs.pkg_type }}" = "rpm" ]; then
            for i in $(find . -maxdepth 1 -type f -name "rippled-[0-9]*.rpm"); do
              echo "found $i"
              ./pkgs/sign_packages.py "$i"
            done
          elif [ "${{ inputs.pkg_type }}" = "deb" ]; then
            for i in $(find . -maxdepth 1 -type f -name "rippled_*.deb"); do
              echo "found $i"
              ./pkgs/sign_packages.py "$i"
            done
          fi

      - uses: actions/upload-artifact@v4
        with:
          name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
          path: |
            *.deb
            *.ddeb
          if-no-files-found: error
        if: inputs.pkg_type == 'deb'

      - uses: actions/upload-artifact@v4
        with:
          name: signed-rippled-${{ inputs.pkg_type }}-${{ inputs.arch }}
          path: "*${{ inputs.arch }}.${{ inputs.pkg_type }}"
          if-no-files-found: error
        if: inputs.pkg_type == 'rpm'

  docker:
    name: Build Docker image
    if: inputs.pkg_type == 'deb'
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - uses: actions/download-artifact@v4
        with:
          name: deb-${{ inputs.arch }}

      - uses: docker/setup-buildx-action@v3

      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/metadata-action@v5
        id: meta
        with:
          images: ghcr.io/${{ github.repository_owner }}/rippled
          tags: |
            type=ref,event=branch
            type=ref,event=tag
            type=sha

      - uses: docker/build-push-action@v6
        with:
          context: .
          file: pkgs/docker/Dockerfile
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
```
```diff
@@ -20,7 +20,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
       - name: Check levelization
-        run: .github/scripts/levelization/generate.sh
+        run: python .github/scripts/levelization/generate.py
       - name: Check for differences
         env:
           MESSAGE: |
@@ -32,7 +32,7 @@ jobs:
             removed from loops.txt, it's probably an improvement, while if
             something was added, it's probably a regression.

-            Run '.github/scripts/levelization/generate.sh' in your repo, commit
+            Run '.github/scripts/levelization/generate.py' in your repo, commit
             and push the changes. See .github/scripts/levelization/README.md for
             more info.
         run: |
```
.github/workflows/reusable-package.yml (vendored, deleted, 69 lines)
```yaml
name: Package rippled

on:
  workflow_call:
    inputs:
      build_type:
        description: 'The build type to use ("Debug", "Release").'
        required: false
        type: string
        default: 'Release'
      cmake_args:
        description: "Additional arguments to pass to CMake."
        required: false
        type: string
      cmake_target:
        description: "The CMake target to build."
        required: false
        type: string

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string
      image:
        description: "The image to run in (leave empty to run natively)"
        required: false
        type: string
        default: ''

      config_name:
        description: "The name of the configuration."
        required: false
        type: string

defaults:
  run:
    shell: bash

jobs:
  build:
    name: Package ${{ inputs.config_name }}
    runs-on: ${{ fromJSON(inputs.runs_on) }}
    container: ${{ inputs.image != '' && inputs.image || null }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Build packages
        run: |
          export BUILD_TYPE=${{ inputs.build_type }}
          export CMAKE_ARGS=${{ inputs.cmake_args }}
          export CMAKE_TARGETS=${{ inputs.cmake_target }}

          ./pkgs/build.sh

          {
            echo "<table>"
            while IFS='=' read -r k v; do
              printf '<tr><td>%s</td><td align="right"><code>%s</code></td></tr>\n' "$k" "$v"
            done < build_vars
            echo "</table>"
          } >> "$GITHUB_STEP_SUMMARY"

      - uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.config_name }}
          path: '**/*.{deb,rpm}'
          if-no-files-found: error
```
.github/workflows/reusable-pkg.yml (vendored, deleted, 41 lines)
```yaml
name: Package

on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: false
        type: string
        default: linux
      strategy_matrix_subset:
        description: 'The strategy matrix to use for generating a subset of configurations.'
        required: false
        type: string
        default: "package"

jobs:
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      os: ${{ inputs.os }}
      strategy_matrix_subset: ${{ inputs.strategy_matrix_subset }}

  package:
    needs:
      - generate-matrix
    uses: ./.github/workflows/reusable-package.yml
    strategy:
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
    with:
      build_type: ${{ matrix.build_type }}
      cmake_args: ${{ matrix.cmake_args }}
      cmake_target: ${{ matrix.cmake_target }}
      runs_on: ${{ toJSON(matrix.architecture.runner) }}
      image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-5dd7158', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || '' }}
      config_name: ${{ matrix.config_name }}
```
```diff
@@ -13,10 +13,6 @@ on:
         required: false
         type: string
         default: "minimal"
-      strategy_matrix_subset:
-        description: 'The strategy matrix to use for generating a subset of configs.'
-        required: false
-        type: string
     outputs:
       matrix:
         description: "The generated strategy matrix."
@@ -46,5 +42,4 @@ jobs:
         env:
           GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
           GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
-          GENERATE_SUBSET: ${{ inputs.strategy_matrix_subset != '' && format('--{0}', inputs.strategy_matrix_subset) || '' }}
-        run: ./generate.py ${{ env.GENERATE_SUBSET }} ${{ env.GENERATE_OPTION }} ${{ env.GENERATE_CONFIG }} >> "${GITHUB_OUTPUT}"
+        run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
```
.gitignore (vendored, 3 lines changed)
```diff
@@ -71,3 +71,6 @@ DerivedData
 /.augment
 /.claude
 /CLAUDE.md
+
+# Python
+__pycache__
```
BUILD.md (37 lines changed)
@@ -368,6 +368,36 @@ The workaround for this error is to add two lines to your profile:

tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
```

### Set Up Ccache

To speed up repeated compilations, we recommend that you install
[ccache](https://ccache.dev), a tool that wraps your compiler so that it can
cache build objects locally.

#### Linux

You can install it using the package manager, e.g. `sudo apt install ccache`
(Ubuntu) or `sudo dnf install ccache` (RHEL).

#### macOS

You can install it using Homebrew, i.e. `brew install ccache`.

#### Windows

You can install it using Chocolatey, i.e. `choco install ccache`. If you already
have Ccache installed, then `choco upgrade ccache` will update it to the latest
version. However, if you see an error such as:

```
terminate called after throwing an instance of 'std::bad_alloc'
what(): std::bad_alloc
C:\Program Files\Microsoft Visual Studio\2022\Community\MSBuild\Microsoft\VC\v170\Microsoft.CppCommon.targets(617,5): error MSB6006: "cl.exe" exited with code 3.
```

then please install a specific version of Ccache that we know works, via:
`choco install ccache --version 4.11.3 --allow-downgrade`.
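Once installed, Ccache still has to be invoked by the build. A minimal sketch, assuming a CMake-based configure step: `CMAKE_C_COMPILER_LAUNCHER` and `CMAKE_CXX_COMPILER_LAUNCHER` are standard CMake variables, and the invocation below is illustrative rather than a required configuration.

```
# Tell CMake to prefix every compiler invocation with ccache.
cmake .. \
  -DCMAKE_C_COMPILER_LAUNCHER=ccache \
  -DCMAKE_CXX_COMPILER_LAUNCHER=ccache

# After a rebuild, confirm the cache is actually being hit.
ccache --show-stats
```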
### Build and Test

1. Create a build directory and move into it.
@@ -545,16 +575,10 @@ See [Sanitizers docs](./docs/build/sanitizers.md) for more details.

| `assert`   | OFF | Enable assertions.                                             |
| `coverage` | OFF | Prepare the coverage report.                                   |
| `tests`    | OFF | Build tests.                                                   |
| `unity`    | OFF | Configure a unity build.                                       |
| `xrpld`    | OFF | Build the xrpld application, and not just the libxrpl library. |
| `werr`     | OFF | Treat compilation warnings as errors.                          |
| `wextra`   | OFF | Enable additional compilation warnings.                        |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
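For example, to try a unity build with the `unity` option from the table above (a sketch; combine it with whatever other flags your configuration needs):

```
cmake .. -Dunity=ON
cmake --build .
```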

## Troubleshooting

### Conan

@@ -621,7 +645,6 @@ If you want to experiment with a new package, follow these steps:

[1]: https://github.com/conan-io/conan-center-index/issues/13168
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
[3]: https://docs.conan.io/en/latest/getting_started.html
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
@@ -9,8 +9,5 @@ function (xrpl_add_test name)

  isolate_headers(${target} "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests/${name}" PRIVATE)

  # Make sure the test isn't optimized away in unity builds
  set_target_properties(${target} PROPERTIES UNITY_BUILD_MODE GROUP UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed

  add_test(NAME ${target} COMMAND ${target})
endfunction ()

@@ -4,12 +4,7 @@

include(target_protobuf_sources)

# Protocol buffers cannot participate in a unity build,
# because all the generated sources
# define a bunch of `static const` variables with the same names,
# so we just build them as a separate library.
add_library(xrpl.libpb)
set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto LANGUAGE cpp IMPORT_DIRS include/xrpl/proto
  PROTOS include/xrpl/proto/xrpl.proto)
@@ -160,12 +155,4 @@ if (xrpld)
    # antithesis_instrumentation.h, which is not exported as INTERFACE
    target_include_directories(xrpld PRIVATE ${CMAKE_SOURCE_DIR}/external/antithesis-sdk)
  endif ()

  # any files that don't play well with unity should be added here
  if (tests)
    set_source_files_properties(
      # these two seem to produce conflicts in beast teardown template methods
      src/test/rpc/ValidatorRPC_test.cpp src/test/ledger/Invariants_test.cpp
      PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
  endif ()
endif ()

@@ -30,14 +30,6 @@ if (tests)
  endif ()
endif ()

option(unity "Creates a build using UNITY support in cmake." OFF)
if (unity)
  if (NOT is_ci)
    set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
  endif ()
  set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif ()

if (is_clang AND is_linux)
  option(voidstar "Enable Antithesis instrumentation." OFF)
endif ()
@@ -23,7 +23,6 @@ class Xrpl(ConanFile):
        "shared": [True, False],
        "static": [True, False],
        "tests": [True, False],
        "unity": [True, False],
        "xrpld": [True, False],
    }

@@ -55,7 +54,6 @@ class Xrpl(ConanFile):
        "shared": False,
        "static": True,
        "tests": False,
        "unity": False,
        "xrpld": False,
        "date/*:header_only": True,
        "ed25519/*:shared": False,
@@ -168,7 +166,6 @@ class Xrpl(ConanFile):
        tc.variables["rocksdb"] = self.options.rocksdb
        tc.variables["BUILD_SHARED_LIBS"] = self.options.shared
        tc.variables["static"] = self.options.static
        tc.variables["unity"] = self.options.unity
        tc.variables["xrpld"] = self.options.xrpld
        tc.generate()

@@ -1,5 +1,5 @@
#ifndef XRPL_UNITY_ROCKSDB_H_INCLUDED
#define XRPL_UNITY_ROCKSDB_H_INCLUDED
#ifndef XRPL_BASICS_ROCKSDB_H_INCLUDED
#define XRPL_BASICS_ROCKSDB_H_INCLUDED

#if XRPL_ROCKSDB_AVAILABLE
// #include <rocksdb2/port/port_posix.h>
202
include/xrpl/core/ServiceRegistry.h
Normal file
@@ -0,0 +1,202 @@
#ifndef XRPL_CORE_SERVICEREGISTRY_H_INCLUDED
#define XRPL_CORE_SERVICEREGISTRY_H_INCLUDED

#include <xrpl/basics/Blob.h>
#include <xrpl/basics/SHAMapHash.h>
#include <xrpl/basics/TaggedCache.h>
#include <xrpl/ledger/CachedSLEs.h>

namespace xrpl {

// Forward declarations
namespace NodeStore {
class Database;
}
namespace Resource {
class Manager;
}
namespace perf {
class PerfLog;
}

class AcceptedLedger;
class AmendmentTable;
class Cluster;
class CollectorManager;
class DatabaseCon;
class Family;
class HashRouter;
class InboundLedgers;
class InboundTransactions;
class JobQueue;
class LedgerCleaner;
class LedgerMaster;
class LedgerReplayer;
class LoadFeeTrack;
class LoadManager;
class ManifestCache;
class NetworkOPs;
class OpenLedger;
class OrderBookDB;
class Overlay;
class PathRequests;
class PeerReservationTable;
class PendingSaves;
class RelationalDatabase;
class ServerHandler;
class SHAMapStore;
class TimeKeeper;
class TransactionMaster;
class TxQ;
class ValidatorList;
class ValidatorSite;

template <class Adaptor>
class Validations;
class RCLValidationsAdaptor;
using RCLValidations = Validations<RCLValidationsAdaptor>;

using NodeCache = TaggedCache<SHAMapHash, Blob>;

/** Service registry for dependency injection.

    This abstract interface provides access to the various services and
    components used throughout the application. It separates the service
    locator pattern from Application lifecycle management.

    Components that need access to services can hold a reference to
    ServiceRegistry rather than Application when they only need service
    access and not lifecycle management.
*/
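// Illustrative usage (not part of this header): a component that only needs
// service access can depend on ServiceRegistry instead of Application.
// `FeeWatcher` below is a hypothetical example class.
//
//     class FeeWatcher
//     {
//         ServiceRegistry& services_;
//
//     public:
//         explicit FeeWatcher(ServiceRegistry& services)
//             : services_(services)
//         {
//         }
//
//         void
//         check()
//         {
//             auto& feeTrack = services_.getFeeTrack();
//             // ... read fee state without any Application dependency ...
//         }
//     };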
class ServiceRegistry
{
public:
    ServiceRegistry() = default;
    virtual ~ServiceRegistry() = default;

    // Core infrastructure services
    virtual CollectorManager&
    getCollectorManager() = 0;

    virtual Family&
    getNodeFamily() = 0;

    virtual TimeKeeper&
    timeKeeper() = 0;

    virtual JobQueue&
    getJobQueue() = 0;

    virtual NodeCache&
    getTempNodeCache() = 0;

    virtual CachedSLEs&
    cachedSLEs() = 0;

    // Protocol and validation services
    virtual AmendmentTable&
    getAmendmentTable() = 0;

    virtual HashRouter&
    getHashRouter() = 0;

    virtual LoadFeeTrack&
    getFeeTrack() = 0;

    virtual LoadManager&
    getLoadManager() = 0;

    virtual RCLValidations&
    getValidations() = 0;

    virtual ValidatorList&
    validators() = 0;

    virtual ValidatorSite&
    validatorSites() = 0;

    virtual ManifestCache&
    validatorManifests() = 0;

    virtual ManifestCache&
    publisherManifests() = 0;

    // Network services
    virtual Overlay&
    overlay() = 0;

    virtual Cluster&
    cluster() = 0;

    virtual PeerReservationTable&
    peerReservations() = 0;

    virtual Resource::Manager&
    getResourceManager() = 0;

    // Storage services
    virtual NodeStore::Database&
    getNodeStore() = 0;

    virtual SHAMapStore&
    getSHAMapStore() = 0;

    virtual RelationalDatabase&
    getRelationalDatabase() = 0;

    // Ledger services
    virtual InboundLedgers&
    getInboundLedgers() = 0;

    virtual InboundTransactions&
    getInboundTransactions() = 0;

    virtual TaggedCache<uint256, AcceptedLedger>&
    getAcceptedLedgerCache() = 0;

    virtual LedgerMaster&
    getLedgerMaster() = 0;

    virtual LedgerCleaner&
    getLedgerCleaner() = 0;

    virtual LedgerReplayer&
    getLedgerReplayer() = 0;

    virtual PendingSaves&
    pendingSaves() = 0;

    virtual OpenLedger&
    openLedger() = 0;

    virtual OpenLedger const&
    openLedger() const = 0;

    // Transaction and operation services
    virtual NetworkOPs&
    getOPs() = 0;

    virtual OrderBookDB&
    getOrderBookDB() = 0;

    virtual TransactionMaster&
    getMasterTransaction() = 0;

    virtual TxQ&
    getTxQ() = 0;

    virtual PathRequests&
    getPathRequests() = 0;

    // Server services
    virtual ServerHandler&
    getServerHandler() = 0;

    virtual perf::PerfLog&
    getPerfLog() = 0;
};

} // namespace xrpl

#endif
203
pkgs/build.sh
@@ -1,203 +0,0 @@
#!/usr/bin/env bash

set -o errexit
set -o xtrace

. /etc/os-release

case "$ID $ID_LIKE" in
  *rhel*|*fedora*)
    # dnf -y module install "nodejs:20/common"
    PKG="rpm"
    ;;
  *debian*|*ubuntu*)
    # curl -fsSL https://deb.nodesource.com/setup_20.x -o nodesource_setup.sh
    # chmod +x nodesource_setup.sh
    # ./nodesource_setup.sh
    # apt-get install -y nodejs
    PKG="deb"
    ;;
esac

# build_dir="/root/build/${PKG}/packages"
# ./pkgs/build_rippled.${PKG}.sh

# echo "my build_vars" > build_vars
# exit 0
# if [ 1 -eq 0 ]; then
repo_dir=$PWD
set -a
repo_name="rippled"
pkgs_dir="${repo_dir}/pkgs"
shared_files="${pkgs_dir}/shared"
pkg_files="${pkgs_dir}/packaging/${PKG}"
build_info_src="${repo_dir}/src/libxrpl/protocol/BuildInfo.cpp"
xrpl_version=$(grep -E -i -o "\b(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-[0-9a-z\-]+(\.[0-9a-z\-]+)*)?(\+[0-9a-z\-]+(\.[0-9a-z\-]+)*)?\b" "${build_info_src}")

git config --global --add safe.directory '*'
branch=$(git rev-parse --abbrev-ref HEAD)
commit=$(git rev-parse HEAD)
short_commit=$(git rev-parse --short=7 HEAD)
date=$(git show -s --format=%cd --date=format-local:"%Y%m%d%H%M%S")

conan_remote_name="${CONAN_REMOTE_NAME:-xrplf}"
conan_remote_url="${CONAN_REMOTE_URL:-https://conan.ripplex.io}"

BUILD_TYPE=${BUILD_TYPE:-Release}
set +a

if [ "${branch}" = 'develop' ]; then
  # TODO: Can remove when CMake sets version
  dev_version="${date}~${short_commit}"
  xrpl_version="${xrpl_version}+${dev_version}"
fi

if [ "${PKG}" = 'rpm' ]; then
  IFS='-' read -r RIPPLED_RPM_VERSION RELEASE <<< "${xrpl_version}"
  export RIPPLED_RPM_VERSION
  RPM_RELEASE=${RPM_RELEASE-1}
  # post-release version
  if [ "hf" = "$(echo "$RELEASE" | cut -c -2)" ]; then
    RPM_RELEASE="${RPM_RELEASE}.${RELEASE}"
  # pre-release version (-b or -rc)
  elif [[ $RELEASE ]]; then
    RPM_RELEASE="0.${RPM_RELEASE}.${RELEASE}"
  fi
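  # Illustrative examples of the release mapping above (derived from the logic,
  # not part of the original script):
  #   xrpl_version "2.3.0"     -> RPM_RELEASE "1"        (final release)
  #   xrpl_version "2.3.0-hf1" -> RPM_RELEASE "1.hf1"    (hotfix sorts after "1")
  #   xrpl_version "2.3.0-rc1" -> RPM_RELEASE "0.1.rc1"  (pre-release sorts before "1")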
  export RPM_RELEASE

  if [[ $RPM_PATCH ]]; then
    RPM_PATCH=".${RPM_PATCH}"
    export RPM_PATCH
  fi

  build_dir="build/${PKG}/packages"
  rm -rf ${build_dir}
  mkdir -p ${build_dir}/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
  git archive \
    --remote "${repo_dir}" HEAD \
    --prefix ${repo_name}/ \
    --format tar.gz \
    --output ${build_dir}/rpmbuild/SOURCES/rippled.tar.gz
  ln --symbolic "${repo_dir}" ${build_dir}/rippled
  cp -r "${pkgs_dir}/packaging/rpm/rippled.spec" ${build_dir}
  pushd "${build_dir}" || exit

  rpmbuild \
    --define "_topdir ${PWD}/rpmbuild" \
    --define "_smp_build_ncpus %(nproc --ignore=2 2>/dev/null || echo 1)" \
    -ba rippled.spec

  RPM_VERSION_RELEASE=$(rpm -qp --qf='%{NAME}-%{VERSION}-%{RELEASE}' ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm)
  tar_file=$RPM_VERSION_RELEASE.tar.gz

  printf '%s\n' \
    "rpm_md5sum=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm 2>/dev/null)" \
    "rpm_sha256=$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm | awk '{ print $1 }')" \
    "rippled_version=${xrpl_version}" \
    "rippled_git_hash=${commit}" \
    "rpm_version=${RIPPLED_RPM_VERSION}" \
    "rpm_file_name=${tar_file}" \
    "rpm_version_release=${RPM_VERSION_RELEASE}" \
    > build_vars

  # Rename the files to match the debs
  mv rpmbuild/RPMS/x86_64/* .
  for f in *x86_64.rpm; do
    new="${f/x86_64/amd64}"
    mv "$f" "$new"
    echo "Renamed $f -> $new"
  done
  rm -rf rpmbuild
  rm -f rippled rippled.tar.gz rippled.spec
  pushd -0 && dirs -c

  mv "${build_dir}/build_vars" .

elif [ "${PKG}" = 'deb' ]; then
  dpkg_version=$(echo "${xrpl_version}" | sed 's:-:~:g')
  full_version="${dpkg_version}"
  build_dir="build/${PKG}/packages"
  rm -rf ${build_dir}
  mkdir -p ${build_dir}

  git archive \
    --remote "${repo_dir}" HEAD \
    --prefix ${repo_name}/ \
    --format tar.gz \
    --output "${build_dir}/${repo_name}_${dpkg_version}.orig.tar.gz"

  pushd ${build_dir} || exit
  tar -zxf "${repo_name}_${dpkg_version}.orig.tar.gz"

  pushd ${repo_name} || exit

  # Prepare the package metadata directory, `debian/`, within `rippled/`.
  cp -r "${pkg_files}/debian" .
  cp "${shared_files}/rippled.service" debian/
  cp "${shared_files}/update-rippled.sh" .
  cp "${shared_files}/update-rippled-cron" .
  cp "${shared_files}/rippled-logrotate" .

  if [ "${branch}" = 'develop' ]; then
    # TODO: Can remove when CMake sets version
    sed --in-place "s/versionString = \"\([^\"]*\)\"/versionString = \"${xrpl_version}\"/" "${build_info_src}"
  fi

  cat << CHANGELOG > ./debian/changelog
rippled (${xrpl_version}) unstable; urgency=low

  * see RELEASENOTES

 -- Ripple Labs Inc. <support@ripple.com>  $(TZ=UTC date -R)
CHANGELOG

  cat ./debian/changelog
  dpkg-buildpackage -b -d -us -uc

  popd || exit
  rm -rf ${repo_name}
  # for f in *.ddeb; do mv -- "$f" "${f%.ddeb}.deb"; done
  popd || exit
  cp ${build_dir}/${repo_name}_${xrpl_version}_amd64.changes .

  awk '/Checksums-Sha256:/{hit=1;next}/Files:/{hit=0}hit' ${repo_name}_${xrpl_version}_amd64.changes | sed -E 's!^[[:space:]]+!!' > shasums
  sha() {
    <shasums awk "/$1/ { print \$1 }"
  }

  printf '%s\n' \
    "deb_sha256=$(sha "rippled_${full_version}_amd64.deb")" \
    "dbg_sha256=$(sha "rippled-dbgsym_${full_version}_amd64")" \
    "rippled_version=${xrpl_version}" \
    "rippled_git_hash=${commit}" \
    "dpkg_version=${dpkg_version}" \
    "dpkg_full_version=${full_version}" \
    > build_vars

  pushd -0 && dirs -c
fi

# fi

# find . -name "*.${PKG}"
# mkdir -p $build_dir
# if [ "${PKG}" = 'rpm' ]; then
#   mv /root/rpmbuild/RPMS/x86_64/* .
#   for f in *x86_64.rpm; do
#     new="${f/x86_64/amd64}"
#     mv "$f" "$build_dir/$new"
#     echo "Renamed $f -> $new"
#   done
#   # mv /root/rpmbuild/RPMS/x86_64/* $build_dir/
# else
#   echo $PWD
#   find / -name "rippled-3.0.0_amd64.deb"
#   mv *.deb $build_dir
# fi
# printf '%s\n' \
#   "rippled_version=3.0.0" \
#   "rippled_git_hash=deadbeef" \
#   > build_vars
cp "${build_dir}/"*.deb . 2>/dev/null || true
cp "${build_dir}/"*.ddeb . 2>/dev/null || true
cp "${build_dir}/"*.rpm . 2>/dev/null || true
@@ -1,24 +0,0 @@
FROM ubuntu:jammy

COPY rippled_*_amd64.deb /tmp/
RUN apt-get update && \
    apt-get install -y --no-install-recommends /tmp/rippled_*_amd64.deb && \
    rm -f /tmp/*.deb && \
    rm -rf /var/lib/apt/lists/*

RUN <<EOF
useradd \
    --system \
    --no-create-home \
    --shell /usr/sbin/nologin \
    rippled
chown -R \
    rippled:rippled \
    /var/lib/rippled \
    /var/log/rippled \
    /opt/ripple
EOF
EXPOSE 51235 6006
USER rippled

ENTRYPOINT ["/opt/ripple/bin/rippled"]
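A hedged smoke-test sketch for the image above; the tag name is illustrative and assumes a freshly built `rippled_*_amd64.deb` sits in the build context:

```
docker build -t rippled-deb-test .
docker run --rm rippled-deb-test --version
```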
@@ -1,3 +0,0 @@
rippled daemon

 -- Mike Ellery <mellery451@gmail.com>  Tue, 04 Dec 2018 18:19:03 +0000
@@ -1,20 +0,0 @@
Source: rippled
Section: net
Priority: optional
Maintainer: Ripple Labs Inc. <support@ripple.com>
Build-Depends: cmake,
               debhelper (>= 13),
               debhelper-compat (= 13)
# debhelper (>= 14),
# debhelper-compat (= 14),
# TODO: Let's go for 14!
Standards-Version: 4.6.0
Homepage: https://github.com/XRPLF/rippled.git
Rules-Requires-Root: no

Package: rippled
Architecture: any
Depends: ${shlibs:Depends}, ${misc:Depends}
Description: XRP Ledger server (rippled)
 rippled is the core server of the XRP Ledger, providing a peer-to-peer
 network node for validating and processing transactions.
@@ -1,86 +0,0 @@
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: rippled
Source: https://github.com/ripple/rippled

Files: *
Copyright: 2012-2019 Ripple Labs Inc.

License: __UNKNOWN__

 The accompanying files under various copyrights.

 Copyright (c) 2012, 2013, 2014 Ripple Labs Inc.

 Permission to use, copy, modify, and distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
 copyright notice and this permission notice appear in all copies.

 THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

 The accompanying files incorporate work covered by the following copyright
 and previous license notice:

 Copyright (c) 2011 Arthur Britto, David Schwartz, Jed McCaleb,
 Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant

 Some code from Raw Material Software, Ltd., provided under the terms of the
 ISC License. See the corresponding source files for more details.
 Copyright (c) 2013 - Raw Material Software Ltd.
 Please visit http://www.juce.com

 Some code from ASIO examples:
 // Copyright (c) 2003-2011 Christopher M. Kohlhoff (chris at kohlhoff dot com)
 //
 // Distributed under the Boost Software License, Version 1.0. (See accompanying
 // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

 Some code from Bitcoin:
 // Copyright (c) 2009-2010 Satoshi Nakamoto
 // Copyright (c) 2011 The Bitcoin developers
 // Distributed under the MIT/X11 software license, see the accompanying
 // file license.txt or http://www.opensource.org/licenses/mit-license.php.

 Some code from Tom Wu:
 This software is covered under the following copyright:

 /*
  * Copyright (c) 2003-2005 Tom Wu
  * All Rights Reserved.
  *
  * Permission is hereby granted, free of charge, to any person obtaining
  * a copy of this software and associated documentation files (the
  * "Software"), to deal in the Software without restriction, including
  * without limitation the rights to use, copy, modify, merge, publish,
  * distribute, sublicense, and/or sell copies of the Software, and to
  * permit persons to whom the Software is furnished to do so, subject to
  * the following conditions:
  *
  * The above copyright notice and this permission notice shall be
  * included in all copies or substantial portions of the Software.
  *
  * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
  * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
  * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
  *
  * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
  * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
  * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
  * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
  * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  *
  * In addition, the following condition applies:
  *
  * All redistributions must retain an intact copy of this copyright notice
  * and disclaimer.
  */

 Address all questions regarding this license to:

  Tom Wu
  tjw@cs.Stanford.EDU
@@ -1,3 +0,0 @@
/var/log/rippled/
/var/lib/rippled/
/etc/systemd/system/rippled.service.d/
@@ -1,2 +0,0 @@
README.md
LICENSE.md
@@ -1,3 +0,0 @@
opt/ripple/include
opt/ripple/lib/*.a
opt/ripple/lib/cmake/*
@@ -1,2 +0,0 @@
/opt/ripple/etc/rippled.cfg
/opt/ripple/etc/validators.txt
@@ -1,10 +0,0 @@
etc/logrotate.d/rippled
opt/ripple/bin/rippled
opt/ripple/bin/update-rippled.sh
opt/ripple/bin/validator-keys
opt/ripple/bin/xrpld
opt/ripple/etc/rippled.cfg
opt/ripple/etc/update-rippled-cron
opt/ripple/etc/validators.txt
opt/ripple/include/*
opt/ripple/lib/*
@@ -1,4 +0,0 @@
opt/ripple/etc/rippled.cfg etc/opt/ripple/rippled.cfg
opt/ripple/etc/validators.txt etc/opt/ripple/validators.txt
opt/ripple/bin/rippled usr/local/bin/rippled
opt/ripple/bin/rippled opt/ripple/bin/xrpld
@@ -1,39 +0,0 @@
#!/bin/sh
set -e

USER_NAME=rippled
GROUP_NAME=rippled
case "$1" in
    configure)
        id -u $USER_NAME >/dev/null 2>&1 || \
            useradd --system \
                --home-dir /nonexistent \
                --no-create-home \
                --shell /usr/sbin/nologin \
                --comment "system user for rippled" \
                --user-group \
                ${USER_NAME}

        chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
        chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
        chown -R $USER_NAME:$GROUP_NAME /opt/ripple
        chmod 755 /var/log/rippled/
        chmod 755 /var/lib/rippled/
        chmod 644 /opt/ripple/etc/update-rippled-cron
        chmod 644 /etc/logrotate.d/rippled
        chown -R root:$GROUP_NAME /opt/ripple/etc/update-rippled-cron
        ;;

    abort-upgrade|abort-remove|abort-deconfigure)
        ;;

    *)
        echo "postinst called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
@@ -1,17 +0,0 @@
#!/bin/sh
set -e

case "$1" in
    purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
        ;;

    *)
        echo "postrm called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
@@ -1,20 +0,0 @@
#!/bin/sh
set -e

case "$1" in
    install|upgrade)
        ;;

    abort-upgrade)
        ;;

    *)
        echo "preinst called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
@@ -1,20 +0,0 @@
#!/bin/sh
set -e

case "$1" in
    remove|upgrade|deconfigure)
        ;;

    failed-upgrade)
        ;;

    *)
        echo "prerm called with unknown argument \`$1'" >&2
        exit 1
        ;;
esac


#DEBHELPER#

exit 0
@@ -1,88 +0,0 @@
#!/usr/bin/make -f
export DH_VERBOSE = 1
export DH_OPTIONS = -v
## TODO: Confirm these are still required.
# debuild sets some warnings that don't work well
# for our current build, so try to remove those flags here:
export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))

## TODO: Confirm these are still required.
export DEB_BUILD_MAINT_OPTIONS = hardening=+all
export DEB_BUILD_OPTIONS = nodwz

export RIPPLE_REMOTE = xrplf
export RIPPLE_REMOTE_URL = https://conan.ripplex.io

# ## CMake Configure args
# export DEB_CMAKE_GENERATOR = Ninja
export DEB_CMAKE_BUILD_TYPE = RelWithDebInfo

NPROC := $(shell nproc --ignore=2)
export DEB_BUILD_OPTIONS += parallel=$(NPROC)

CONAN_HOME := $(shell conan config home)
CONAN_PROFILE := $(shell conan profile path default)
CONAN_GCONF := $(CONAN_HOME)/global.conf
INSTALL_PREFIX := "/opt/ripple"
BUILD_DIR := obj-$(DEB_BUILD_GNU_TYPE)

.ONESHELL:
SHELL := /bin/bash

%:
	dh $@ --buildsystem=cmake
override_dh_installsystemd:
	dh_installsystemd --no-start

override_dh_auto_configure:
	conan remote add --index 0 $(RIPPLE_REMOTE) $(RIPPLE_REMOTE_URL) --force
	conan config install ./conan/profiles/default --target-folder $(CONAN_HOME)/profiles
	echo "tools.build:jobs={{ os.cpu_count() - 2 }}" >> ${CONAN_HOME}/global.conf
	echo "core.download:parallel={{ os.cpu_count() }}" >> $(CONAN_GCONF)

	conan install . \
		--settings:all build_type=$(DEB_CMAKE_BUILD_TYPE) \
		--output-folder=$(BUILD_DIR) \
		--options:host "&:xrpld=True" \
		--options:host "&:tests=True" \
		--build=missing

	# Debian assumes an offline build process and sets CMake's FETCHCONTENT_FULLY_DISCONNECTED variable to ON.
	# To use as much of the default settings as possible, we clone it where CMake's FetchContent expects it.
	mkdir -p "$(BUILD_DIR)/_deps"
	git clone https://github.com/ripple/validator-keys-tool.git "$(BUILD_DIR)/_deps/validator_keys-src"

	dh_auto_configure --builddirectory=$(BUILD_DIR) -- \
		-DCMAKE_BUILD_TYPE:STRING=$(DEB_CMAKE_BUILD_TYPE) \
		-Dxrpld=ON -Dtests=ON -Dvalidator_keys=ON \
		-DCMAKE_INSTALL_PREFIX:PATH=$(INSTALL_PREFIX) \
		-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON \
		-DCMAKE_TOOLCHAIN_FILE:FILEPATH=$(BUILD_DIR)/build/generators/conan_toolchain.cmake

override_dh_auto_build:
	dh_auto_build \
		--builddirectory=$(BUILD_DIR) -- rippled validator-keys

	# cmake \
	#   --build $(BUILD_DIR) \
	#   --target rippled \
	#   --target validator-keys \
	#   --parallel $(NPROC)




override_dh_auto_install:
	cmake --install $(BUILD_DIR) --prefix debian/tmp/opt/ripple
	# install -D $(BUILD_DIR)/_deps/validator_keys_src-build/validator-keys/validator-keys debian/tmp/opt/ripple/bin/validator-keys
	install -D $(BUILD_DIR)/_deps/validator_keys_src-build/validator-keys debian/tmp/opt/ripple/bin/validator-keys
	install -D update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
	install -D update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
	install -D rippled-logrotate debian/tmp/etc/logrotate.d/rippled
	rm -rf debian/tmp/opt/ripple/lib64/cmake/date

override_dh_dwz:
	@echo "Skipping DWZ due to huge debug info"
@@ -1,84 +0,0 @@
#!/usr/bin/make -f
export DH_VERBOSE = 1
export DH_OPTIONS = -v
# debuild sets some warnings that don't work well
# for our current build, so try to remove those flags here:
export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))

export DEB_BUILD_MAINT_OPTIONS = hardening=+all
export DEB_BUILD_OPTIONS = nodwz

export RIPPLE_REMOTE="xrplf"
export RIPPLE_REMOTE_URL="https://conan.ripplex.io"

export CONAN_HOME := $(shell conan config home)
export CONAN_PROFILE := $(shell conan profile path default)

export DEB_CMAKE_GENERATOR = Ninja
export DEB_CMAKE_BUILD_TYPE = Release
export DEB_CMAKE_EXTRA_FLAGS = -Dvalidator_keys=ON -Dtests=ON -Dxrpld=ON -DCMAKE_TOOLCHAIN_FILE=build/generators/conan_toolchain.cmake


.ONESHELL:
SHELL := /bin/bash
NPROC := $(shell expr $(shell nproc) - 2)
BUILD_DIR := build.rippled
VKT_PATH := $(BUILD_DIR)/vkt

%:
	dh $@ --buildsystem=cmake

override_dh_installsystemd:
	dh_installsystemd --no-start

override_dh_auto_configure:

	dpkg-buildflags --get CFLAGS
	dpkg-buildflags --get LDFLAGS
	dpkg-buildflags --status

	conan remote add --index 0 $(RIPPLE_REMOTE) $(RIPPLE_REMOTE_URL) --force
	sed -i "s/gnu17/20/" $(CONAN_PROFILE)

	git clone https://github.com/ripple/validator-keys-tool.git $(VKT_PATH)
	conan install . --options:a "&:xrpld=True" --options:a "&:tests=True" --build "missing"

	dh_auto_configure --builddirectory=$(BUILD_DIR) -- \
		cmake .. \
		-DCMAKE_BUILD_TYPE=Release \
		-Dvalidator_keys=ON \
		-Dtests=ON \
		-Dxrpld=ON \
		-DCMAKE_VERBOSE_MAKEFILE=ON \
		-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake
	# -DFETCHCONTENT_FULLY_DISCONNECTED=OFF \
	# -DFETCHCONTENT_SOURCE_DIR_VALIDATOR_KEYS_SRC=$(VKT_PATH) \

	# dh_auto_configure --builddirectory=$(BUILD_DIR)
override_dh_auto_build:
	cmake --build . --target rippled --target validator-keys --parallel 30


# manually run:
#   FETCHCONTENT_BASE_DIR:PATH=/home/emel/dev/Ripple/rippled/rippled/github_linux_packages/build/dpkg/packages/rippled/build.rippled/_deps
#   FETCHCONTENT_FULLY_DISCONNECTED:BOOL=OFF
#   FETCHCONTENT_QUIET:BOOL=ON
#   FETCHCONTENT_SOURCE_DIR_VALIDATOR_KEYS_SRC:PATH=
#   FETCHCONTENT_UPDATES_DISCONNECTED:BOOL=OFF
#   FETCHCONTENT_UPDATES_DISCONNECTED_VALIDATOR_KEYS_SRC:BOOL=OFF

override_dh_auto_install:
	cmake --install $(BUILD_DIR) --prefix debian/tmp/opt/ripple
	install -D $(BUILD_DIR)/validator-keys/validator-keys debian/tmp/opt/ripple/bin/validator-keys
	install -D bin/getRippledInfo debian/tmp/opt/ripple/bin/getRippledInfo
	install -D update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
	install -D update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
	install -D rippled-logrotate debian/tmp/etc/logrotate.d/rippled
	rm -rf debian/tmp/opt/ripple/lib64/cmake/date

override_dh_dwz:
	@echo "Skipping DWZ due to huge debug info"
@@ -1 +0,0 @@
3.0 (native)
@@ -1,2 +0,0 @@
#abort-on-upstream-changes
#unapply-patches
@@ -1 +0,0 @@
enable rippled.service
@@ -1,170 +0,0 @@
%global pkg_name %{getenv:repo_name}
%global branch %{getenv:branch}
%global commit %{getenv:commit}
%global shortcommit %{getenv:shortcommit}
%global date %{getenv:commit_date}
%global conan_remote_name %{getenv:conan_remote_name}
%global conan_remote_url %{getenv:conan_remote_url}
%global shared_files %{getenv:shared_files}
%global pkg_files %{getenv:pkg_files}
%global build_type %{getenv:BUILD_TYPE}

%global _prefix /opt/ripple
%global srcdir %{_builddir}/rippled
%global blddir %{srcdir}/bld.rippled

%global xrpl_version %{getenv:xrpl_version}
%global ver_base %(v=%{xrpl_version}; echo ${v%%-*})
%global _has_dash %(v=%{xrpl_version}; [ "${v#*-}" != "$v" ] && echo 1 || echo 0)
%if 0%{?_has_dash}
%global ver_suffix %(v=%{xrpl_version}; printf %s "${v#*-}")
%endif

Name:    %{pkg_name}
Version: %{ver_base}
Release: %{?ver_suffix:0.%{ver_suffix}}%{!?ver_suffix:1}%{?dist}
Summary: %{name} XRPL daemon

License: ISC
URL:     https://github.com/XRPLF/rippled
Source0: rippled.tar.gz
%{warn:name=%{name}}
%{warn:version=%{version}}
%{warn:ver_base=%{ver_base}}
%{warn:ver_suffix=%{ver_suffix}}
%{warn:release=%{release}}
%{warn:FullReleaseVersion=%{name}-%{version}-%{release}.%{_arch}.rpm}

%description
%{name} with p2p server for the XRP Ledger.

%prep
%autosetup -p1 -n %{name}

# TODO: Remove when version set with CMake.
if [ %{branch} == 'develop' ]; then
    sed --in-place "s/versionString = \"\([^\"]*\)\"/versionString = \"\1+%{ver_input}\"/" src/libxrpl/protocol/BuildInfo.cpp
fi

%build
conan remote add --index 0 %{conan_remote_name} %{conan_remote_url} --force
conan config install conan/profiles/default --target-folder $(conan config home)/profiles/
echo "tools.build:jobs={{ os.cpu_count() - 2 }}" >> ${CONAN_HOME}/global.conf
echo "core.download:parallel={{ os.cpu_count() }}" >> ${CONAN_HOME}/global.conf

conan install %{srcdir} \
    --settings:all build_type=%{build_type} \
    --output-folder %{srcdir}/conan_deps \
    --options:host "&:xrpld=True" \
    --options:host "&:tests=True" \
    --build=missing

cmake \
    -S %{srcdir} \
    -B %{blddir} \
    -Dxrpld=ON \
    -Dvalidator_keys=ON \
    -Dtests=ON \
    -DCMAKE_BUILD_TYPE:STRING=%{build_type} \
    -DCMAKE_INSTALL_PREFIX:PATH=%{_prefix} \
    -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON \
    -DCMAKE_TOOLCHAIN_FILE:FILEPATH=%{srcdir}/conan_deps/build/generators/conan_toolchain.cmake

cmake \
    --build %{blddir} \
    --parallel %{_smp_build_ncpus} \
    --target rippled \
    --target validator-keys

%install
rm -rf %{buildroot}
DESTDIR=%{buildroot} cmake --install %{blddir}

install -Dm0755 %{shared_files}/update-rippled.sh %{buildroot}%{_bindir}/update-rippled.sh
ln -s rippled %{buildroot}%{_bindir}/xrpld
ln -s update-rippled.sh %{buildroot}%{_bindir}/update-xrpld.sh

# configs
install -Dm0644 %{srcdir}/cfg/rippled-example.cfg %{buildroot}%{_prefix}/etc/rippled.cfg
install -Dm0644 %{srcdir}/cfg/validators-example.txt %{buildroot}%{_prefix}/etc/validators.txt
mkdir -p %{buildroot}%{_sysconfdir}/opt/ripple

# /etc points to /opt
ln -s ../../../opt/ripple/rippled.cfg %{buildroot}%{_sysconfdir}/opt/ripple/xrpld.cfg
ln -s ../../../opt/ripple/etc/rippled.cfg %{buildroot}%{_sysconfdir}/opt/ripple/rippled.cfg
ln -s ../../../opt/ripple/etc/validators.txt %{buildroot}%{_sysconfdir}/opt/ripple/validators.txt

# systemd/sysusers/tmpfiles
install -Dm0644 %{shared_files}/rippled.service %{buildroot}%{_unitdir}/rippled.service
install -Dm0644 %{pkg_files}/rippled.sysusers %{buildroot}%{_sysusersdir}/rippled.conf
install -Dm0644 %{pkg_files}/rippled.tmpfiles %{buildroot}%{_tmpfilesdir}/rippled.conf

%files
%license LICENSE*
%doc README*

# Files/dirs the pkg owns
%dir %{_prefix}
%dir %{_prefix}/bin
%dir %{_prefix}/etc
%if 0
%dir %{_sysconfdir}/opt # Add this if rpmlint cries.
%endif
%dir %{_sysconfdir}/opt/ripple

# Binaries and symlinks under our (non-standard) _prefix (/opt/ripple)
%{_bindir}/rippled
%{_bindir}/xrpld
%{_bindir}/update-rippled.sh
%{_bindir}/update-xrpld.sh
%{_bindir}/validator-keys

# We won't ship these but we'll create them.
%ghost /usr/local/bin/rippled
%ghost /usr/local/bin/xrpld

%config(noreplace) %{_prefix}/etc/rippled.cfg
%config(noreplace) %{_prefix}/etc/validators.txt

%config(noreplace) %{_sysconfdir}/opt/ripple/rippled.cfg
%config(noreplace) %{_sysconfdir}/opt/ripple/xrpld.cfg
%config(noreplace) %{_sysconfdir}/opt/ripple/validators.txt

# systemd and service user creation
%{_unitdir}/rippled.service
%{_sysusersdir}/rippled.conf
%{_tmpfilesdir}/rippled.conf

# Let tmpfiles create the db and log dirs
%ghost %dir /var/opt/ripple
%ghost %dir /var/opt/ripple/lib
%ghost %dir /var/opt/ripple/log

# TODO: Fix the CMake install() calls so we don't need to exclude these.
%exclude %{_prefix}/include/*
%exclude %{_prefix}/lib/*
%exclude %{_prefix}/lib/pkgconfig/*
%exclude /usr/lib/debug/**

%post
# Add a link on $PATH: /usr/local/bin/rippled -> %{_bindir}/rippled (also non-standard)
mkdir -p /usr/local/bin
for i in rippled xrpld
do
    if [ ! -e /usr/local/bin/${i} ]; then
        ln -s %{_bindir}/${i} /usr/local/bin/${i}
    elif [ -L /usr/local/bin/${i} ] && \
         [ "$(readlink -f /usr/local/bin/${i})" != "%{_bindir}/${i}" ]; then
        ln -sfn %{_bindir}/${i} /usr/local/bin/${i}
    fi
done

%preun
# remove the link only if it points to us (on erase, $1 == 0)
for i in rippled xrpld
do
    if [ "$1" -eq 0 ] && [ -L /usr/local/bin/${i} ] && \
       [ "$(readlink -f /usr/local/bin/${i})" = "%{_bindir}/${i}" ]; then
        rm -f /usr/local/bin/${i}
    fi
done
@@ -1,2 +0,0 @@
u rippled - "System user for rippled service"
g rippled - -
@@ -1,2 +0,0 @@
d /var/opt/ripple/lib 0750 rippled rippled -
d /var/opt/ripple/log 0750 rippled adm -
Binary file not shown.
@@ -1,15 +0,0 @@
/var/log/rippled/*.log {
    daily
    minsize 200M
    rotate 7
    nocreate
    missingok
    notifempty
    compress
    compresscmd /usr/bin/nice
    compressoptions -n19 ionice -c3 gzip
    compressext .gz
    postrotate
        /opt/ripple/bin/rippled --conf /opt/ripple/etc/rippled.cfg logrotate
    endscript
}
@@ -1,15 +0,0 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target

[Service]
Type=simple
ExecStart=/opt/ripple/bin/rippled --net --silent --conf /etc/opt/ripple/rippled.cfg
Restart=on-failure
User=rippled
Group=rippled
LimitNOFILE=65536

[Install]
WantedBy=multi-user.target
@@ -1,10 +0,0 @@
# For automatic updates, symlink this file to /etc/cron.d/
# Do not remove the newline at the end of this cron script

# bash required for use of RANDOM below.
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin

# invoke the check/update script with a random delay of up to 59 minutes
0 * * * * root sleep $((RANDOM*3540/32768)) && /opt/ripple/bin/update-rippled.sh

@@ -1,65 +0,0 @@
#!/usr/bin/env bash

# auto-update script for the rippled daemon

# Check for sudo/root permissions
if [[ $(id -u) -ne 0 ]] ; then
    echo "This update script must be run as root or sudo"
    exit 1
fi

LOCKDIR=/tmp/rippleupdate.lock
UPDATELOG=/var/log/rippled/update.log

function cleanup {
    # If this directory isn't removed, future updates will fail.
    rmdir $LOCKDIR
}

# Use mkdir to check if the process is already running; mkdir is atomic, unlike file creation.
if ! mkdir $LOCKDIR 2>/dev/null; then
    echo $(date -u) "lockdir exists - won't proceed." >> $UPDATELOG
    exit 1
fi
trap cleanup EXIT

source /etc/os-release
can_update=false

if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]] ; then
    # Silent update
    apt-get update -qq

    # The next line is an "awk"ward way to check if the package needs to be updated.
    RIPPLE=$(apt-get install -s --only-upgrade rippled | awk '/^Inst/ { print $2 }')
    test "$RIPPLE" == "rippled" && can_update=true

    function apply_update {
        apt-get install rippled -qq
    }
elif [[ "$ID" == "fedora" || "$ID" == "centos" || "$ID" == "rhel" || "$ID" == "scientific" ]] ; then
    RIPPLE_REPO=${RIPPLE_REPO-stable}
    yum --disablerepo=* --enablerepo=ripple-$RIPPLE_REPO clean expire-cache

    yum check-update -q --enablerepo=ripple-$RIPPLE_REPO rippled || can_update=true

    function apply_update {
        yum update -y --enablerepo=ripple-$RIPPLE_REPO rippled
    }
else
    echo "unrecognized distro!"
    exit 1
fi

# Do the actual update and restart the service after reloading the systemd daemon.
if [ "$can_update" = true ] ; then
    exec 3>&1 1>>${UPDATELOG} 2>&1
    set -e
    apply_update
    systemctl daemon-reload
    systemctl restart rippled.service
    echo $(date -u) "rippled daemon updated."
else
    echo $(date -u) "no updates available" >> $UPDATELOG
fi

@@ -1,194 +0,0 @@
#!/usr/bin/env -S uv run --script
# /// script
# requires-python = ">=3.13"
# dependencies = [
#     "python-gnupg",
# ]
# ///
import argparse
import base64
import gnupg
import os
import re
import shutil
import subprocess
import sys
import tempfile
from dataclasses import dataclass
from pathlib import Path


@dataclass(slots=True)
class SignCfg:
    gnupghome: Path
    fingerprint: str
    passphrase: str


def set_tty():
    try:
        tty = subprocess.check_output(["tty"], text=True, stderr=subprocess.DEVNULL).strip()
        os.environ["GPG_TTY"] = tty
        # print(f"GPG_TTY set to {tty}")
    except subprocess.CalledProcessError:
        print("No TTY detected. Skipping setting GPG_TTY.")


def make_cfg(passphrase: str, armored_private_key: str) -> SignCfg:
    ghome = Path(tempfile.mkdtemp())
    ghome.chmod(0o700)
    gpg = gnupg.GPG(gnupghome=str(ghome))
    imp = gpg.import_keys(armored_private_key)
    fp = imp.fingerprints[0]
    return SignCfg(gnupghome=ghome, fingerprint=fp, passphrase=passphrase)


def import_pubkey_into_rpmdb(gnupghome: Path, fingerprint: str, rpmdb: Path):
    env = {**os.environ, "GNUPGHOME": str(gnupghome)}
    cp = subprocess.run(
        ["gpg", "--batch", "--yes", "--armor", "--export", fingerprint],
        env=env, text=True, capture_output=True, check=True,
    )
    # Create the rpmdb directory before writing the exported key into it.
    rpmdb.mkdir(parents=True, exist_ok=True)
    pub = rpmdb / "pubkey.asc"
    pub.write_text(cp.stdout)

    subprocess.run(["rpm", "--dbpath", str(rpmdb), "--import", str(pub)], check=True)


def sign_rpm(pkg: Path, cfg: SignCfg) -> subprocess.CompletedProcess:
    fd, pfile = tempfile.mkstemp(text=True)
    os.write(fd, cfg.passphrase.rstrip("\r\n").encode())
    os.close(fd)
    os.chmod(pfile, 0o600)
    rpm_sign_cmd = [
        "rpm",
        "--define", "%__gpg /usr/bin/gpg",
        "--define", "_signature gpg",
        "--define", f"_gpg_name {cfg.fingerprint}",
        "--define", f"_gpg_path {cfg.gnupghome}",
        "--define", f"_gpg_passfile {pfile}",
        "--define", "__gpg_check_password_cmd /bin/true",
        "--define",
        "__gpg_sign_cmd %{__gpg} --batch --no-tty --no-armor "
        "--digest-algo sha512 --pinentry-mode loopback "
        "--passphrase-file %{_gpg_passfile} "
        "-u '%{_gpg_name}' --sign --detach-sign "
        "--output %{__signature_filename} %{__plaintext_filename}",
        "--addsign", str(pkg),
    ]

    return subprocess.run(
        rpm_sign_cmd,
        text=True,
        check=False,
        capture_output=True,
    )


def sign_deb(pkg: Path, cfg: SignCfg) -> subprocess.CompletedProcess:
    sig = pkg.with_suffix(pkg.suffix + ".asc")
    env = {**os.environ, "GNUPGHOME": str(cfg.gnupghome)}
    return subprocess.run(
        [
            "gpg",
            "--batch", "--yes", "--armor",
            "--pinentry-mode", "loopback",
            "--local-user", cfg.fingerprint,
            "--passphrase", cfg.passphrase,
            "--output", str(sig),
            "--detach-sign", str(pkg),
        ],
        env=env, check=False, capture_output=True, text=True,
    )


def sign_package(pkg: Path, cfg: SignCfg) -> subprocess.CompletedProcess:
    if pkg.suffix == ".rpm":
        return sign_rpm(pkg, cfg)
    if pkg.suffix == ".deb":
        return sign_deb(pkg, cfg)
    raise ValueError(f"unsupported package type: {pkg}")


def verify_signature(pkg: Path, *, gnupghome: Path, expected_fp: str):
    print(f"Verifying {pkg.resolve()}")
    suf = pkg.suffix.lower()
    if suf == ".rpm":
        return verify_rpm_signature(pkg, gnupghome=gnupghome, expected_fp=expected_fp)
    elif suf == ".deb":
        return verify_deb_signature(pkg, gnupghome=gnupghome, expected_fp=expected_fp)
    else:
        raise ValueError(f"unsupported package type: {pkg}")


def verify_deb_signature(pkg: Path, gnupghome: Path, expected_fp: str) -> None:
    pkg = Path(pkg)
    sig = pkg.with_suffix(pkg.suffix + ".asc")
    env = {**os.environ, "GNUPGHOME": str(gnupghome)}
    VALIDSIG_RE = re.compile(r"\[GNUPG:\]\s+VALIDSIG\s+([0-9A-Fa-f]{40})")
    verify_cmd = ["gpg", "--batch", "--status-fd", "1", "--verify", str(sig), str(pkg)]
    result = subprocess.run(verify_cmd, env=env, text=True, capture_output=True)

    if result.returncode != 0:
        print(result.stderr or result.stdout)
        sys.exit(result.returncode)

    m = VALIDSIG_RE.search(result.stdout)
    if not m or m.group(1).upper() != expected_fp.upper():
        print(f"Signature invalid or wrong signer. Expected {expected_fp}")
        sys.exit(result.returncode)
    print("********* deb signature verification *********")
    print(f"✅ Signature verified for {pkg.name} ({m.group(1)})")


def verify_rpm_signature(pkg: Path, *, gnupghome: Path, expected_fp: str):
    env = {**os.environ, "GNUPGHOME": str(gnupghome)}
    export_cmd = ["gpg", "--batch", "--yes", "--armor", "--export", expected_fp]
    cp = subprocess.run(export_cmd, env=env, text=True, capture_output=True, check=True)
    rpmdb = Path(tempfile.mkdtemp())
    try:
        pub = rpmdb / "pubkey.asc"
        pub.write_text(cp.stdout)
        # rpm needs the rpmdb for verification
        subprocess.run(["rpm", "--dbpath", str(rpmdb), "--import", str(pub)], check=True)
        verify_cmd = ["rpm", "--dbpath", str(rpmdb), "-Kv", str(pkg)]
        result = subprocess.run(verify_cmd, text=True, capture_output=True)
        if result.returncode != 0:
            print(result.stdout or result.stderr)
            sys.exit(result.returncode)
        print("********* rpm signature verification *********")
        print(result.stdout)
        print(f"✅ Signature verified for {pkg.name}")
        return True
    finally:
        try:
            for p in rpmdb.iterdir():
                p.unlink()
            rpmdb.rmdir()
        except Exception:
            pass


def main():
    set_tty()
    GPG_KEY_B64 = os.environ["GPG_KEY_B64"]
    GPG_KEY_PASS_B64 = os.environ["GPG_KEY_PASS_B64"]
    gpg_passphrase = base64.b64decode(GPG_KEY_PASS_B64).decode("utf-8").strip()
    gpg_key = base64.b64decode(GPG_KEY_B64).decode("utf-8").strip()

    parser = argparse.ArgumentParser()
    parser.add_argument("package")
    args = parser.parse_args()
    cfg = make_cfg(passphrase=gpg_passphrase, armored_private_key=gpg_key)
    try:
        pkg = Path(args.package)
        res = sign_package(pkg, cfg)
        if res.returncode:
            print(res.stderr.strip() or res.stdout.strip())
            sys.exit(res.returncode)
        verify_signature(pkg, gnupghome=cfg.gnupghome, expected_fp=cfg.fingerprint)
    finally:
        # Clean up the temporary GNUPGHOME. Note: do not call sys.exit(0) here,
        # or a failure exit raised above would be masked with a success status.
        shutil.rmtree(cfg.gnupghome, ignore_errors=True)


if __name__ == "__main__":
    main()
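A hedged usage sketch for the signing script above; the script name `sign.py`, the key file, and the package filename are illustrative, while `GPG_KEY_B64` and `GPG_KEY_PASS_B64` are the two environment variables the script actually reads:

```
export GPG_KEY_B64="$(base64 -w0 private-key.asc)"
export GPG_KEY_PASS_B64="$(printf '%s' "$PASSPHRASE" | base64 -w0)"
./sign.py rippled_3.0.0_amd64.deb   # signs the package, then verifies the signature
```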
@@ -85,7 +85,8 @@ registerSSLCerts(boost::asio::ssl::context& ctx, boost::system::error_code& ec,
// There is a very unpleasant interaction between <wincrypt> and
// openssl x509 types (namely, the former has macros that stomp
// on the latter); these undefs allow this TU to be safely used in
// unity builds without messing up subsequent TUs.
// unity builds without messing up subsequent TUs. Although we
// no longer use unity builds, leaving the undefs here does no harm.
#if BOOST_OS_WINDOWS
#undef X509_NAME
#undef X509_EXTENSIONS

@@ -6,6 +6,7 @@

#include <xrpl/basics/TaggedCache.h>
#include <xrpl/beast/utility/PropertyStream.h>
#include <xrpl/core/ServiceRegistry.h>
#include <xrpl/protocol/Protocol.h>
#include <xrpl/shamap/TreeNodeCache.h>

@@ -91,7 +92,7 @@ class Validations;
class RCLValidationsAdaptor;
using RCLValidations = Validations<RCLValidationsAdaptor>;

class Application : public beast::PropertyStream::Source
class Application : public ServiceRegistry, public beast::PropertyStream::Source
{
public:
    /* VFALCO NOTE
@@ -146,92 +147,12 @@ public:
    virtual boost::asio::io_context&
    getIOContext() = 0;

    virtual CollectorManager&
    getCollectorManager() = 0;
    virtual Family&
    getNodeFamily() = 0;
    virtual TimeKeeper&
    timeKeeper() = 0;
    virtual JobQueue&
    getJobQueue() = 0;
    virtual NodeCache&
    getTempNodeCache() = 0;
    virtual CachedSLEs&
    cachedSLEs() = 0;
    virtual AmendmentTable&
    getAmendmentTable() = 0;
    virtual HashRouter&
    getHashRouter() = 0;
    virtual LoadFeeTrack&
    getFeeTrack() = 0;
    virtual LoadManager&
    getLoadManager() = 0;
    virtual Overlay&
    overlay() = 0;
    virtual TxQ&
    getTxQ() = 0;
    virtual ValidatorList&
    validators() = 0;
    virtual ValidatorSite&
    validatorSites() = 0;
    virtual ManifestCache&
    validatorManifests() = 0;
    virtual ManifestCache&
    publisherManifests() = 0;
    virtual Cluster&
    cluster() = 0;
    virtual PeerReservationTable&
    peerReservations() = 0;
    virtual RCLValidations&
    getValidations() = 0;
    virtual NodeStore::Database&
    getNodeStore() = 0;
    virtual InboundLedgers&
    getInboundLedgers() = 0;
    virtual InboundTransactions&
    getInboundTransactions() = 0;

    virtual TaggedCache<uint256, AcceptedLedger>&
    getAcceptedLedgerCache() = 0;

    virtual LedgerMaster&
    getLedgerMaster() = 0;
    virtual LedgerCleaner&
    getLedgerCleaner() = 0;
    virtual LedgerReplayer&
    getLedgerReplayer() = 0;
    virtual NetworkOPs&
    getOPs() = 0;
    virtual OrderBookDB&
    getOrderBookDB() = 0;
    virtual ServerHandler&
    getServerHandler() = 0;
    virtual TransactionMaster&
    getMasterTransaction() = 0;
    virtual perf::PerfLog&
    getPerfLog() = 0;

    virtual std::pair<PublicKey, SecretKey> const&
    nodeIdentity() = 0;

    virtual std::optional<PublicKey const>
    getValidationPublicKey() const = 0;

    virtual Resource::Manager&
    getResourceManager() = 0;
    virtual PathRequests&
    getPathRequests() = 0;
    virtual SHAMapStore&
    getSHAMapStore() = 0;
    virtual PendingSaves&
    pendingSaves() = 0;
    virtual OpenLedger&
    openLedger() = 0;
    virtual OpenLedger const&
    openLedger() const = 0;
    virtual RelationalDatabase&
    getRelationalDatabase() = 0;

    virtual std::chrono::milliseconds
    getIOLatency() = 0;