Compare commits

..

32 Commits

Author             SHA1        Message              Date
Nicholas Dudfield  ce6c56b104  experiment: testing  2025-08-12 13:37:09 +07:00
Nicholas Dudfield  e342b17172  experiment: testing  2025-08-12 13:28:19 +07:00
Nicholas Dudfield  6a4aff7f36  experiment: testing  2025-08-12 13:24:32 +07:00
Nicholas Dudfield  9dca972266  experiment: testing  2025-08-12 13:14:28 +07:00
Nicholas Dudfield  d8a8030556  experiment: testing  2025-08-12 13:11:11 +07:00
Nicholas Dudfield  788684ca74  experiment: testing  2025-08-12 13:09:50 +07:00
Nicholas Dudfield  a2d9947e85  experiment: testing  2025-08-12 13:07:24 +07:00
Nicholas Dudfield  303a476a53  experiment: testing  2025-08-12 13:05:50 +07:00
Nicholas Dudfield  80fbf9e2d0  experiment: testing  2025-08-12 12:54:02 +07:00
Nicholas Dudfield  04f9d4fbd1  experiment: testing  2025-08-12 12:46:57 +07:00
Nicholas Dudfield  2a2fee3cd3  experiment: testing  2025-08-12 12:45:20 +07:00
Nicholas Dudfield  573569f031  experiment: testing  2025-08-12 12:42:53 +07:00
Nicholas Dudfield  1ebd067c9f  experiment: testing  2025-08-12 12:35:38 +07:00
Nicholas Dudfield  7450a302dc  experiment: testing  2025-08-12 12:30:12 +07:00
Nicholas Dudfield  949252e25f  experiment: testing  2025-08-12 12:25:00 +07:00
Nicholas Dudfield  544bb4f32e  experiment: testing  2025-08-12 12:15:04 +07:00
Nicholas Dudfield  7328a26710  experiment: testing  2025-08-12 12:13:30 +07:00
Nicholas Dudfield  317a333170  experiment: testing  2025-08-12 12:10:42 +07:00
Nicholas Dudfield  33052c2bde  experiment: testing  2025-08-12 12:01:01 +07:00
Nicholas Dudfield  df2fc9606a  experiment: testing  2025-08-12 11:52:47 +07:00
Nicholas Dudfield  8c79f4bfcb  experiment: testing  2025-08-12 11:50:06 +07:00
Nicholas Dudfield  a3012388e3  experiment: testing  2025-08-12 11:48:25 +07:00
Nicholas Dudfield  a170b387fc  experiment: testing  2025-08-12 11:40:13 +07:00
Nicholas Dudfield  841d902dbd  experiment: testing  2025-08-12 11:23:29 +07:00
Nicholas Dudfield  f8acb88f94  experiment: testing  2025-08-11 19:43:19 +07:00
Nicholas Dudfield  3f192ee1b5  experiment: testing  2025-08-11 19:41:54 +07:00
Nicholas Dudfield  ed1ba5595d  experiment: testing  2025-08-11 19:28:52 +07:00
Nicholas Dudfield  1d7f5d42cc  experiment: testing  2025-08-11 19:25:07 +07:00
Nicholas Dudfield  76a64d0eaa  experiment: testing  2025-08-11 19:23:14 +07:00
Nicholas Dudfield  2502509e9e  experiment: testing  2025-08-11 19:21:17 +07:00
Nicholas Dudfield  8a99f8ffc3  experiment: testing  2025-08-11 19:20:30 +07:00
Nicholas Dudfield  75849b5314  experiment: testing  2025-08-11 19:18:58 +07:00
49 changed files with 6677 additions and 410 deletions

.ci/gitea.py (new file, 885 lines)

@@ -0,0 +1,885 @@
#!/usr/bin/env python3
"""
Persistent Gitea for Conan on Self-Hosted GA Runner
- Localhost only (127.0.0.1) for security
- Persistent volumes survive between workflows
- Idempotent - safe to run multiple times
- Reuses existing container if already running
- Uses pre-baked app.ini to bypass web setup wizard
What This Script Uses Conan For
--------------------------------
This script uses Conan only for testing and verification:
- Optionally configures host's conan client (if available)
- Runs container-based tests to verify the repository works
- Tests upload/download of a sample package (zlib) in a container
- Verifies authentication and package management work correctly
- Does NOT build or manage your actual project dependencies
The test command runs in a Docker container on the same network as Gitea,
exactly mimicking how your GitHub Actions workflows will use it.
Docker Networking
-----------------
Gitea is configured with ROOT_URL using the container name for consistency.
A Docker network (default: conan-net) is used for container-to-container communication.
Access methods:
1. From the host machine:
- The host uses http://localhost:3000 (port mapping)
- Host's Conan configuration uses localhost
2. From Docker containers (tests and CI/CD):
- Containers use http://gitea-conan-persistent:3000
- Containers must be on the same network (default: conan-net)
- The test command automatically handles network setup
The script automatically:
- Creates the Docker network if needed
- Connects Gitea to the network
- Runs tests in containers on the same network
Example in GitHub Actions workflow:
docker network create conan-net
docker network connect conan-net gitea-conan-persistent
docker run --network conan-net <your-build-container> bash -c "
conan remote add gitea-local http://gitea-conan-persistent:3000/api/packages/conan/conan
conan user -p conan-pass-2024 -r gitea-local conan
conan config set general.revisions_enabled=1 # Required for Conan v1
"
"""
import argparse
import logging
import os
import queue
import shutil
import socket
import subprocess
import sys
import threading
import time
from typing import Optional
class DockerLogStreamer(threading.Thread):
    """Background thread to stream docker logs -f and pass lines into a queue"""

    def __init__(self, container_name: str, log_queue: queue.Queue):
        super().__init__(name=f"DockerLogStreamer-{container_name}")
        self.container = container_name
        self.log_queue = log_queue
        self._stop_event = threading.Event()
        self.proc: Optional[subprocess.Popen] = None
        self.daemon = True  # so it won't block interpreter exit if something goes wrong

    def run(self):
        try:
            # Follow logs, capture both stdout and stderr
            self.proc = subprocess.Popen(
                ["docker", "logs", "-f", self.container],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                bufsize=1,
                universal_newlines=True,
            )
            if not self.proc.stdout:
                return
            for line in self.proc.stdout:
                if line is None:
                    break
                # Ensure exact line fidelity
                self.log_queue.put(line.rstrip("\n"))
                if self._stop_event.is_set():
                    break
        except Exception as e:
            # Put an error marker so consumer can see
            self.log_queue.put(f"[STREAMER_ERROR] {e}")
        finally:
            try:
                if self.proc and self.proc.poll() is None:
                    # Do not kill abruptly unless asked to stop
                    pass
            except Exception:
                pass

    def stop(self, timeout: float = 5.0):
        self._stop_event.set()
        try:
            if self.proc and self.proc.poll() is None:
                # Politely terminate docker logs
                self.proc.terminate()
                try:
                    self.proc.wait(timeout=timeout)
                except Exception:
                    self.proc.kill()
        except Exception:
            pass
class PersistentGiteaConan:
    def __init__(self, debug: bool = False, verbose: bool = False):
        # Configurable via environment variables for CI flexibility
        self.container = os.getenv("GITEA_CONTAINER_NAME", "gitea-conan-persistent")
        self.port = int(os.getenv("GITEA_PORT", "3000"))
        self.user = os.getenv("GITEA_USER", "conan")
        self.passwd = os.getenv("GITEA_PASSWORD", "conan-pass-2024")  # do not print this in logs
        self.email = os.getenv("GITEA_EMAIL", "conan@localhost")
        # Persistent data location on the runner
        self.data_dir = os.getenv("GITEA_DATA_DIR", "/opt/gitea")
        # Docker network for container communication
        self.network = os.getenv("GITEA_NETWORK", "conan-net")
        # Behavior flags
        self.print_credentials = os.getenv("GITEA_PRINT_CREDENTIALS", "0") == "1"
        self.startup_timeout = int(os.getenv("GITEA_STARTUP_TIMEOUT", "120"))
        # Logging and docker log streaming infrastructure
        self._setup_logging(debug=debug, verbose=verbose)
        self.log_queue: queue.Queue[str] = queue.Queue()
        self.log_streamer: Optional[DockerLogStreamer] = None
        # Conan execution context cache
        self._conan_prefix: Optional[str] = None  # '' for direct, full sudo+shell for delegated; None if unavailable
        # Track sensitive values that should be masked in logs
        self._sensitive_values: set = {self.passwd}  # Start with password

    def _setup_logging(self, debug: bool, verbose: bool):
        # Determine level: debug > verbose > default WARNING
        if debug:
            level = logging.DEBUG
        elif verbose:
            level = logging.INFO
        else:
            level = logging.WARNING
        logging.basicConfig(level=level, format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s')
        self.logger = logging.getLogger(__name__)
        # Be slightly quieter for noisy libs
        logging.getLogger('urllib3').setLevel(logging.WARNING)

    def _mask_sensitive(self, text: str) -> str:
        """Mask any sensitive values in text for safe logging"""
        if not text:
            return text
        masked = text
        for sensitive in self._sensitive_values:
            if sensitive and sensitive in masked:
                masked = masked.replace(sensitive, "***REDACTED***")
        return masked
    def run(self, cmd, check=True, env=None, sensitive=False):
        """Run command with minimal output

        Args:
            cmd: Command to run
            check: Raise exception on non-zero exit
            env: Environment variables
            sensitive: If True, command and output are completely hidden
        """
        run_env = os.environ.copy()
        if env:
            run_env.update(env)
        # Log command (masked or hidden based on sensitivity)
        if sensitive:
            self.logger.debug("EXEC: [sensitive command hidden]")
        else:
            self.logger.debug(f"EXEC: {self._mask_sensitive(cmd)}")
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True, env=run_env)
        # Log output (masked or hidden based on sensitivity)
        if not sensitive:
            if result.stdout:
                self.logger.debug(f"STDOUT: {self._mask_sensitive(result.stdout.strip())}"[:1000])
            if result.stderr:
                self.logger.debug(f"STDERR: {self._mask_sensitive(result.stderr.strip())}"[:1000])
        if result.returncode != 0 and check:
            if sensitive:
                self.logger.error(f"Command failed ({result.returncode})")
                raise RuntimeError("Command failed (details hidden for security)")
            else:
                self.logger.error(f"Command failed ({result.returncode}) for: {self._mask_sensitive(cmd)}")
                raise RuntimeError(f"Command failed: {self._mask_sensitive(result.stderr)}")
        return result

    def is_running(self):
        """Check if container is already running"""
        result = self.run(f"docker ps -q -f name={self.container}", check=False)
        return bool(result.stdout.strip())

    def container_exists(self):
        """Check if container exists (running or stopped)"""
        result = self.run(f"docker ps -aq -f name={self.container}", check=False)
        return bool(result.stdout.strip())
    # ---------- Helpers & Preflight Checks ----------
    def _check_docker(self):
        if not shutil.which("docker"):
            raise RuntimeError(
                "Docker is not installed or not in PATH. Please install Docker and ensure the daemon is running.")
        # Check daemon access
        info = subprocess.run("docker info", shell=True, capture_output=True, text=True)
        if info.returncode != 0:
            raise RuntimeError(
                "Docker daemon not accessible. Ensure the Docker service is running and the current user has permission to use Docker.")

    def _is_port_in_use(self, host, port):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(0.5)
            return s.connect_ex((host, port)) == 0

    def _setup_directories(self):
        """Create directory structure with proper ownership"""
        gitea_data = os.path.join(self.data_dir, "gitea")
        gitea_conf = os.path.join(gitea_data, "gitea", "conf")
        # Create all directories
        os.makedirs(gitea_conf, exist_ok=True)
        self.logger.info(f"📁 Created directory structure: {self.data_dir}")
        # Set ownership recursively to git user (UID 1000)
        for root, dirs, files in os.walk(self.data_dir):
            os.chown(root, 1000, 1000)
            for d in dirs:
                os.chown(os.path.join(root, d), 1000, 1000)
            for f in files:
                os.chown(os.path.join(root, f), 1000, 1000)

    def _preflight(self):
        self.logger.info("🔍 Running preflight checks...")
        self._check_docker()
        # Port check only if our container is not already running
        if not self.is_running():
            if self._is_port_in_use("127.0.0.1", self.port):
                raise RuntimeError(f"Port {self.port} on 127.0.0.1 is already in use. Cannot bind Gitea.")
        self.logger.info("✓ Preflight checks passed")

    def _generate_secret(self, secret_type):
        """Generate a secret using Gitea's built-in generator"""
        cmd = f"docker run --rm gitea/gitea:latest gitea generate secret {secret_type}"
        result = self.run(cmd, sensitive=True)  # Don't log the output
        secret = result.stdout.strip()
        if secret:
            self._sensitive_values.add(secret)  # Track this secret for masking
        self.logger.debug(f"Generated {secret_type} successfully")
        return secret
    def _create_app_ini(self, gitea_conf_dir):
        """Create a pre-configured app.ini file"""
        app_ini_path = os.path.join(gitea_conf_dir, "app.ini")
        # Check if app.ini already exists (from previous run)
        if os.path.exists(app_ini_path):
            self.logger.info("✓ Using existing app.ini configuration")
            # Minimal migration: ensure HTTP_ADDR allows inbound connections from host via Docker mapping
            try:
                with open(app_ini_path, 'r+', encoding='utf-8') as f:
                    content = f.read()
                    updated = False
                    if "HTTP_ADDR = 127.0.0.1" in content:
                        content = content.replace("HTTP_ADDR = 127.0.0.1", "HTTP_ADDR = 0.0.0.0")
                        updated = True
                    if updated:
                        f.seek(0)
                        f.write(content)
                        f.truncate()
                        self.logger.info("🔁 Updated existing app.ini to bind on 0.0.0.0 for container reachability")
            except Exception as e:
                self.logger.warning(f"⚠️ Could not update existing app.ini automatically: {e}")
            return

        self.logger.info("🔑 Generating security secrets...")
        secret_key = self._generate_secret("SECRET_KEY")
        internal_token = self._generate_secret("INTERNAL_TOKEN")
        self.logger.info("📝 Creating app.ini configuration...")
        app_ini_content = f"""APP_NAME = Conan Package Registry
RUN_USER = git
RUN_MODE = prod

[server]
ROOT_URL = http://gitea-conan-persistent:{self.port}/
HTTP_ADDR = 0.0.0.0
HTTP_PORT = 3000
DISABLE_SSH = true
START_SSH_SERVER = false
OFFLINE_MODE = true
DOMAIN = gitea-conan-persistent
LFS_START_SERVER = false

[database]
DB_TYPE = sqlite3
PATH = /data/gitea.db
LOG_SQL = false

[repository]
ROOT = /data/gitea-repositories
DISABLED_REPO_UNITS = repo.issues, repo.pulls, repo.wiki, repo.projects, repo.actions

[security]
INSTALL_LOCK = true
SECRET_KEY = {secret_key}
INTERNAL_TOKEN = {internal_token}
PASSWORD_HASH_ALGO = pbkdf2
MIN_PASSWORD_LENGTH = 8

[service]
DISABLE_REGISTRATION = true
ENABLE_NOTIFY_MAIL = false
REGISTER_EMAIL_CONFIRM = false
ENABLE_CAPTCHA = false
REQUIRE_SIGNIN_VIEW = false
DEFAULT_KEEP_EMAIL_PRIVATE = true
DEFAULT_ALLOW_CREATE_ORGANIZATION = false
DEFAULT_ENABLE_TIMETRACKING = false

[mailer]
ENABLED = false

[session]
PROVIDER = file

[log]
MODE = console
LEVEL = Info

[api]
ENABLE_SWAGGER = false

[packages]
ENABLED = true

[other]
SHOW_FOOTER_VERSION = false
"""
        # Write app.ini with restrictive permissions
        with open(app_ini_path, 'w') as f:
            f.write(app_ini_content)
        # Set ownership to UID 1000:1000 (git user in container)
        os.chown(app_ini_path, 1000, 1000)
        os.chmod(app_ini_path, 0o640)
        self.logger.info("✓ Created app.ini with pre-generated secrets")
    def setup(self):
        """Setup or verify Gitea is running"""
        self.logger.info("🔧 Setting up persistent Gitea for Conan...")
        # Preflight
        self._preflight()
        # Create persistent data directory structure with proper ownership
        self._setup_directories()
        gitea_data = os.path.join(self.data_dir, "gitea")
        gitea_conf = os.path.join(gitea_data, "gitea", "conf")
        # Create app.ini BEFORE starting container (for headless setup)
        self._create_app_ini(gitea_conf)

        # Check if already running
        if self.is_running():
            self.logger.info("✅ Gitea container already running")
            self._verify_health()
            self._configure_conan()
            return

        # Check if container exists but stopped
        if self.container_exists():
            self.logger.info("🔄 Starting existing container...")
            self.run(f"docker start {self.container}")
            # Start log streaming for visibility
            self._start_log_streaming()
            try:
                time.sleep(2)
                self._verify_health()
                self._configure_conan()
            finally:
                self._stop_log_streaming()
            return

        # Create new container (first time setup)
        self.logger.info("🚀 Creating new Gitea container...")
        gitea_data = os.path.join(self.data_dir, "gitea")
        # IMPORTANT: Bind to 127.0.0.1 only for security
        # With pre-configured app.ini, Gitea starts directly without wizard
        docker_cmd = f"""docker run -d \
            --name {self.container} \
            -p 127.0.0.1:{self.port}:3000 \
            -v {gitea_data}:/data \
            -v /etc/timezone:/etc/timezone:ro \
            -v /etc/localtime:/etc/localtime:ro \
            -e USER_UID=1000 \
            -e USER_GID=1000 \
            --restart unless-stopped \
            gitea/gitea:latest"""
        self.run(docker_cmd)
        # Debug: Check actual port mapping
        port_check = self.run(f"docker port {self.container}", check=False)
        self.logger.info(f"🔍 Container port mapping: {port_check.stdout.strip()}")
        # Start log streaming and wait for Gitea to be ready
        self._start_log_streaming()
        try:
            self._wait_for_startup(self.startup_timeout)
            # Create user (idempotent)
            self._create_user()
            # Configure Conan
            self._configure_conan()
        finally:
            self._stop_log_streaming()
        self.logger.info("✅ Persistent Gitea ready for Conan packages!")
        self.logger.info(f"   URL: http://localhost:{self.port}")
        if self.print_credentials:
            self.logger.info(f"   User: {self.user} / {self.passwd}")
        else:
            self.logger.info("   Credentials: hidden (set GITEA_PRINT_CREDENTIALS=1 to display)")
        self.logger.info(f"   Data persisted in: {self.data_dir}")
    def _start_log_streaming(self):
        # Start background docker log streamer if not running
        if self.log_streamer is not None:
            self._stop_log_streaming()
        self.logger.debug("Starting Docker log streamer...")
        self.log_streamer = DockerLogStreamer(self.container, self.log_queue)
        self.log_streamer.start()

    def _stop_log_streaming(self):
        if self.log_streamer is not None:
            self.logger.debug("Stopping Docker log streamer...")
            try:
                self.log_streamer.stop()
                self.log_streamer.join(timeout=5)
            except Exception:
                pass
            finally:
                self.log_streamer = None

    def _wait_for_startup(self, timeout=60):
        """Wait for container to become healthy by consuming the docker log stream"""
        self.logger.info(f"⏳ Waiting for Gitea to start (timeout: {timeout}s)...")
        start_time = time.time()
        server_detected = False
        while time.time() - start_time < timeout:
            # Drain all available log lines without blocking
            drained_any = False
            while True:
                try:
                    line = self.log_queue.get_nowait()
                    drained_any = True
                except queue.Empty:
                    break
                if not line.strip():
                    continue
                # Always log raw docker lines at DEBUG level
                self.logger.debug(f"DOCKER: {line}")
                # Promote important events
                l = line
                if ("[E]" in l) or ("ERROR" in l) or ("FATAL" in l) or ("panic" in l):
                    self.logger.error(l)
                elif ("WARN" in l) or ("[W]" in l):
                    self.logger.warning(l)
                # Detect startup listening lines
                if ("Web server is now listening" in l) or ("Listen:" in l) or ("Starting new Web server" in l):
                    if not server_detected:
                        server_detected = True
                        self.logger.info("✓ Detected web server startup!")
                        self.logger.info("⏳ Waiting for Gitea to fully initialize...")
                        # Quick readiness loop
                        for i in range(10):
                            time.sleep(1)
                            if self._is_healthy():
                                self.logger.info(f"✓ Gitea is ready and responding! (after {i + 1} seconds)")
                                return
                        self.logger.warning(
                            "Server started but health check failed after 10 attempts, continuing to wait...")
            # Check if container is still running periodically
            container_status = self.run(
                f"docker inspect {self.container} --format='{{{{.State.Status}}}}'",
                check=False
            )
            status = (container_status.stdout or "").strip()
            if status and status != "running":
                # Container stopped or in error state
                error_logs = self.run(
                    f"docker logs --tail 30 {self.container} 2>&1",
                    check=False
                )
                self.logger.error(f"Container is in '{status}' state. Last logs:")
                for l in (error_logs.stdout or "").split('\n')[-10:]:
                    if l.strip():
                        self.logger.error(l)
                raise RuntimeError(f"Container failed to start (status: {status})")
            # If nothing drained, brief sleep to avoid busy loop
            if not drained_any:
                time.sleep(0.5)
        raise TimeoutError(f"Gitea failed to become ready within {timeout} seconds")
    def _is_healthy(self):
        """Check if Gitea is responding"""
        # Try a simple HTTP GET first (less verbose)
        result = self.run(
            f"curl -s -o /dev/null -w '%{{http_code}}' http://localhost:{self.port}/",
            check=False
        )
        code = result.stdout.strip()
        # Treat any 2xx/3xx as healthy (e.g., 200 OK, 302/303 redirects)
        if code and code[0] in ("2", "3"):
            return True
        # If it failed, show debug info
        if code == "000":
            # Only show debug on first failure
            if not hasattr(self, '_health_check_debug_shown'):
                self._health_check_debug_shown = True
                self.logger.info("🔍 Connection issue detected, showing diagnostics:")
                # Check what's actually listening
                netstat_result = self.run(f"netstat -tln | grep {self.port}", check=False)
                self.logger.info(f"   Port {self.port} listeners: {netstat_result.stdout.strip() or 'none found'}")
                # Check docker port mapping
                port_result = self.run(f"docker port {self.container} 3000", check=False)
                self.logger.info(f"   Docker mapping: {port_result.stdout.strip() or 'not mapped'}")
        return False

    def _verify_health(self):
        """Verify Gitea is healthy"""
        if not self._is_healthy():
            raise RuntimeError("Gitea is not responding properly")
        self.logger.info("✅ Gitea is healthy")

    def _ensure_network(self):
        """Ensure Docker network exists and Gitea is connected to it"""
        # Create network if it doesn't exist (idempotent)
        self.run(f"docker network create {self.network} 2>/dev/null || true", check=False)
        # Connect Gitea to the network if not already connected (idempotent)
        self.run(f"docker network connect {self.network} {self.container} 2>/dev/null || true", check=False)
        self.logger.debug(f"Ensured {self.container} is connected to {self.network} network")
    # ---------- Conan helpers ----------
    def _resolve_conan_prefix(self) -> Optional[str]:
        """Determine how to run the 'conan' CLI and cache the decision.

        Returns:
            '' for direct invocation (conan in PATH),
            full sudo+login-shell prefix string for delegated execution, or
            None if Conan is not available.
        """
        if self._conan_prefix is not None:
            return self._conan_prefix
        # If running with sudo, try actual user's login shell
        if os.geteuid() == 0 and 'SUDO_USER' in os.environ:
            actual_user = os.environ['SUDO_USER']
            # Discover the user's shell
            shell_result = self.run(f"getent passwd {actual_user} | cut -d: -f7", check=False)
            user_shell = shell_result.stdout.strip() if shell_result.returncode == 0 and shell_result.stdout.strip() else "/bin/bash"
            self.logger.info(f"→ Using {actual_user}'s shell for Conan: {user_shell}")
            which_result = self.run(f"sudo -u {actual_user} {user_shell} -l -c 'which conan'", check=False)
            if which_result.returncode == 0 and which_result.stdout.strip():
                self._conan_prefix = f"sudo -u {actual_user} {user_shell} -l -c"
                self.logger.info(f"✓ Found Conan at: {which_result.stdout.strip()}")
                return self._conan_prefix
            else:
                self.logger.warning(f"⚠️ Conan not found in {actual_user}'s PATH.")
                self._conan_prefix = None
                return self._conan_prefix
        else:
            # Non-sudo case; check PATH directly
            if shutil.which("conan"):
                self._conan_prefix = ''
                return self._conan_prefix
            else:
                self.logger.warning("⚠️ Conan CLI not found in PATH.")
                self._conan_prefix = None
                return self._conan_prefix

    def _build_conan_cmd(self, inner_args: str) -> Optional[str]:
        """Build a shell command to run Conan with given inner arguments.

        Example: inner_args='remote list' => 'conan remote list' or "sudo -u user shell -l -c 'conan remote list'".
        Returns None if Conan is unavailable.
        """
        prefix = self._resolve_conan_prefix()
        if prefix is None:
            return None
        if prefix == '':
            return f"conan {inner_args}"
        # Delegate via sudo+login shell; quote the inner command
        return f"{prefix} 'conan {inner_args}'"

    def _run_conan(self, inner_args: str, check: bool = False):
        """Run a Conan subcommand using the resolved execution context.

        Returns the subprocess.CompletedProcess-like result, or a dummy object with returncode=127 if unavailable.
        """
        full_cmd = self._build_conan_cmd(inner_args)
        if full_cmd is None:
            # Construct a minimal dummy result
            class Dummy:
                returncode = 127
                stdout = ''
                stderr = 'conan: not found'
            self.logger.error("❌ Conan CLI is not available. Skipping command: conan " + inner_args)
            return Dummy()
        return self.run(full_cmd, check=check)

    def _run_conan_sensitive(self, inner_args: str, check: bool = False):
        """Run a sensitive Conan subcommand (e.g., with passwords) using the resolved execution context."""
        full_cmd = self._build_conan_cmd(inner_args)
        if full_cmd is None:
            class Dummy:
                returncode = 127
                stdout = ''
                stderr = 'conan: not found'
            self.logger.error("❌ Conan CLI is not available. Skipping sensitive command")
            return Dummy()
        return self.run(full_cmd, check=check, sensitive=True)
    def _create_user(self):
        """Create Conan user (idempotent)"""
        self.logger.info("👤 Setting up admin user...")
        # Retry a few times in case DB initialization lags behind
        attempts = 5
        for i in range(1, attempts + 1):
            # First check if user exists
            check_cmd = f"docker exec -u 1000:1000 {self.container} gitea admin user list"
            result = self.run(check_cmd, check=False)
            if result.returncode == 0 and self.user in result.stdout:
                self.logger.info(f"✅ User already exists: {self.user}")
                return
            # Try to create admin user with --admin flag
            create_cmd = f"""docker exec -u 1000:1000 {self.container} \
                gitea admin user create \
                --username {self.user} \
                --password {self.passwd} \
                --email {self.email} \
                --admin \
                --must-change-password=false"""
            create_res = self.run(create_cmd, check=False, sensitive=True)
            if create_res.returncode == 0:
                self.logger.info(f"✅ Created admin user: {self.user}")
                return
            if "already exists" in (create_res.stderr or "").lower() or "already exists" in (
                    create_res.stdout or "").lower():
                self.logger.info(f"✅ User already exists: {self.user}")
                return
            if i < attempts:
                delay = min(2 ** i, 10)
                time.sleep(delay)
        self.logger.warning(f"⚠️ Could not create user after {attempts} attempts. You may need to create it manually.")

    def _configure_conan(self):
        """Configure Conan client (idempotent)"""
        self.logger.info("🔧 Configuring Conan client on host...")
        # Ensure Conan is available and determine execution context
        if self._resolve_conan_prefix() is None:
            self.logger.warning("⚠️ Conan CLI not available on host. Skipping client configuration.")
            self.logger.info("   Note: Tests will still work using container-based Conan.")
            return
        # For host-based Conan, we still use localhost since the host can't resolve container names
        # Container-based tests will use gitea-conan-persistent directly
        conan_url = f"http://localhost:{self.port}/api/packages/{self.user}/conan"
        # Remove old remote if exists (ignore errors)
        self._run_conan("remote remove gitea-local 2>/dev/null", check=False)
        # Add Gitea as remote
        self._run_conan(f"remote add gitea-local {conan_url}")
        # Authenticate (mark as sensitive even though Conan masks password in process list)
        self._run_conan_sensitive(f"user -p {self.passwd} -r gitea-local {self.user}")
        # Enable revisions if not already
        self._run_conan("config set general.revisions_enabled=1", check=False)
        self.logger.info("✅ Host Conan configured with remote: gitea-local (via localhost)")
        self.logger.info(f"   Container tests will use: http://gitea-conan-persistent:{self.port}")
    def verify(self):
        """Verify everything is working"""
        self.logger.info("🔍 Verifying setup...")
        # Check container
        if not self.is_running():
            self.logger.error("❌ Container not running")
            return False
        # Check Gitea health
        if not self._is_healthy():
            self.logger.error("❌ Gitea not responding")
            return False
        # Check Conan remote
        result = self._run_conan("remote list", check=False)
        if getattr(result, 'returncode', 1) != 0 or "gitea-local" not in (getattr(result, 'stdout', '') or ''):
            self.logger.error("❌ Conan remote not configured")
            return False
        self.logger.info("✅ All systems operational")
        return True

    def info(self):
        """Print current status"""
        self.logger.info("📊 Gitea Status:")
        self.logger.info(f"   Container: {self.container}")
        self.logger.info(f"   Running: {self.is_running()}")
        self.logger.info(f"   Data dir: {self.data_dir}")
        self.logger.info(f"   URL: http://localhost:{self.port}")
        self.logger.info(f"   Conan URL: http://localhost:{self.port}/api/packages/{self.user}/conan")
        # Show disk usage
        if os.path.exists(self.data_dir):
            result = self.run(f"du -sh {self.data_dir}", check=False)
            if result.returncode == 0:
                size = result.stdout.strip().split('\t')[0]
                self.logger.info(f"   Disk usage: {size}")

    def test(self):
        """Test Conan package upload/download in a container"""
        self.logger.info("🧪 Testing Conan with Gitea (container-based test)...")
        # Ensure everything is set up
        if not self.is_running():
            self.logger.error("❌ Gitea not running. Run 'setup' first.")
            return False
        # Ensure network exists and Gitea is connected
        self._ensure_network()
        # Test package name
        test_package = "zlib/1.3.1"
        package_name = test_package.split('/')[0]  # Extract just the package name
        self.logger.info(f"   → Testing with package: {test_package}")
        self.logger.info(f"   → Running test in container on {self.network} network")
        # Run test in a container (same environment as production)
        test_cmd = f"""docker run --rm --network {self.network} conanio/gcc11 bash -ec "
            # Configure Conan to use Gitea
            conan remote add gitea-local http://gitea-conan-persistent:{self.port}/api/packages/{self.user}/conan
            conan user -p {self.passwd} -r gitea-local {self.user}
            conan config set general.revisions_enabled=1

            # Test package upload/download
            echo '→ Building {test_package} from source...'
            conan install {test_package}@ --build={test_package}
            echo '→ Uploading to Gitea...'
            conan upload '{package_name}/*' --all -r gitea-local --confirm
            echo '→ Removing local copy...'
            conan remove '{package_name}/*' -f
            echo '→ Downloading from Gitea...'
            conan install {test_package}@ -r gitea-local
            echo '✅ Container-based test successful!'
        " """
        result = self.run(test_cmd, check=False, sensitive=False)  # Temporarily show output for debugging
        if result.returncode == 0:
            self.logger.info("✅ Test successful! Package uploaded and downloaded from Gitea.")
            return True
        else:
            self.logger.error("❌ Test failed. Check the output above for details.")
            return False
    def teardown(self):
        """Stop and remove Gitea container and data"""
        self.logger.info("🛑 Tearing down Gitea...")
        # Stop and remove container
        if self.container_exists():
            self.logger.info(f"   → Stopping container: {self.container}")
            self.run(f"docker stop {self.container}", check=False)
            self.logger.info(f"   → Removing container: {self.container}")
            self.run(f"docker rm {self.container}", check=False)
        else:
            self.logger.info("   → No container to remove")
        # Remove data directory
        if os.path.exists(self.data_dir):
            self.logger.info(f"   → Removing data directory: {self.data_dir}")
            shutil.rmtree(self.data_dir)
            self.logger.info("   ✓ Data directory removed")
        else:
            self.logger.info("   → No data directory to remove")
        # Remove Docker network if it exists
        network_check = self.run(f"docker network ls --format '{{{{.Name}}}}' | grep '^{self.network}$'", check=False)
        if network_check.stdout.strip():
            self.logger.info(f"   → Removing Docker network: {self.network}")
            self.run(f"docker network rm {self.network}", check=False)
            self.logger.info("   ✓ Network removed")
        else:
            self.logger.info(f"   → No network '{self.network}' to remove")
        self.logger.info("   ✅ Teardown complete!")


# For use in GitHub Actions workflows
def main():
    parser = argparse.ArgumentParser(description='Persistent Gitea for Conan packages')
    parser.add_argument('command', choices=['setup', 'teardown', 'verify', 'info', 'test'], nargs='?', default='setup')
    parser.add_argument('--debug', action='store_true', help='Enable debug logging')
    parser.add_argument('--verbose', action='store_true', help='Enable verbose logging (info level)')
    args = parser.parse_args()
    # Temporary logging before instance creation (level will be reconfigured inside class)
    temp_level = logging.DEBUG if args.debug else (logging.INFO if args.verbose else logging.WARNING)
    logging.basicConfig(level=temp_level, format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s')
    logger = logging.getLogger(__name__)
    # Auto-escalate to sudo for operations that need it
    needs_sudo = args.command in ['setup', 'teardown']
    if needs_sudo and os.geteuid() != 0:
        logger.info("📋 This operation requires sudo privileges. Re-running with sudo...")
        os.execvp('sudo', ['sudo'] + sys.argv)
    gitea = PersistentGiteaConan(debug=args.debug, verbose=args.verbose)
    try:
        if args.command == "setup":
            gitea.setup()
        elif args.command == "verify":
            sys.exit(0 if gitea.verify() else 1)
        elif args.command == "info":
            gitea.info()
        elif args.command == "test":
            sys.exit(0 if gitea.test() else 1)
        elif args.command == "teardown":
            gitea.teardown()
    except KeyboardInterrupt:
        logger.warning("Interrupted by user. Cleaning up...")
        try:
            gitea._stop_log_streaming()
        except Exception:
            pass
        sys.exit(130)


if __name__ == "__main__":
    main()
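
For reference, the script's CLI surface boils down to these invocations (a sketch of typical local use; setup and teardown re-exec themselves under sudo, and host-side Conan configuration is skipped gracefully when the CLI is absent):

    python .ci/gitea.py setup --verbose     # create or reuse the persistent Gitea container
    python .ci/gitea.py verify              # exit 0 only if container, health check, and remote all pass
    python .ci/gitea.py test --debug        # containerized zlib upload/download round-trip
    python .ci/gitea.py info                # status summary, including data-dir disk usage
    python .ci/gitea.py teardown --debug    # remove container, data directory, and network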


@@ -59,8 +59,8 @@ runs:
   - name: Export custom recipes
     shell: bash
     run: |
-      conan export external/snappy snappy/1.1.10@xahaud/stable
-      conan export external/soci soci/4.0.3@xahaud/stable
+      conan export external/snappy snappy/1.1.9@
+      conan export external/soci soci/4.0.3@
   - name: Install dependencies
     shell: bash
@@ -83,4 +83,4 @@ runs:
       path: |
         ~/.conan
         ~/.conan2
       key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}


@@ -2,9 +2,9 @@ name: Build using Docker
 on:
   push:
-    branches: ["dev", "candidate", "release", "jshooks"]
+    branches: ["dev", "candidate", "release", "ci-experiments"]
   pull_request:
-    branches: ["dev", "candidate", "release", "jshooks"]
+    branches: ["dev", "candidate", "release", "ci-experiments"]
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -32,46 +32,110 @@ jobs:
       clean: true
       fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
-  build:
+  checkpatterns:
     runs-on: [self-hosted, vanity]
-    needs: [checkout]
+    needs: checkout
     defaults:
       run:
         working-directory: ${{ needs.checkout.outputs.checkout_path }}
     steps:
-      - name: Set Cleanup Script Path
+      - name: Check for suspicious patterns
+        run: /bin/bash suspicious_patterns.sh
+
+  build:
+    runs-on: [self-hosted, vanity]
+    needs: [checkpatterns, checkout]
+    defaults:
+      run:
+        working-directory: ${{ needs.checkout.outputs.checkout_path }}
+    steps:
+      - name: Install Python & pipx
         run: |
-          echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV
-      - name: Build using Docker
-        run: /bin/bash release-builder.sh
-      - name: Stop Container (Cleanup)
-        if: always()
-        run: |
-          echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
-          /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
-          CLEANUP_EXIT_CODE=$?
-          if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
-            echo "Cleanup script succeeded."
-            rm -f "$JOB_CLEANUP_SCRIPT"
-            echo "Cleanup script removed."
-          else
-            echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
-          fi
-          if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
-            echo "🔍 Checking for leftover containers..."
-            BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
-            if [[ -n "$BUILD_CONTAINERS" ]]; then
-              echo "⚠️ WARNING: Some build containers are still running"
-              echo "$BUILD_CONTAINERS"
-            else
-              echo "✅ No build containers found"
-            fi
-          fi
+          sudo apt update && sudo apt install -y python3 python3-pip pipx python-is-python3
+      - name: Install Conan
+        run: |
+          pipx install "conan<2.0"
+          /root/.local/bin/conan --version # PATH doesn't seem to be set correctly
+      - name: Setup network and Gitea
+        run: |
+          # Create network for container communication (idempotent)
+          docker network create conan-net 2>/dev/null || true
+
+          # Setup Gitea
+          PATH="/root/.local/bin:$PATH" python .ci/gitea.py setup --debug
+
+          # Connect Gitea to the network (idempotent)
+          docker network connect conan-net gitea-conan-persistent 2>/dev/null || true
+
+          # Verify it's connected
+          docker inspect gitea-conan-persistent -f '{{range $net,$v := .NetworkSettings.Networks}}{{$net}} {{end}}'
+
+#      - name: Test Gitea from build container
+#        run: |
+#          # Show conan-net details
+#          echo "=== Docker network 'conan-net' details ==="
+#          docker network inspect conan-net
+#
+#          # Show what networks Gitea is connected to
+#          echo "=== Gitea container networks ==="
+#          docker inspect gitea-conan-persistent -f '{{json .NetworkSettings.Networks}}' | python -m json.tool
+#
+#          # Check if DNS resolution works without adding to conan-net
+#          docker run --rm alpine nslookup gitea-conan-persistent || echo "⚠️ DNS resolution failed without conan-net"
+#
+#          docker run --rm --network conan-net alpine sh -c "
+#            # First verify connectivity works
+#            apk add --no-cache curl >/dev/null 2>&1
+#            echo 'Testing DNS resolution...'
+#            nslookup gitea-conan-persistent
+#            echo 'Testing HTTP connection...'
+#            curl -s http://gitea-conan-persistent:3000 | head -n1
+#          "
+#          docker run --rm --network conan-net conanio/gcc11 bash -xec "
+#            # Configure Conan using the resolved IP
+#            conan remote add gitea-local http://gitea-conan-persistent:3000/api/packages/conan/conan
+#            conan user -p conan-pass-2024 -r gitea-local conan
+#
+#            # Enable revisions to match the server expectation
+#            conan config set general.revisions_enabled=1
+#
+#            # Test package upload/download
+#            conan install zlib/1.3.1@ --build=zlib
+#            conan upload 'zlib/*' --all -r gitea-local --confirm
+#            conan remove 'zlib/*' -f
+#            conan install zlib/1.3.1@ -r gitea-local
+#            echo '✅ Container-to-container test successful!'
+#          "
+#      - name: Build using Docker
+#        run: /bin/bash release-builder.sh
+#
+#      - name: Stop Container (Cleanup)
+#        if: always()
+#        run: |
+#          echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
+#          /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
+#          CLEANUP_EXIT_CODE=$?
+#
+#          if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
+#            echo "Cleanup script succeeded."
+#            rm -f "$JOB_CLEANUP_SCRIPT"
+#            echo "Cleanup script removed."
+#          else
+#            echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
+#          fi
+#
+#          if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
+#            echo "🔍 Checking for leftover containers..."
+#            BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
+#
+#            if [[ -n "$BUILD_CONTAINERS" ]]; then
+#              echo "⚠️ WARNING: Some build containers are still running"
+#              echo "$BUILD_CONTAINERS"
+#            else
+#              echo "✅ No build containers found"
+#            fi
+#          fi

   tests:
     runs-on: [self-hosted, vanity]
@@ -81,7 +145,7 @@ jobs:
         working-directory: ${{ needs.checkout.outputs.checkout_path }}
     steps:
       - name: Unit tests
-        run: /bin/bash docker-unit-tests.sh
+        run: PATH="/root/.local/bin:$PATH" python .ci/gitea.py test --debug

   cleanup:
     runs-on: [self-hosted, vanity]
@@ -91,5 +155,6 @@ jobs:
       - name: Cleanup workspace
         run: |
           CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
+          PATH="/root/.local/bin:$PATH" python "${CHECKOUT_PATH}/.ci/gitea.py" teardown --debug
           echo "Cleaning workspace for ${CHECKOUT_PATH}"
           rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
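
Taken together, the revised jobs reduce to roughly this sequence on the self-hosted runner (a condensed sketch of the hunks above, not an additional script; /root/.local/bin is where pipx drops the Conan shim):

    sudo apt update && sudo apt install -y python3 python3-pip pipx python-is-python3
    pipx install "conan<2.0"
    docker network create conan-net 2>/dev/null || true
    PATH="/root/.local/bin:$PATH" python .ci/gitea.py setup --debug      # build job
    PATH="/root/.local/bin:$PATH" python .ci/gitea.py test --debug       # tests job
    PATH="/root/.local/bin:$PATH" python .ci/gitea.py teardown --debug   # cleanup job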

.gitignore (vendored, 5 changed lines)

@@ -24,11 +24,6 @@ bin/project-cache.jam
 build/docker

-# Ignore release builder files
-.env
-release-build
-cmake-*.tar.gz
-
 # Ignore object files.
 *.o
 build


@@ -117,14 +117,14 @@ can't build earlier Boost versions.
    which allows you to statically link it with GCC, if you want.
    ```
-   conan export external/snappy snappy/1.1.10@xahaud/stable
+   conan export external/snappy snappy/1.1.9@
    ```
 5. Export our [Conan recipe for SOCI](./external/soci).
    It patches their CMake to correctly import its dependencies.
    ```
-   conan export external/soci soci/4.0.3@xahaud/stable
+   conan export external/soci soci/4.0.3@
    ```
 ### Build and Test
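
After exporting, the recipes can be sanity-checked in the local cache before building (a sketch in Conan v1 syntax; output formatting varies by Conan version):

    conan export external/snappy snappy/1.1.9@
    conan export external/soci soci/4.0.3@
    conan search snappy    # should list snappy/1.1.9
    conan search soci      # should list soci/4.0.3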


@@ -548,6 +548,7 @@ target_sources (rippled PRIVATE
   src/ripple/nodestore/backend/CassandraFactory.cpp
   src/ripple/nodestore/backend/RWDBFactory.cpp
   src/ripple/nodestore/backend/MemoryFactory.cpp
+  src/ripple/nodestore/backend/FlatmapFactory.cpp
   src/ripple/nodestore/backend/NuDBFactory.cpp
   src/ripple/nodestore/backend/NullFactory.cpp
   src/ripple/nodestore/backend/RocksDBFactory.cpp
@@ -1067,11 +1068,6 @@ target_link_libraries (rippled
   Ripple::opts
   Ripple::libs
   Ripple::xrpl_core
-  # Workaround for a Conan 1.x bug that prevents static linking of libstdc++
-  # when a dependency (snappy) modifies system_libs. See the comment in
-  # external/snappy/conanfile.py for a full explanation.
-  # This is likely not strictly necessary, but listed explicitly as a good practice.
-  m
 )
 exclude_if_included (rippled)
 # define a macro for tests that might need to


@@ -0,0 +1,33 @@
#[===================================================================[
   NIH prefix path..this is where we will download
   and build any ExternalProjects, and they will hopefully
   survive across build directory deletion (manual cleans)
#]===================================================================]
string (REGEX REPLACE "[ \\/%]+" "_" gen_for_path ${CMAKE_GENERATOR})
string (TOLOWER ${gen_for_path} gen_for_path)
# HACK: trying to shorten paths for windows CI (which hits 260 MAXPATH easily)
# @see: https://issues.jenkins-ci.org/browse/JENKINS-38706?focusedCommentId=339847
string (REPLACE "visual_studio" "vs" gen_for_path ${gen_for_path})
if (NOT DEFINED NIH_CACHE_ROOT)
  if (DEFINED ENV{NIH_CACHE_ROOT})
    set (NIH_CACHE_ROOT $ENV{NIH_CACHE_ROOT})
  else ()
    set (NIH_CACHE_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/.nih_c")
  endif ()
endif ()
set (nih_cache_path
  "${NIH_CACHE_ROOT}/${gen_for_path}/${CMAKE_CXX_COMPILER_ID}_${CMAKE_CXX_COMPILER_VERSION}")
if (NOT is_multiconfig)
  set (nih_cache_path "${nih_cache_path}/${CMAKE_BUILD_TYPE}")
endif ()
file(TO_CMAKE_PATH "${nih_cache_path}" nih_cache_path)
message (STATUS "NIH-EP cache path: ${nih_cache_path}")
## two convenience variables:
set (ep_lib_prefix ${CMAKE_STATIC_LIBRARY_PREFIX})
set (ep_lib_suffix ${CMAKE_STATIC_LIBRARY_SUFFIX})
# this is a setting for FetchContent and needs to be
# a cache variable
# https://cmake.org/cmake/help/latest/module/FetchContent.html#populating-the-content
set (FETCHCONTENT_BASE_DIR ${nih_cache_path} CACHE STRING "" FORCE)
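
As a worked example of the path composition above: a single-config Debug build using the Ninja generator and GCC 13.2.0 would cache its ExternalProjects under <source>/.nih_c/ninja/GNU_13.2.0/Debug (illustrative compiler values; NIH_CACHE_ROOT defaults to the source tree's .nih_c directory).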


@@ -0,0 +1,52 @@
find_package(Boost 1.83 REQUIRED
  COMPONENTS
    chrono
    container
    context
    coroutine
    date_time
    filesystem
    program_options
    regex
    system
    thread
)
add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(XCODE)
  target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
  target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
  target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()
target_link_libraries(ripple_boost
  INTERFACE
    Boost::boost
    Boost::chrono
    Boost::container
    Boost::coroutine
    Boost::date_time
    Boost::filesystem
    Boost::program_options
    Boost::regex
    Boost::system
    Boost::iostreams
    Boost::thread)
if(Boost_COMPILER)
  target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
endif()
if(san AND is_clang)
  # TODO: gcc does not support -fsanitize-blacklist...can we do something else
  # for gcc ?
  if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
    get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
  endif()
  message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
  target_compile_options(opts
    INTERFACE
      # ignore boost headers for sanitizing
      -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif()
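
The blacklist branch only fires for clang sanitizer builds; a configure invocation that exercises it might look like this (a sketch; "address" is an assumed value for the project's san option, and clang must be the active compiler):

    # sketch: clang + an assumed san=address cache option triggers the san_bl.txt path
    CC=clang CXX=clang++ cmake -B build -Dsan=address <source-dir>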


@@ -0,0 +1,22 @@
find_package(Protobuf 3.8)
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen)
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
set(CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen)
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
add_library(pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS})
target_include_directories(pbufs SYSTEM PUBLIC
  ${CMAKE_BINARY_DIR}/proto_gen
  ${CMAKE_BINARY_DIR}/proto_gen/src/ripple/proto
)
target_link_libraries(pbufs protobuf::libprotobuf)
target_compile_options(pbufs
  PUBLIC
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >
)
add_library(Ripple::pbufs ALIAS pbufs)


@@ -0,0 +1,62 @@
find_package(gRPC 1.23)

#[=================================[
   generate protobuf sources for
   grpc defs and bundle into a
   static lib
#]=================================]
set(GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc")
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
set(GRPC_PROTO_SRCS)
set(GRPC_PROTO_HDRS)
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto")
foreach(file ${GRPC_DEFINITION_FILES})
  get_filename_component(_abs_file ${file} ABSOLUTE)
  get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
  get_filename_component(_basename ${file} NAME_WE)
  get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
  file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
  get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
  file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
  set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
  set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
  set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
  set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
  add_custom_command(
    OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
    COMMAND protobuf::protoc
    ARGS --grpc_out=${GRPC_GEN_DIR}
         --cpp_out=${GRPC_GEN_DIR}
         --plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
         -I ${_proto_inc} -I ${_rel_dir}
         ${_abs_file}
    DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
    COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
    VERBATIM)
  set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES GENERATED TRUE)
  list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
  list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach()

add_library(grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS})
#target_include_directories(grpc_pbufs PRIVATE src)
target_include_directories(grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR})
target_link_libraries(grpc_pbufs
  "gRPC::grpc++"
  # libgrpc is missing references.
  absl::random_random
)
target_compile_options(grpc_pbufs
  PRIVATE
    $<$<BOOL:${MSVC}>:-wd4065>
    $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
  PUBLIC
    $<$<BOOL:${MSVC}>:-wd4996>
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >)
add_library(Ripple::grpc_pbufs ALIAS grpc_pbufs)


@@ -1,3 +1,51 @@
+#[===================================================================[
+   NIH dep: boost
+#]===================================================================]
+if((NOT DEFINED BOOST_ROOT) AND (DEFINED ENV{BOOST_ROOT}))
+  set(BOOST_ROOT $ENV{BOOST_ROOT})
+endif()
+if((NOT DEFINED BOOST_LIBRARYDIR) AND (DEFINED ENV{BOOST_LIBRARYDIR}))
+  set(BOOST_LIBRARYDIR $ENV{BOOST_LIBRARYDIR})
+endif()
+file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
+if(WIN32 OR CYGWIN)
+  # Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
+  if((NOT DEFINED BOOST_LIBRARYDIR) AND (DEFINED BOOST_ROOT))
+    if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
+      set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib)
+    elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib)
+      set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage/lib)
+    elseif(IS_DIRECTORY ${BOOST_ROOT}/lib)
+      set(BOOST_LIBRARYDIR ${BOOST_ROOT}/lib)
+    else()
+      message(WARNING "Did not find expected boost library dir. "
+        "Defaulting to ${BOOST_ROOT}")
+      set(BOOST_LIBRARYDIR ${BOOST_ROOT})
+    endif()
+  endif()
+endif()
+message(STATUS "BOOST_ROOT: ${BOOST_ROOT}")
+message(STATUS "BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}")
+
+# uncomment the following as needed to debug FindBoost issues:
+#set(Boost_DEBUG ON)
+
+#[=========================================================[
+   boost dynamic libraries don't trivially support @rpath
+   linking right now (cmake's default), so just force
+   static linking for macos, or if requested on linux by flag
+#]=========================================================]
+if(static)
+  set(Boost_USE_STATIC_LIBS ON)
+endif()
+set(Boost_USE_MULTITHREADED ON)
+if(static AND NOT APPLE)
+  set(Boost_USE_STATIC_RUNTIME ON)
+else()
+  set(Boost_USE_STATIC_RUNTIME OFF)
+endif()
+# TBD:
+# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the
 find_package(Boost 1.86 REQUIRED
   COMPONENTS
     chrono
@@ -9,12 +57,12 @@ find_package(Boost 1.86 REQUIRED
     program_options
     regex
     system
-    thread
-)
+    iostreams
+    thread)
 add_library(ripple_boost INTERFACE)
 add_library(Ripple::boost ALIAS ripple_boost)
-if(XCODE)
+if(is_xcode)
   target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
   target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
 else()
@@ -29,10 +77,10 @@ target_link_libraries(ripple_boost
     Boost::coroutine
     Boost::date_time
     Boost::filesystem
+    Boost::iostreams
     Boost::program_options
     Boost::regex
     Boost::system
-    Boost::iostreams
     Boost::thread)
 if(Boost_COMPILER)
   target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
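
On the configure side, the static flag referenced above is what flips Boost over to static archives; a typical invocation might look something like this (a sketch, assuming a locally staged Boost 1.86 and the option names shown in the hunks):

    # sketch: point FindBoost at a staged tree and request static linkage
    BOOST_ROOT=/opt/boost_1_86_0 cmake -B build -Dstatic=ON <source-dir>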


@@ -0,0 +1,28 @@
#[===================================================================[
   NIH dep: ed25519-donna
#]===================================================================]
add_library (ed25519-donna STATIC
  src/ed25519-donna/ed25519.c)
target_include_directories (ed25519-donna
  PUBLIC
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
    $<INSTALL_INTERFACE:include>
  PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}/src/ed25519-donna)
#[=========================================================[
   NOTE for macos:
   https://github.com/floodyberry/ed25519-donna/issues/29
   our source for ed25519-donna-portable.h has been
   patched to workaround this.
#]=========================================================]
target_link_libraries (ed25519-donna PUBLIC OpenSSL::SSL)
add_library (NIH::ed25519-donna ALIAS ed25519-donna)
target_link_libraries (ripple_libs INTERFACE NIH::ed25519-donna)
#[===========================[
   headers installation
#]===========================]
install (
  FILES
    src/ed25519-donna/ed25519.h
  DESTINATION include/ed25519-donna)

File diff suppressed because it is too large


@@ -0,0 +1,47 @@
# - Try to find jemalloc
# Once done this will define
#  JEMALLOC_FOUND - System has jemalloc
#  JEMALLOC_INCLUDE_DIRS - The jemalloc include directories
#  JEMALLOC_LIBRARIES - The libraries needed to use jemalloc

if(NOT USE_BUNDLED_JEMALLOC)
  find_package(PkgConfig)
  if (PKG_CONFIG_FOUND)
    pkg_check_modules(PC_JEMALLOC QUIET jemalloc)
  endif()
else()
  set(PC_JEMALLOC_INCLUDEDIR)
  set(PC_JEMALLOC_INCLUDE_DIRS)
  set(PC_JEMALLOC_LIBDIR)
  set(PC_JEMALLOC_LIBRARY_DIRS)
  set(LIMIT_SEARCH NO_DEFAULT_PATH)
endif()

set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER})

find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h
          PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS}
          ${LIMIT_SEARCH})

# If we're asked to use static linkage, add libjemalloc.a as a preferred library name.
if(JEMALLOC_USE_STATIC)
  list(APPEND JEMALLOC_NAMES
    "${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}")
endif()

list(APPEND JEMALLOC_NAMES jemalloc)

find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES}
             HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS}
             ${LIMIT_SEARCH})

set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})

include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(JeMalloc DEFAULT_MSG
                                  JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR)

mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY)
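
A consumer would then call find_package(JeMalloc) and, on success, link ${JEMALLOC_LIBRARIES} and add ${JEMALLOC_INCLUDE_DIRS} to its include path; this is standard find-module usage, noted here for orientation rather than taken from the repository.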


@@ -0,0 +1,22 @@
find_package (PkgConfig REQUIRED)
pkg_search_module (libarchive_PC QUIET libarchive>=3.4.3)

if(static)
  set(LIBARCHIVE_LIB libarchive.a)
else()
  set(LIBARCHIVE_LIB archive)
endif()

find_library (archive
  NAMES ${LIBARCHIVE_LIB}
  HINTS
    ${libarchive_PC_LIBDIR}
    ${libarchive_PC_LIBRARY_DIRS}
  NO_DEFAULT_PATH)

find_path (LIBARCHIVE_INCLUDE_DIR
  NAMES archive.h
  HINTS
    ${libarchive_PC_INCLUDEDIR}
    ${libarchive_PC_INCLUDEDIRS}
  NO_DEFAULT_PATH)


@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
  pkg_search_module (lz4_PC QUIET liblz4>=1.9)
endif ()

if(static)
  set(LZ4_LIB liblz4.a)
else()
  set(LZ4_LIB lz4.so)
endif()

find_library (lz4
  NAMES ${LZ4_LIB}
  HINTS
    ${lz4_PC_LIBDIR}
    ${lz4_PC_LIBRARY_DIRS}
  NO_DEFAULT_PATH)

find_path (LZ4_INCLUDE_DIR
  NAMES lz4.h
  HINTS
    ${lz4_PC_INCLUDEDIR}
    ${lz4_PC_INCLUDEDIRS}
  NO_DEFAULT_PATH)


@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
  pkg_search_module (secp256k1_PC QUIET libsecp256k1)
endif ()

if(static)
  set(SECP256K1_LIB libsecp256k1.a)
else()
  set(SECP256K1_LIB secp256k1)
endif()

find_library(secp256k1
  NAMES ${SECP256K1_LIB}
  HINTS
    ${secp256k1_PC_LIBDIR}
    ${secp256k1_PC_LIBRARY_PATHS}
  NO_DEFAULT_PATH)

find_path (SECP256K1_INCLUDE_DIR
  NAMES secp256k1.h
  HINTS
    ${secp256k1_PC_INCLUDEDIR}
    ${secp256k1_PC_INCLUDEDIRS}
  NO_DEFAULT_PATH)


@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
  pkg_search_module (snappy_PC QUIET snappy>=1.1.7)
endif ()

if(static)
  set(SNAPPY_LIB libsnappy.a)
else()
  set(SNAPPY_LIB libsnappy.so)
endif()

find_library (snappy
  NAMES ${SNAPPY_LIB}
  HINTS
    ${snappy_PC_LIBDIR}
    ${snappy_PC_LIBRARY_DIRS}
  NO_DEFAULT_PATH)

find_path (SNAPPY_INCLUDE_DIR
  NAMES snappy.h
  HINTS
    ${snappy_PC_INCLUDEDIR}
    ${snappy_PC_INCLUDEDIRS}
  NO_DEFAULT_PATH)


@@ -0,0 +1,19 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
  # TBD - currently no soci pkgconfig
  #pkg_search_module (soci_PC QUIET libsoci_core>=3.2)
endif ()

if(static)
  set(SOCI_LIB libsoci.a)
else()
  set(SOCI_LIB libsoci_core.so)
endif()

find_library (soci
  NAMES ${SOCI_LIB})

find_path (SOCI_INCLUDE_DIR
  NAMES soci/soci.h)

message("SOCI FOUND AT: ${SOCI_LIB}")


@@ -0,0 +1,24 @@
find_package (PkgConfig)
if (PKG_CONFIG_FOUND)
  pkg_search_module (sqlite_PC QUIET sqlite3>=3.26.0)
endif ()

if(static)
  set(SQLITE_LIB libsqlite3.a)
else()
  set(SQLITE_LIB sqlite3.so)
endif()

find_library (sqlite3
  NAMES ${SQLITE_LIB}
  HINTS
    ${sqlite_PC_LIBDIR}
    ${sqlite_PC_LIBRARY_DIRS}
  NO_DEFAULT_PATH)

find_path (SQLITE_INCLUDE_DIR
  NAMES sqlite3.h
  HINTS
    ${sqlite_PC_INCLUDEDIR}
    ${sqlite_PC_INCLUDEDIRS}
  NO_DEFAULT_PATH)


@@ -0,0 +1,163 @@
#[===================================================================[
NIH dep: libarchive
#]===================================================================]
option (local_libarchive "use local build of libarchive." OFF)
add_library (archive_lib UNKNOWN IMPORTED GLOBAL)
if (NOT local_libarchive)
if (NOT WIN32)
find_package(libarchive_pc REQUIRED)
endif ()
if (archive)
message (STATUS "Found libarchive using pkg-config. Using ${archive}.")
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${archive}
IMPORTED_LOCATION_RELEASE
${archive}
INTERFACE_INCLUDE_DIRECTORIES
${LIBARCHIVE_INCLUDE_DIR})
# pkg-config can return extra info for static lib linking
# this is probably needed/useful generally, but apply
# to APPLE for now (mostly for homebrew)
if (APPLE AND static AND libarchive_PC_STATIC_LIBRARIES)
message(STATUS "NOTE: libarchive static libs: ${libarchive_PC_STATIC_LIBRARIES}")
# also, APPLE seems to need iconv...maybe linux does too (TBD)
target_link_libraries (archive_lib
INTERFACE iconv ${libarchive_PC_STATIC_LIBRARIES})
endif ()
else ()
## now try searching using the minimal find module that cmake provides
find_package(LibArchive 3.4.3 QUIET)
if (LibArchive_FOUND)
if (static)
# find module doesn't find static libs currently, so we re-search
get_filename_component(_loc ${LibArchive_LIBRARY} DIRECTORY)
find_library(_la_static
NAMES libarchive.a archive_static.lib archive.lib
PATHS ${_loc})
if (_la_static)
set (_la_lib ${_la_static})
else ()
message (WARNING "unable to find libarchive static lib - switching to local build")
set (local_libarchive ON CACHE BOOL "" FORCE)
endif ()
else ()
set (_la_lib ${LibArchive_LIBRARY})
endif ()
if (NOT local_libarchive)
message (STATUS "Found libarchive using module/config. Using ${_la_lib}.")
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${_la_lib}
IMPORTED_LOCATION_RELEASE
${_la_lib}
INTERFACE_INCLUDE_DIRECTORIES
${LibArchive_INCLUDE_DIRS})
endif ()
else ()
set (local_libarchive ON CACHE BOOL "" FORCE)
endif ()
endif ()
endif()
if (local_libarchive)
set (lib_post "")
if (MSVC)
set (lib_post "_static")
endif ()
ExternalProject_Add (libarchive
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/libarchive/libarchive.git
GIT_TAG v3.4.3
CMAKE_ARGS
# passing the compiler seems to be needed for windows CI, sadly
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DENABLE_LZ4=ON
-ULZ4_*
-DLZ4_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
# because we are building a static lib, this lz4 library doesn't
# actually matter since you can't generally link static libs to other static
# libs. The include files are needed, but the library itself is not (until
# we link our application, at which point we use the lz4 we built above).
# nonetheless, we need to provide a library to libarchive else it will
# NOT include lz4 support when configuring
-DLZ4_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
-DENABLE_WERROR=OFF
-DENABLE_TAR=OFF
-DENABLE_TAR_SHARED=OFF
-DENABLE_INSTALL=ON
-DENABLE_NETTLE=OFF
-DENABLE_OPENSSL=OFF
-DENABLE_LZO=OFF
-DENABLE_LZMA=OFF
-DENABLE_ZLIB=OFF
-DENABLE_BZip2=OFF
-DENABLE_LIBXML2=OFF
-DENABLE_EXPAT=OFF
-DENABLE_PCREPOSIX=OFF
-DENABLE_LibGCC=OFF
-DENABLE_CNG=OFF
-DENABLE_CPIO=OFF
-DENABLE_CPIO_SHARED=OFF
-DENABLE_CAT=OFF
-DENABLE_CAT_SHARED=OFF
-DENABLE_XATTR=OFF
-DENABLE_ACL=OFF
-DENABLE_ICONV=OFF
-DENABLE_TEST=OFF
-DENABLE_COVERAGE=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LIST_SEPARATOR ::
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--target archive_static
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/libarchive/$<CONFIG>/${ep_lib_prefix}archive${lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/libarchive
>
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS lz4_lib
BUILD_BYPRODUCTS
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
<BINARY_DIR>/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
)
ExternalProject_Get_Property (libarchive BINARY_DIR)
ExternalProject_Get_Property (libarchive SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (libarchive)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/libarchive)
set_target_properties (archive_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/libarchive
INTERFACE_COMPILE_DEFINITIONS
LIBARCHIVE_STATIC)
add_dependencies (archive_lib libarchive)
exclude_if_included (libarchive)
endif()
target_link_libraries (archive_lib INTERFACE lz4_lib)
target_link_libraries (ripple_libs INTERFACE archive_lib)
exclude_if_included (archive_lib)
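
The shape used here recurs in every dep file below: declare an IMPORTED target up front, then fill in its properties from whichever source wins (system package or ExternalProject), so consumers only ever link one name. A stripped-down sketch of that skeleton, with placeholder names (foo_lib, foo_ep) and URL that are not in this repo:

add_library (foo_lib UNKNOWN IMPORTED GLOBAL)
ExternalProject_Add (foo_ep
  GIT_REPOSITORY https://example.invalid/foo.git  # placeholder URL
  GIT_TAG v1.0
  INSTALL_COMMAND ""
  BUILD_BYPRODUCTS <BINARY_DIR>/libfoo.a)
ExternalProject_Get_Property (foo_ep BINARY_DIR SOURCE_DIR)
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)  # include dir must exist at configure time
set_target_properties (foo_lib PROPERTIES
  IMPORTED_LOCATION ${BINARY_DIR}/libfoo.a
  INTERFACE_INCLUDE_DIRECTORIES ${SOURCE_DIR}/include)
add_dependencies (foo_lib foo_ep)  # linking foo_lib triggers the EP build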

View File

@@ -0,0 +1,79 @@
#[===================================================================[
NIH dep: lz4
#]===================================================================]
add_library (lz4_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(lz4)
endif()
if(lz4)
set_target_properties (lz4_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${lz4}
IMPORTED_LOCATION_RELEASE
${lz4}
INTERFACE_INCLUDE_DIRECTORIES
${LZ4_INCLUDE_DIR})
else()
ExternalProject_Add (lz4
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/lz4/lz4.git
GIT_TAG v1.9.2
SOURCE_SUBDIR contrib/cmake_unofficial
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_STATIC_LIBS=ON
-DBUILD_SHARED_LIBS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--target lz4_static
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}lz4$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}lz4${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}lz4_d${ep_lib_suffix}
)
ExternalProject_Get_Property (lz4 BINARY_DIR)
ExternalProject_Get_Property (lz4 SOURCE_DIR)
file (MAKE_DIRECTORY ${SOURCE_DIR}/lz4)
set_target_properties (lz4_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}lz4_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}lz4${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/lib)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (lz4)
endif ()
add_dependencies (lz4_lib lz4)
target_link_libraries (ripple_libs INTERFACE lz4_lib)
exclude_if_included (lz4)
endif()
exclude_if_included (lz4_lib)
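
The copy step guarded by $<$<BOOL:${is_multiconfig}>:...> exists because multi-config generators (Visual Studio, Xcode) drop outputs into per-config subdirectories, while BUILD_BYPRODUCTS and the IMPORTED_LOCATION_* properties above name one fixed path. For reference, a variable like is_multiconfig (assumed to be defined elsewhere in this repo) is conventionally derived like this:

# ask CMake whether the active generator is multi-config (VS, Xcode, Ninja Multi-Config)
get_property (is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)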

View File

@@ -0,0 +1,31 @@
#[===================================================================[
NIH dep: nudb
NuDB is header-only, thus is an INTERFACE lib in CMake.
TODO: move the library definition into NuDB repo and add
proper targets and export/install
#]===================================================================]
if (is_root_project) # NuDB not needed in the case of xrpl_core inclusion build
add_library (nudb INTERFACE)
FetchContent_Declare(
nudb_src
GIT_REPOSITORY https://github.com/CPPAlliance/NuDB.git
GIT_TAG 2.0.5
)
FetchContent_GetProperties(nudb_src)
if(NOT nudb_src_POPULATED)
message (STATUS "Pausing to download NuDB...")
FetchContent_Populate(nudb_src)
endif()
file(TO_CMAKE_PATH "${nudb_src_SOURCE_DIR}" nudb_src_SOURCE_DIR)
# specify as system includes so as to avoid warnings
target_include_directories (nudb SYSTEM INTERFACE ${nudb_src_SOURCE_DIR}/include)
target_link_libraries (nudb
INTERFACE
Boost::thread
Boost::system)
add_library (NIH::nudb ALIAS nudb)
target_link_libraries (ripple_libs INTERFACE NIH::nudb)
endif ()
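
The Declare/GetProperties/Populate sequence above is the pre-3.14 FetchContent idiom. On newer CMake the single call below covers it, but it would also add_subdirectory() the fetched project, which this file deliberately avoids: it wants the sources only, wired into its own INTERFACE target.

# modern equivalent, shown for contrast only (CMake >= 3.14):
FetchContent_MakeAvailable (nudb_src)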

View File

@@ -0,0 +1,48 @@
#[===================================================================[
NIH dep: openssl
#]===================================================================]
#[===============================================[
OPENSSL_ROOT_DIR is the only variable that
FindOpenSSL honors for locating, so convert any
OPENSSL_ROOT vars to this
#]===============================================]
if (NOT DEFINED OPENSSL_ROOT_DIR)
if (DEFINED ENV{OPENSSL_ROOT})
set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
elseif (HOMEBREW)
execute_process (COMMAND ${HOMEBREW} --prefix openssl
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
OUTPUT_STRIP_TRAILING_WHITESPACE)
endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()
if (static)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_package (OpenSSL 1.1.1 REQUIRED)
target_link_libraries (ripple_libs
INTERFACE
OpenSSL::SSL
OpenSSL::Crypto)
# disable SSLv2...this can also be done when building/configuring OpenSSL
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2)
#[=========================================================[
https://gitlab.kitware.com/cmake/cmake/issues/16885
depending on how openssl is built, it might depend
on zlib. In fact, the openssl find package should
figure this out for us, but it does not currently...
so let's add zlib ourselves to the lib list
TODO: investigate linking to static zlib for static
build option
#]=========================================================]
find_package (ZLIB)
set (has_zlib FALSE)
if (TARGET ZLIB::ZLIB)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
set (has_zlib TRUE)
endif ()
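
One caveat worth noting: set_target_properties overwrites INTERFACE_LINK_LIBRARIES wholesale, so if OpenSSL::Crypto ever carries interface links of its own, an appending variant of the zlib hookup is the safer sketch (not what the file currently does):

set_property (TARGET OpenSSL::Crypto APPEND PROPERTY
  INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)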

View File

@@ -0,0 +1,70 @@
if(reporting)
find_package(PostgreSQL)
if(NOT PostgreSQL_FOUND)
message("find_package did not find postgres")
find_library(postgres NAMES pq libpq libpq-dev pq-dev postgresql-devel)
find_path(libpq-fe NAMES libpq-fe.h PATH_SUFFIXES postgresql pgsql include)
if(NOT libpq-fe OR NOT postgres)
message("No system installed Postgres found. Will build")
add_library(postgres SHARED IMPORTED GLOBAL)
add_library(pgport SHARED IMPORTED GLOBAL)
add_library(pgcommon SHARED IMPORTED GLOBAL)
ExternalProject_Add(postgres_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/postgres/postgres.git
GIT_TAG REL_14_5
CONFIGURE_COMMAND ./configure --without-readline > /dev/null
BUILD_COMMAND ${CMAKE_COMMAND} -E env --unset=MAKELEVEL make
UPDATE_COMMAND ""
BUILD_IN_SOURCE 1
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/src/interfaces/libpq/${ep_lib_prefix}pq.a
<BINARY_DIR>/src/common/${ep_lib_prefix}pgcommon.a
<BINARY_DIR>/src/port/${ep_lib_prefix}pgport.a
LOG_BUILD TRUE
)
ExternalProject_Get_Property (postgres_src SOURCE_DIR)
ExternalProject_Get_Property (postgres_src BINARY_DIR)
set (postgres_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${postgres_src_SOURCE_DIR})
list(APPEND INCLUDE_DIRS
${SOURCE_DIR}/src/include
${SOURCE_DIR}/src/interfaces/libpq
)
set_target_properties(postgres PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/interfaces/libpq/${ep_lib_prefix}pq.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
set_target_properties(pgcommon PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/common/${ep_lib_prefix}pgcommon.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
set_target_properties(pgport PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/src/port/${ep_lib_prefix}pgport.a
INTERFACE_INCLUDE_DIRECTORIES
"${INCLUDE_DIRS}"
)
add_dependencies(postgres postgres_src)
add_dependencies(pgcommon postgres_src)
add_dependencies(pgport postgres_src)
file(TO_CMAKE_PATH "${postgres_src_SOURCE_DIR}" postgres_src_SOURCE_DIR)
target_link_libraries(ripple_libs INTERFACE postgres pgcommon pgport)
else()
message("Found system installed Postgres via find_libary")
target_include_directories(ripple_libs INTERFACE ${libpq-fe})
target_link_libraries(ripple_libs INTERFACE ${postgres})
endif()
else()
message("Found system installed Postgres via find_package")
target_include_directories(ripple_libs INTERFACE ${PostgreSQL_INCLUDE_DIRS})
target_link_libraries(ripple_libs INTERFACE ${PostgreSQL_LIBRARIES})
endif()
endif()
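
This is the autotools-inside-ExternalProject pattern: CONFIGURE_COMMAND runs ./configure, BUILD_IN_SOURCE keeps autotools happy, and the static archives are listed as BUILD_BYPRODUCTS so generators like Ninja can track them. Reduced to its core, with placeholder names and URL:

ExternalProject_Add (foo_autotools
  GIT_REPOSITORY https://example.invalid/foo.git  # placeholder URL
  CONFIGURE_COMMAND ./configure --disable-shared
  BUILD_IN_SOURCE 1          # autotools configure/build run in the source tree
  BUILD_COMMAND make
  INSTALL_COMMAND ""
  BUILD_BYPRODUCTS <SOURCE_DIR>/lib/libfoo.a)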

View File

@@ -1,22 +1,155 @@
#[===================================================================[
import protobuf (lib and compiler) and create a lib
from our proto message definitions. If the system protobuf
is not found, fallback on EP to download and build a version
from official source.
#]===================================================================]
if (static)
set (Protobuf_USE_STATIC_LIBS ON)
endif ()
find_package (Protobuf 3.8)
if (is_multiconfig)
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARIES})
else ()
string(TOUPPER ${CMAKE_BUILD_TYPE} upper_cmake_build_type)
set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARY_${upper_cmake_build_type}})
endif ()
if (local_protobuf OR NOT (Protobuf_FOUND AND Protobuf_PROTOC_EXECUTABLE AND protobuf_protoc_lib))
include (GNUInstallDirs)
message (STATUS "using local protobuf build.")
set(protobuf_reqs Protobuf_PROTOC_EXECUTABLE protobuf_protoc_lib)
foreach(lib ${protobuf_reqs})
if(NOT ${lib})
message(STATUS "Couldn't find ${lib}")
endif()
endforeach()
if (WIN32)
# protobuf prepends lib even on windows
set (pbuf_lib_pre "lib")
else ()
set (pbuf_lib_pre ${ep_lib_prefix})
endif ()
# for the external project build of protobuf, we currently ignore the
# static option and always build static libs here. This is consistent
# with our other EP builds. Dynamic libs in an EP would add complexity
# because we'd need to get them into the runtime path, and probably
# install them.
ExternalProject_Add (protobuf_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/protocolbuffers/protobuf.git
GIT_TAG v3.8.0
SOURCE_SUBDIR cmake
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-Dprotobuf_BUILD_TESTS=OFF
-Dprotobuf_BUILD_EXAMPLES=OFF
-Dprotobuf_BUILD_PROTOC_BINARIES=ON
-Dprotobuf_MSVC_STATIC_RUNTIME=ON
-DBUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-DCMAKE_DEBUG_POSTFIX=_d
-Dprotobuf_DEBUG_POSTFIX=_d
-Dprotobuf_WITH_ZLIB=$<IF:$<BOOL:${has_zlib}>,ON,OFF>
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
<BINARY_DIR>/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
<BINARY_DIR>/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}
)
ExternalProject_Get_Property (protobuf_src BINARY_DIR)
ExternalProject_Get_Property (protobuf_src SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (protobuf_src)
endif ()
exclude_if_included (protobuf_src)
if (NOT TARGET protobuf::libprotobuf)
add_library (protobuf::libprotobuf STATIC IMPORTED GLOBAL)
endif ()
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (protobuf::libprotobuf PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (protobuf::libprotobuf protobuf_src)
exclude_if_included (protobuf::libprotobuf)
if (NOT TARGET protobuf::libprotoc)
add_library (protobuf::libprotoc STATIC IMPORTED GLOBAL)
endif ()
set_target_properties (protobuf::libprotoc PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (protobuf::libprotoc protobuf_src)
exclude_if_included (protobuf::libprotoc)
if (NOT TARGET protobuf::protoc)
add_executable (protobuf::protoc IMPORTED)
exclude_if_included (protobuf::protoc)
endif ()
set_target_properties (protobuf::protoc PROPERTIES
IMPORTED_LOCATION "${BINARY_DIR}/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}")
add_dependencies (protobuf::protoc protobuf_src)
else ()
if (NOT TARGET protobuf::protoc)
if (EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
add_executable (protobuf::protoc IMPORTED)
set_target_properties (protobuf::protoc PROPERTIES
IMPORTED_LOCATION "${Protobuf_PROTOC_EXECUTABLE}")
else ()
message (FATAL_ERROR "Protobuf import failed")
endif ()
endif ()
endif ()
file (MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen)
set (save_CBD ${CMAKE_CURRENT_BINARY_DIR})
set (CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen)
protobuf_generate_cpp (
PROTO_SRCS
PROTO_HDRS
src/ripple/proto/ripple.proto)
set (CMAKE_CURRENT_BINARY_DIR ${save_CBD})
add_library (pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS})
target_include_directories (pbufs PRIVATE src)
target_include_directories (pbufs
SYSTEM PUBLIC ${CMAKE_BINARY_DIR}/proto_gen)
target_link_libraries (pbufs protobuf::libprotobuf)
target_compile_options (pbufs
PUBLIC
$<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>)
add_library (Ripple::pbufs ALIAS pbufs)
target_link_libraries (ripple_libs INTERFACE Ripple::pbufs)
exclude_if_included (pbufs)
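
Once protobuf::protoc exists as an IMPORTED executable (in either branch above), custom commands can name the target and CMake resolves it to the right binary, including the EP-built one. A sketch with hypothetical file names:

add_custom_command (
  OUTPUT example.pb.cc example.pb.h
  COMMAND protobuf::protoc
    --cpp_out=${CMAKE_CURRENT_BINARY_DIR}
    -I ${CMAKE_CURRENT_SOURCE_DIR}
    ${CMAKE_CURRENT_SOURCE_DIR}/example.proto
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/example.proto protobuf::protoc)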

View File

@@ -0,0 +1,177 @@
#[===================================================================[
NIH dep: rocksdb
#]===================================================================]
add_library (rocksdb_lib UNKNOWN IMPORTED GLOBAL)
set_target_properties (rocksdb_lib
PROPERTIES INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1)
option (local_rocksdb "use local build of rocksdb." OFF)
if (NOT local_rocksdb)
find_package (RocksDB 6.27 QUIET CONFIG)
if (TARGET RocksDB::rocksdb)
message (STATUS "Found RocksDB using config.")
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_DEBUG)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${_rockslib_l})
endif ()
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_RELEASE)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${_rockslib_l})
endif ()
get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION)
if (_rockslib_l)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${_rockslib_l})
endif ()
get_target_property (_rockslib_i RocksDB::rocksdb INTERFACE_INCLUDE_DIRECTORIES)
if (_rockslib_i)
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${_rockslib_i})
endif ()
target_link_libraries (ripple_libs INTERFACE RocksDB::rocksdb)
else ()
# using a find module with rocksdb is difficult because
# you have no idea how it was configured (transitive dependencies).
# the code below will generally find rocksdb using the module, but
# will then result in linker errors for static linkage since the
# transitive dependencies are unknown. force local build here for now, but leave the code as
# a placeholder for future investigation.
if (static)
set (local_rocksdb ON CACHE BOOL "" FORCE)
# TBD if there is some way to extract transitive deps..then:
#set (RocksDB_USE_STATIC ON)
else ()
find_package (RocksDB 6.27 MODULE)
if (ROCKSDB_FOUND)
if (RocksDB_LIBRARY_DEBUG)
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${RocksDB_LIBRARY_DEBUG})
endif ()
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${RocksDB_LIBRARIES})
set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${RocksDB_LIBRARIES})
set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RocksDB_INCLUDE_DIRS})
else ()
set (local_rocksdb ON CACHE BOOL "" FORCE)
endif ()
endif ()
endif ()
endif ()
if (local_rocksdb)
message (STATUS "Using local build of RocksDB.")
ExternalProject_Add (rocksdb
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/facebook/rocksdb.git
GIT_TAG v6.27.3
PATCH_COMMAND
# only used by windows build
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocks_thirdparty.inc
<SOURCE_DIR>/thirdparty.inc
COMMAND
# fix up their build version file so the values
# don't change on every build
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocksdb_build_version.cc.in
<SOURCE_DIR>/util/build_version.cc.in
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DWITH_JEMALLOC=$<IF:$<BOOL:${jemalloc}>,ON,OFF>
-DWITH_SNAPPY=ON
-DWITH_LZ4=ON
-DWITH_ZLIB=OFF
-DUSE_RTTI=ON
-DWITH_ZSTD=OFF
-DWITH_GFLAGS=OFF
-DWITH_BZ2=OFF
-ULZ4_*
-Ulz4_*
-Dlz4_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:lz4_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-Dlz4_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:lz4_lib,IMPORTED_LOCATION_RELEASE>>
-Dlz4_FOUND=ON
-USNAPPY_*
-Usnappy_*
-USnappy_*
-Dsnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-Dsnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
-Dsnappy_FOUND=ON
-DSnappy_INCLUDE_DIRS=$<JOIN:$<TARGET_PROPERTY:snappy_lib,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DSnappy_LIBRARIES=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:snappy_lib,IMPORTED_LOCATION_RELEASE>>
-DSnappy_FOUND=ON
-DWITH_MD_LIBRARY=OFF
-DWITH_RUNTIME_DEBUG=$<IF:$<CONFIG:Debug>,ON,OFF>
-DFAIL_ON_WARNINGS=OFF
-DWITH_ASAN=OFF
-DWITH_TSAN=OFF
-DWITH_UBSAN=OFF
-DWITH_NUMA=OFF
-DWITH_TBB=OFF
-DWITH_WINDOWS_UTF8_FILENAMES=OFF
-DWITH_XPRESS=OFF
-DPORTABLE=ON
-DFORCE_SSE42=OFF
-DDISABLE_STALL_NOTIF=OFF
-DOPTDBG=ON
-DROCKSDB_LITE=OFF
-DWITH_FALLOCATE=ON
-DWITH_LIBRADOS=OFF
-DWITH_JNI=OFF
-DROCKSDB_INSTALL_ON_WINDOWS=OFF
-DWITH_TESTS=OFF
-DWITH_TOOLS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -MP /DNDEBUG"
>
$<$<NOT:$<BOOL:${MSVC}>>:
"-DCMAKE_CXX_FLAGS=-DNDEBUG"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}rocksdb$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
LIST_SEPARATOR ::
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS snappy_lib lz4_lib
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}rocksdb${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
)
ExternalProject_Get_Property (rocksdb BINARY_DIR)
ExternalProject_Get_Property (rocksdb SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (rocksdb)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
set_target_properties (rocksdb_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}rocksdb_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}rocksdb${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies (rocksdb_lib rocksdb)
exclude_if_included (rocksdb)
endif ()
target_link_libraries (rocksdb_lib
INTERFACE
snappy_lib
lz4_lib
$<$<BOOL:${MSVC}>:rpcrt4>)
exclude_if_included (rocksdb_lib)
target_link_libraries (ripple_libs INTERFACE rocksdb_lib)
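
The property harvest at the top of this file (copying IMPORTED_LOCATION* and the include dirs from RocksDB::rocksdb onto rocksdb_lib) keeps the rest of the build agnostic about where RocksDB came from. The same harvest written as a loop, an equivalent sketch:

foreach (_prop IMPORTED_LOCATION IMPORTED_LOCATION_DEBUG IMPORTED_LOCATION_RELEASE)
  get_target_property (_v RocksDB::rocksdb ${_prop})
  if (_v)
    set_target_properties (rocksdb_lib PROPERTIES ${_prop} ${_v})
  endif ()
endforeach ()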

View File

@@ -0,0 +1,58 @@
#[===================================================================[
NIH dep: secp256k1
#]===================================================================]
add_library (secp256k1_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(secp256k1)
endif()
if(secp256k1)
set_target_properties (secp256k1_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${secp256k1}
IMPORTED_LOCATION_RELEASE
${secp256k1}
INTERFACE_INCLUDE_DIRECTORIES
${SECP256K1_INCLUDE_DIR})
add_library (secp256k1 ALIAS secp256k1_lib)
add_library (NIH::secp256k1 ALIAS secp256k1_lib)
else()
set(INSTALL_SECP256K1 true)
add_library (secp256k1 STATIC
src/secp256k1/src/secp256k1.c)
target_compile_definitions (secp256k1
PRIVATE
USE_NUM_NONE
USE_FIELD_10X26
USE_FIELD_INV_BUILTIN
USE_SCALAR_8X32
USE_SCALAR_INV_BUILTIN)
target_include_directories (secp256k1
PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
$<INSTALL_INTERFACE:include>
PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/secp256k1)
target_compile_options (secp256k1
PRIVATE
$<$<BOOL:${MSVC}>:-wd4319>
$<$<NOT:$<BOOL:${MSVC}>>:
-Wno-deprecated-declarations
-Wno-unused-function
>
$<$<BOOL:${is_gcc}>:-Wno-nonnull-compare>)
target_link_libraries (ripple_libs INTERFACE NIH::secp256k1)
#[===========================[
headers installation
#]===========================]
install (
FILES
src/secp256k1/include/secp256k1.h
DESTINATION include/secp256k1/include)
add_library (NIH::secp256k1 ALIAS secp256k1)
endif()

View File

@@ -0,0 +1,77 @@
#[===================================================================[
NIH dep: snappy
#]===================================================================]
add_library (snappy_lib STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(snappy)
endif()
if(snappy)
set_target_properties (snappy_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${snappy}
IMPORTED_LOCATION_RELEASE
${snappy}
INTERFACE_INCLUDE_DIRECTORIES
${SNAPPY_INCLUDE_DIR})
else()
ExternalProject_Add (snappy
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/google/snappy.git
GIT_TAG 1.1.7
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DSNAPPY_BUILD_TESTS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}snappy$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E copy_if_different <BINARY_DIR>/config.h <BINARY_DIR>/snappy-stubs-public.h <SOURCE_DIR>
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}snappy${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}snappy_d${ep_lib_suffix}
)
ExternalProject_Get_Property (snappy BINARY_DIR)
ExternalProject_Get_Property (snappy SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (snappy)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/snappy)
set_target_properties (snappy_lib PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}snappy_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}snappy${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR})
add_dependencies (snappy_lib snappy)
exclude_if_included (snappy)
endif()
target_link_libraries (ripple_libs INTERFACE snappy_lib)
exclude_if_included (snappy_lib)

View File

@@ -0,0 +1,165 @@
#[===================================================================[
NIH dep: soci
#]===================================================================]
foreach (_comp core empty sqlite3)
add_library ("soci_${_comp}" STATIC IMPORTED GLOBAL)
endforeach ()
if (NOT WIN32)
find_package(soci)
endif()
if (soci)
foreach (_comp core empty sqlite3)
set_target_properties ("soci_${_comp}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${soci}
IMPORTED_LOCATION_RELEASE
${soci}
INTERFACE_INCLUDE_DIRECTORIES
${SOCI_INCLUDE_DIR})
endforeach ()
else()
set (soci_lib_pre ${ep_lib_prefix})
set (soci_lib_post "")
if (WIN32)
# for some reason soci on windows still prepends lib (non-standard)
set (soci_lib_pre lib)
# this version in the name might change if/when we change versions of soci
set (soci_lib_post "_4_0")
endif ()
get_target_property (_boost_incs Boost::date_time INTERFACE_INCLUDE_DIRECTORIES)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION)
if (NOT _boost_dt)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_RELEASE)
endif ()
if (NOT _boost_dt)
get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_DEBUG)
endif ()
ExternalProject_Add (soci
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/SOCI/soci.git
GIT_TAG 04e1870294918d20761736743bb6136314c42dd5
# We had an issue with soci integer range checking for boost::optional
# and needed to remove the exception that SOCI throws in this case.
# This is *probably* a bug in SOCI, but has never been investigated more
# nor reported to the maintainers.
# This cmake script comments out the lines in question.
# This patch process is likely fragile and should be reviewed carefully
# whenever we update the GIT_TAG above.
PATCH_COMMAND
${CMAKE_COMMAND} -D RIPPLED_SOURCE=${CMAKE_CURRENT_SOURCE_DIR}
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/soci_patch.cmake
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_PREFIX_PATH=${CMAKE_BINARY_DIR}/sqlite3
-DCMAKE_MODULE_PATH=${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake
-DCMAKE_INCLUDE_PATH=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DCMAKE_LIBRARY_PATH=${sqlite_BINARY_DIR}
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DSOCI_CXX_C11=ON
-DSOCI_STATIC=ON
-DSOCI_LIBDIR=lib
-DSOCI_SHARED=OFF
-DSOCI_TESTS=OFF
# hacks to workaround the fact that soci doesn't currently use
# boost imported targets in its cmake. If they switch to
# proper imported targets, this next line can be removed
# (as well as the get_property above that sets _boost_incs)
-DBoost_INCLUDE_DIRS=$<JOIN:${_boost_incs},::>
-DBoost_INCLUDE_DIR=$<JOIN:${_boost_incs},::>
-DBOOST_ROOT=${BOOST_ROOT}
-DWITH_BOOST=ON
-DBoost_FOUND=ON
-DBoost_NO_BOOST_CMAKE=ON
-DBoost_DATE_TIME_FOUND=ON
-DSOCI_HAVE_BOOST=ON
-DSOCI_HAVE_BOOST_DATE_TIME=ON
-DBoost_DATE_TIME_LIBRARY=${_boost_dt}
-DSOCI_DB2=OFF
-DSOCI_FIREBIRD=OFF
-DSOCI_MYSQL=OFF
-DSOCI_ODBC=OFF
-DSOCI_ORACLE=OFF
-DSOCI_POSTGRESQL=OFF
-DSOCI_SQLITE3=ON
-DSQLITE3_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:sqlite,INTERFACE_INCLUDE_DIRECTORIES>,::>
-DSQLITE3_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:sqlite,IMPORTED_LOCATION_RELEASE>>
$<$<BOOL:${APPLE}>:-DCMAKE_FIND_FRAMEWORK=LAST>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
>
$<$<NOT:$<BOOL:${MSVC}>>:
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations"
>
# SEE: https://github.com/SOCI/soci/issues/640
$<$<AND:$<BOOL:${is_gcc}>,$<VERSION_GREATER_EQUAL:${CMAKE_CXX_COMPILER_VERSION},8>>:
"-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations -Wno-error=format-overflow -Wno-format-overflow -Wno-error=format-truncation"
>
LIST_SEPARATOR ::
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_core${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_empty${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib/$<CONFIG>/${soci_lib_pre}soci_sqlite3${soci_lib_post}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/lib
>
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS sqlite
BUILD_BYPRODUCTS
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_core${soci_lib_post}_d${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_empty${soci_lib_post}_d${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}${ep_lib_suffix}
<BINARY_DIR>/lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}_d${ep_lib_suffix}
)
ExternalProject_Get_Property (soci BINARY_DIR)
ExternalProject_Get_Property (soci SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (soci)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
file (MAKE_DIRECTORY ${BINARY_DIR}/include)
foreach (_comp core empty sqlite3)
set_target_properties ("soci_${_comp}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
"${SOURCE_DIR}/include;${BINARY_DIR}/include")
add_dependencies ("soci_${_comp}" soci) # something has to depend on the ExternalProject to trigger it
target_link_libraries (ripple_libs INTERFACE "soci_${_comp}")
if (NOT _comp STREQUAL "core")
target_link_libraries ("soci_${_comp}" INTERFACE soci_core)
endif ()
endforeach ()
endif()
foreach (_comp core empty sqlite3)
exclude_if_included ("soci_${_comp}")
endforeach ()
exclude_if_included (soci)
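
The PATCH_COMMAND above runs a standalone script in CMake script mode (cmake -D var=... -P script.cmake). soci_patch.cmake itself is not part of this diff; a script of that general shape, purely illustrative and not its actual contents, would look like:

# run as: cmake -D RIPPLED_SOURCE=<path> -P soci_patch.cmake
file (READ "include/soci/exchange-traits.h" _text)    # path illustrative only
string (REPLACE "throw soci_error" "// throw soci_error" _text "${_text}")
file (WRITE "include/soci/exchange-traits.h" "${_text}")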

View File

@@ -0,0 +1,93 @@
#[===================================================================[
NIH dep: sqlite
#]===================================================================]
add_library (sqlite STATIC IMPORTED GLOBAL)
if (NOT WIN32)
find_package(sqlite)
endif()
if(sqlite3)
set_target_properties (sqlite PROPERTIES
IMPORTED_LOCATION_DEBUG
${sqlite3}
IMPORTED_LOCATION_RELEASE
${sqlite3}
INTERFACE_INCLUDE_DIRECTORIES
${SQLITE_INCLUDE_DIR})
else()
ExternalProject_Add (sqlite3
PREFIX ${nih_cache_path}
# sqlite doesn't use git, but it provides versioned tarballs
URL https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
http://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
https://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
http://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip
# ^^^ version is apparent in the URL: 3260000 => 3.26.0
URL_HASH SHA256=de5dcab133aa339a4cf9e97c40aa6062570086d6085d8f9ad7bc6ddf8a52096e
# Don't need to worry about MITM attacks too much because the download
# is checked against a strong hash
TLS_VERIFY false
# we wrote a very simple CMake file to build sqlite
# so that's what we copy here so that we can build with
# CMake. sqlite doesn't generally provide a build system
# for the single amalgamation source file.
PATCH_COMMAND
${CMAKE_COMMAND} -E copy_if_different
${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/CMake_sqlite3.txt
<SOURCE_DIR>/CMakeLists.txt
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}sqlite3$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>
>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}sqlite3${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
)
ExternalProject_Get_Property (sqlite3 BINARY_DIR)
ExternalProject_Get_Property (sqlite3 SOURCE_DIR)
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (sqlite3)
endif ()
set_target_properties (sqlite PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/${ep_lib_prefix}sqlite3_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/${ep_lib_prefix}sqlite3${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR})
add_dependencies (sqlite sqlite3)
exclude_if_included (sqlite3)
endif()
target_link_libraries (sqlite INTERFACE $<$<NOT:$<BOOL:${MSVC}>>:dl>)
target_link_libraries (ripple_libs INTERFACE sqlite)
exclude_if_included (sqlite)
set(sqlite_BINARY_DIR ${BINARY_DIR})
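
Pinning the download with URL_HASH is what makes TLS_VERIFY false tolerable here: even over plain http the archive cannot be silently swapped. The download step behaves like this sketch:

file (DOWNLOAD
  https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip
  ${CMAKE_BINARY_DIR}/sqlite-amalgamation.zip
  EXPECTED_HASH SHA256=de5dcab133aa339a4cf9e97c40aa6062570086d6085d8f9ad7bc6ddf8a52096e)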

View File

@@ -1 +1,84 @@
#[===================================================================[
NIH dep: wasmedge: web assembly runtime for hooks.
#]===================================================================]
find_package(Curses)
if(CURSES_FOUND)
include_directories(${CURSES_INCLUDE_DIR})
target_link_libraries(ripple_libs INTERFACE ${CURSES_LIBRARY})
else()
message(WARNING "CURSES library not found... (only important for mac builds)")
endif()
find_package(LLVM REQUIRED CONFIG)
message(STATUS "Found LLVM ${LLVM_PACKAGE_VERSION}")
message(STATUS "Using LLVMConfig.cmake in: ${LLVM_DIR}")
ExternalProject_Add (wasmedge_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/WasmEdge/WasmEdge.git
GIT_TAG 0.11.2
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
-DWASMEDGE_BUILD_SHARED_LIB=OFF
-DWASMEDGE_BUILD_STATIC_LIB=ON
-DWASMEDGE_BUILD_AOT_RUNTIME=ON
-DWASMEDGE_FORCE_DISABLE_LTO=ON
-DWASMEDGE_LINK_LLVM_STATIC=ON
-DWASMEDGE_LINK_TOOLS_STATIC=ON
-DWASMEDGE_BUILD_PLUGINS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DLLVM_DIR=${LLVM_DIR}
-DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR}
-DLLVM_ENABLE_TERMINFO=OFF
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP -march=native"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_CONFIGURE ON
LOG_BUILD ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
$<$<VERSION_GREATER_EQUAL:${CMAKE_VERSION},3.12>:--parallel ${ep_procs}>
TEST_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS
<BINARY_DIR>/lib/api/libwasmedge.a
)
add_library (wasmedge STATIC IMPORTED GLOBAL)
ExternalProject_Get_Property (wasmedge_src BINARY_DIR)
ExternalProject_Get_Property (wasmedge_src SOURCE_DIR)
set (wasmedge_src_BINARY_DIR "${BINARY_DIR}")
add_dependencies (wasmedge wasmedge_src)
file (MAKE_DIRECTORY "${wasmedge_src_BINARY_DIR}/include/api")
set_target_properties (wasmedge PROPERTIES
IMPORTED_LOCATION_DEBUG
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
IMPORTED_LOCATION_RELEASE
"${wasmedge_src_BINARY_DIR}/lib/api/libwasmedge.a"
INTERFACE_INCLUDE_DIRECTORIES
"${wasmedge_src_BINARY_DIR}/include/api/"
)
target_link_libraries (ripple_libs INTERFACE wasmedge)
#RH NOTE: some compilers / versions of some libraries need these, most don't
find_library(XAR_LIBRARY NAMES xar)
if(XAR_LIBRARY)
target_link_libraries(ripple_libs INTERFACE ${XAR_LIBRARY})
else()
message(WARNING "xar library not found... (only important for mac builds)")
endif()
add_library (wasmedge::wasmedge ALIAS wasmedge)

View File

@@ -0,0 +1,167 @@
if(reporting)
find_library(cassandra NAMES cassandra)
if(NOT cassandra)
message("System installed Cassandra cpp driver not found. Will build")
find_library(zlib NAMES z zlib)
if(NOT zlib)
message("zlib not found. Will build")
add_library(zlib STATIC IMPORTED GLOBAL)
ExternalProject_Add(zlib_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/madler/zlib.git
GIT_TAG v1.2.12
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}z.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (zlib_src SOURCE_DIR)
ExternalProject_Get_Property (zlib_src BINARY_DIR)
set (zlib_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${zlib_src_SOURCE_DIR}/include)
set_target_properties (zlib PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}z.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(zlib zlib_src)
file(TO_CMAKE_PATH "${zlib_src_SOURCE_DIR}" zlib_src_SOURCE_DIR)
endif()
find_library(krb5 NAMES krb5)
if(NOT krb5)
message("krb5 not found. Will build")
add_library(krb5 STATIC IMPORTED GLOBAL)
ExternalProject_Add(krb5_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/krb5/krb5.git
GIT_TAG krb5-1.20-final
UPDATE_COMMAND ""
CONFIGURE_COMMAND autoreconf src && CFLAGS=-fcommon ./src/configure --enable-static --disable-shared > /dev/null
BUILD_IN_SOURCE 1
BUILD_COMMAND make
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <SOURCE_DIR>/lib/${ep_lib_prefix}krb5.a
LOG_BUILD TRUE
)
ExternalProject_Get_Property (krb5_src SOURCE_DIR)
ExternalProject_Get_Property (krb5_src BINARY_DIR)
set (krb5_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${krb5_src_SOURCE_DIR}/include)
set_target_properties (krb5 PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/lib/${ep_lib_prefix}krb5.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(krb5 krb5_src)
file(TO_CMAKE_PATH "${krb5_src_SOURCE_DIR}" krb5_src_SOURCE_DIR)
endif()
find_library(libuv1 NAMES uv uv1)
if(NOT libuv1)
message("libuv1 not found. Will build")
add_library(libuv1 STATIC IMPORTED GLOBAL)
ExternalProject_Add(libuv_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/libuv/libuv.git
GIT_TAG v1.44.2
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}uv_a.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (libuv_src SOURCE_DIR)
ExternalProject_Get_Property (libuv_src BINARY_DIR)
set (libuv_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${libuv_src_SOURCE_DIR}/include)
set_target_properties (libuv1 PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}uv_a.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(libuv1 libuv_src)
file(TO_CMAKE_PATH "${libuv_src_SOURCE_DIR}" libuv_src_SOURCE_DIR)
endif()
add_library (cassandra STATIC IMPORTED GLOBAL)
ExternalProject_Add(cassandra_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/datastax/cpp-driver.git
GIT_TAG 2.16.2
CMAKE_ARGS
-DLIBUV_ROOT_DIR=${BINARY_DIR}
-DLIBUV_LIBRARY=${BINARY_DIR}/libuv_a.a
-DLIBUV_INCLUDE_DIR=${SOURCE_DIR}/include
-DCASS_BUILD_STATIC=ON
-DCASS_BUILD_SHARED=OFF
-DOPENSSL_ROOT_DIR=/opt/local/openssl
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}cassandra_static.a
LOG_BUILD TRUE
LOG_CONFIGURE TRUE
)
ExternalProject_Get_Property (cassandra_src SOURCE_DIR)
ExternalProject_Get_Property (cassandra_src BINARY_DIR)
set (cassandra_src_SOURCE_DIR "${SOURCE_DIR}")
file (MAKE_DIRECTORY ${cassandra_src_SOURCE_DIR}/include)
set_target_properties (cassandra PROPERTIES
IMPORTED_LOCATION
${BINARY_DIR}/${ep_lib_prefix}cassandra_static.a
INTERFACE_INCLUDE_DIRECTORIES
${SOURCE_DIR}/include)
add_dependencies(cassandra cassandra_src)
if(NOT libuv1)
ExternalProject_Add_StepDependencies(cassandra_src build libuv1)
target_link_libraries(cassandra INTERFACE libuv1)
else()
target_link_libraries(cassandra INTERFACE ${libuv1})
endif()
if(NOT krb5)
ExternalProject_Add_StepDependencies(cassandra_src build krb5)
target_link_libraries(cassandra INTERFACE krb5)
else()
target_link_libraries(cassandra INTERFACE ${krb5})
endif()
if(NOT zlib)
ExternalProject_Add_StepDependencies(cassandra_src build zlib)
target_link_libraries(cassandra INTERFACE zlib)
else()
target_link_libraries(cassandra INTERFACE ${zlib})
endif()
file(TO_CMAKE_PATH "${cassandra_src_SOURCE_DIR}" cassandra_src_SOURCE_DIR)
target_link_libraries(ripple_libs INTERFACE cassandra)
else()
message("Found system installed cassandra cpp driver")
find_path(cassandra_includes NAMES cassandra.h REQUIRED)
target_link_libraries (ripple_libs INTERFACE ${cassandra})
target_include_directories(ripple_libs INTERFACE ${cassandra_includes})
endif()
exclude_if_included (cassandra)
endif()

View File

@@ -0,0 +1,18 @@
#[===================================================================[
NIH dep: date
the main library is header-only, thus is an INTERFACE lib in CMake.
NOTE: this has been accepted into c++20 so can likely be replaced
when we update to that standard
#]===================================================================]
find_package (date QUIET)
if (NOT TARGET date::date)
FetchContent_Declare(
hh_date_src
GIT_REPOSITORY https://github.com/HowardHinnant/date.git
GIT_TAG fc4cf092f9674f2670fb9177edcdee870399b829
)
FetchContent_MakeAvailable(hh_date_src)
endif ()
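
Whichever branch runs, a date::date target should end up defined (the upstream project exports one, and FetchContent_MakeAvailable adds its CMakeLists), so a consumer simply links it. Hypothetical consumer target:

target_link_libraries (my_tool PRIVATE date::date)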

View File

@@ -1,15 +1,324 @@
# currently linking to unsecure versions...if we switch, we'll
# currently linking to unsecure versions...if we switch, we'll
# need to add ssl as a link dependency to the grpc targets
option (use_secure_grpc "use TLS version of grpc libs." OFF)
if (use_secure_grpc)
set (grpc_suffix "")
else ()
set (grpc_suffix "_unsecure")
endif ()
find_package (gRPC 1.23 CONFIG QUIET)
if (TARGET gRPC::gpr AND NOT local_grpc)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_DEBUG)
if (NOT _grpc_l)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_RELEASE)
endif ()
if (NOT _grpc_l)
get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION)
endif ()
message (STATUS "Found cmake config for gRPC. Using ${_grpc_l}.")
else ()
find_package (PkgConfig QUIET)
if (PKG_CONFIG_FOUND)
pkg_check_modules (grpc QUIET "grpc${grpc_suffix}>=1.25" "grpc++${grpc_suffix}" gpr)
endif ()
if (grpc_FOUND)
message (STATUS "Found gRPC using pkg-config. Using ${grpc_gpr_PREFIX}.")
endif ()
add_executable (gRPC::grpc_cpp_plugin IMPORTED)
exclude_if_included (gRPC::grpc_cpp_plugin)
if (grpc_FOUND AND NOT local_grpc)
# use installed grpc (via pkg-config)
macro (add_imported_grpc libname_)
if (static)
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}${libname_}${CMAKE_STATIC_LIBRARY_SUFFIX}")
else ()
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}${libname_}${CMAKE_SHARED_LIBRARY_SUFFIX}")
endif()
find_library(_found_${libname_}
NAMES ${_search}
HINTS ${grpc_LIBRARY_DIRS})
if (_found_${libname_})
message (STATUS "importing ${libname_} as ${_found_${libname_}}")
else ()
message (FATAL_ERROR "using pkg-config for grpc, can't find ${_search}")
endif ()
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
set_target_properties ("gRPC::${libname_}" PROPERTIES IMPORTED_LOCATION ${_found_${libname_}})
if (grpc_INCLUDE_DIRS)
set_target_properties ("gRPC::${libname_}" PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${grpc_INCLUDE_DIRS})
endif ()
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
exclude_if_included ("gRPC::${libname_}")
endmacro ()
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${grpc_gpr_PREFIX}/bin/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
pkg_check_modules (cares QUIET libcares)
if (cares_FOUND)
if (static)
set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}cares${CMAKE_STATIC_LIBRARY_SUFFIX}")
set (_prefix cares_STATIC)
set (_static STATIC)
else ()
set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}")
set (_prefix cares)
set (_static)
endif()
find_library(_location NAMES ${_search} HINTS ${cares_LIBRARY_DIRS})
if (NOT _location)
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
endif ()
if(${_location} MATCHES "\\.a$")
add_library(c-ares::cares STATIC IMPORTED GLOBAL)
else()
add_library(c-ares::cares SHARED IMPORTED GLOBAL)
endif()
set_target_properties (c-ares::cares PROPERTIES
IMPORTED_LOCATION ${_location}
INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}"
INTERFACE_LINK_OPTIONS "${${_prefix}_LDFLAGS}"
)
exclude_if_included (c-ares::cares)
else ()
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
endif ()
else ()
#[===========================[
c-ares (grpc requires)
#]===========================]
ExternalProject_Add (c-ares_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/c-ares/c-ares.git
GIT_TAG cares-1_15_0
CMAKE_ARGS
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-DCARES_SHARED=OFF
-DCARES_STATIC=ON
-DCARES_STATIC_PIC=ON
-DCARES_INSTALL=ON
-DCARES_MSVC_STATIC_RUNTIME=ON
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
)
exclude_if_included (c-ares_src)
ExternalProject_Get_Property (c-ares_src BINARY_DIR)
set (cares_binary_dir "${BINARY_DIR}")
add_library (c-ares::cares STATIC IMPORTED GLOBAL)
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (c-ares::cares PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (c-ares::cares c-ares_src)
exclude_if_included (c-ares::cares)
if (NOT has_zlib)
#[===========================[
zlib (grpc requires)
#]===========================]
if (MSVC)
set (zlib_debug_postfix "d") # zlib cmake sets this internally for MSVC, so we really don't have a choice
set (zlib_base "zlibstatic")
else ()
set (zlib_debug_postfix "_d")
set (zlib_base "z")
endif ()
ExternalProject_Add (zlib_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/madler/zlib.git
GIT_TAG v1.2.11
CMAKE_ARGS
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
-DCMAKE_DEBUG_POSTFIX=${zlib_debug_postfix}
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DCMAKE_INSTALL_PREFIX=<BINARY_DIR>/_installed_
-DBUILD_SHARED_LIBS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_C_FLAGS_DEBUG=-MTd"
"-DCMAKE_C_FLAGS_RELEASE=-MT"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
TEST_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . --config $<CONFIG> --target install
BUILD_BYPRODUCTS
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
<BINARY_DIR>/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
)
exclude_if_included (zlib_src)
ExternalProject_Get_Property (zlib_src BINARY_DIR)
set (zlib_binary_dir "${BINARY_DIR}")
add_library (ZLIB::ZLIB STATIC IMPORTED GLOBAL)
file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include)
set_target_properties (ZLIB::ZLIB PROPERTIES
IMPORTED_LOCATION_DEBUG
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${BINARY_DIR}/_installed_/include)
add_dependencies (ZLIB::ZLIB zlib_src)
exclude_if_included (ZLIB::ZLIB)
endif ()
#[===========================[
grpc
#]===========================]
ExternalProject_Add (grpc_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/grpc/grpc.git
GIT_TAG v1.25.0
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_STANDARD=17
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
$<$<BOOL:${unity}>:-DCMAKE_UNITY_BUILD=ON>
-DCMAKE_DEBUG_POSTFIX=_d
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
-DgRPC_BUILD_TESTS=OFF
-DgRPC_BENCHMARK_PROVIDER=""
-DgRPC_BUILD_CSHARP_EXT=OFF
-DgRPC_MSVC_STATIC_RUNTIME=ON
-DgRPC_INSTALL=OFF
-DgRPC_CARES_PROVIDER=package
-Dc-ares_DIR=${cares_binary_dir}/_installed_/lib/cmake/c-ares
-DgRPC_SSL_PROVIDER=package
-DOPENSSL_ROOT_DIR=${OPENSSL_ROOT_DIR}
-DgRPC_PROTOBUF_PROVIDER=package
-DProtobuf_USE_STATIC_LIBS=$<IF:$<AND:$<BOOL:${Protobuf_FOUND}>,$<NOT:$<BOOL:${static}>>>,OFF,ON>
-DProtobuf_INCLUDE_DIR=$<JOIN:$<TARGET_PROPERTY:protobuf::libprotobuf,INTERFACE_INCLUDE_DIRECTORIES>,:_:>
-DProtobuf_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotobuf,IMPORTED_LOCATION_RELEASE>>
-DProtobuf_PROTOC_LIBRARY=$<IF:$<CONFIG:Debug>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_DEBUG>,$<TARGET_PROPERTY:protobuf::libprotoc,IMPORTED_LOCATION_RELEASE>>
-DProtobuf_PROTOC_EXECUTABLE=$<TARGET_PROPERTY:protobuf::protoc,IMPORTED_LOCATION>
-DgRPC_ZLIB_PROVIDER=package
$<$<NOT:$<BOOL:${has_zlib}>>:-DZLIB_ROOT=${zlib_binary_dir}/_installed_>
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP"
>
LOG_BUILD ON
LOG_CONFIGURE ON
BUILD_COMMAND
${CMAKE_COMMAND}
--build .
--config $<CONFIG>
--parallel ${ep_procs}
$<$<BOOL:${is_multiconfig}>:
COMMAND
${CMAKE_COMMAND} -E copy
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}grpc++${grpc_suffix}$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}address_sorting$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/${ep_lib_prefix}gpr$<$<CONFIG:Debug>:_d>${ep_lib_suffix}
<BINARY_DIR>/$<CONFIG>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
<BINARY_DIR>
>
LIST_SEPARATOR :_:
TEST_COMMAND ""
INSTALL_COMMAND ""
DEPENDS c-ares_src
BUILD_BYPRODUCTS
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc${grpc_suffix}_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}grpc++${grpc_suffix}_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}address_sorting${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}address_sorting_d${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}gpr${ep_lib_suffix}
<BINARY_DIR>/${ep_lib_prefix}gpr_d${ep_lib_suffix}
<BINARY_DIR>/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}
)
if (TARGET protobuf_src)
ExternalProject_Add_StepDependencies(grpc_src build protobuf_src)
endif ()
exclude_if_included (grpc_src)
ExternalProject_Get_Property (grpc_src BINARY_DIR)
ExternalProject_Get_Property (grpc_src SOURCE_DIR)
set (grpc_binary_dir "${BINARY_DIR}")
set (grpc_source_dir "${SOURCE_DIR}")
if (CMAKE_VERBOSE_MAKEFILE)
print_ep_logs (grpc_src)
endif ()
file (MAKE_DIRECTORY ${SOURCE_DIR}/include)
macro (add_imported_grpc libname_)
add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL)
set_target_properties ("gRPC::${libname_}" PROPERTIES
IMPORTED_LOCATION_DEBUG
${grpc_binary_dir}/${ep_lib_prefix}${libname_}_d${ep_lib_suffix}
IMPORTED_LOCATION_RELEASE
${grpc_binary_dir}/${ep_lib_prefix}${libname_}${ep_lib_suffix}
INTERFACE_INCLUDE_DIRECTORIES
${grpc_source_dir}/include)
add_dependencies ("gRPC::${libname_}" grpc_src)
target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}")
exclude_if_included ("gRPC::${libname_}")
endmacro ()
set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${grpc_binary_dir}/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}")
add_dependencies (gRPC::grpc_cpp_plugin grpc_src)
endif ()
add_imported_grpc (gpr)
add_imported_grpc ("grpc${grpc_suffix}")
add_imported_grpc ("grpc++${grpc_suffix}")
add_imported_grpc (address_sorting)
target_link_libraries ("gRPC::grpc${grpc_suffix}" INTERFACE c-ares::cares gRPC::gpr gRPC::address_sorting ZLIB::ZLIB)
target_link_libraries ("gRPC::grpc++${grpc_suffix}" INTERFACE "gRPC::grpc${grpc_suffix}" gRPC::gpr)
endif ()
#[=================================[
generate protobuf sources for
grpc defs and bundle into a
static lib
#]=================================]
set (GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc")
file (MAKE_DIRECTORY ${GRPC_GEN_DIR})
set (GRPC_PROTO_SRCS)
set (GRPC_PROTO_HDRS)
set (GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto")
foreach(file ${GRPC_DEFINITION_FILES})
get_filename_component(_abs_file ${file} ABSOLUTE)
@@ -20,10 +329,10 @@ foreach(file ${GRPC_DEFINITION_FILES})
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set (src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
set (src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
set (hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
set (hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
add_custom_command(
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
COMMAND protobuf::protoc
@@ -41,22 +350,20 @@ foreach(file ${GRPC_DEFINITION_FILES})
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2}) list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach() endforeach()
add_library(grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS}) add_library (grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS})
#target_include_directories(grpc_pbufs PRIVATE src) #target_include_directories (grpc_pbufs PRIVATE src)
target_include_directories(grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR}) target_include_directories (grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR})
target_link_libraries(grpc_pbufs target_link_libraries (grpc_pbufs protobuf::libprotobuf "gRPC::grpc++${grpc_suffix}")
"gRPC::grpc++" target_compile_options (grpc_pbufs
# libgrpc is missing references.
absl::random_random
)
target_compile_options(grpc_pbufs
PRIVATE PRIVATE
$<$<BOOL:${MSVC}>:-wd4065> $<$<BOOL:${MSVC}>:-wd4065>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations> $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
PUBLIC PUBLIC
$<$<BOOL:${MSVC}>:-wd4996> $<$<BOOL:${MSVC}>:-wd4996>
$<$<BOOL:${XCODE}>: $<$<BOOL:${is_xcode}>:
--system-header-prefix="google/protobuf" --system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec -Wno-deprecated-dynamic-exception-spec
>) >)
add_library(Ripple::grpc_pbufs ALIAS grpc_pbufs) add_library (Ripple::grpc_pbufs ALIAS grpc_pbufs)
target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
exclude_if_included (grpc_pbufs)
@@ -50,6 +50,11 @@ if(CMAKE_TOOLCHAIN_FILE)
   endif()
 endif()
+if (NOT USE_CONAN)
+  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
+  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
+endif()
 include (CheckCXXCompilerFlag)
 include (FetchContent)
 include (ExternalProject)
@@ -62,7 +67,9 @@ endif ()
 include(RippledSanity)
 include(RippledVersion)
 include(RippledSettings)
+if (NOT USE_CONAN)
+  include(RippledNIH)
+endif()
 # this check has to remain in the top-level cmake
 # because of the early return statement
 if (packages_only)
@@ -74,64 +81,86 @@ endif ()
 include(RippledCompiler)
 include(RippledInterface)
-include(deps/Boost)
-find_package(OpenSSL 1.1.1 REQUIRED)
-set_target_properties(OpenSSL::SSL PROPERTIES
-  INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
-)
-add_subdirectory(src/secp256k1)
-add_subdirectory(src/ed25519-donna)
-find_package(lz4 REQUIRED)
-# Target names with :: are not allowed in a generator expression.
-# We need to pull the include directories and imported location properties
-# from separate targets.
-find_package(LibArchive REQUIRED)
-find_package(SOCI REQUIRED)
-find_package(SQLite3 REQUIRED)
-find_package(Snappy REQUIRED)
-# find_package(wasmedge REQUIRED)
-option(rocksdb "Enable RocksDB" ON)
-if(rocksdb)
-  find_package(RocksDB REQUIRED)
-  set_target_properties(RocksDB::rocksdb PROPERTIES
-    INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
-  )
-  target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
-endif()
-find_package(nudb REQUIRED)
-find_package(date REQUIRED)
-include(deps/Protobuf)
-include(deps/gRPC)
-include(deps/WasmEdge)
-if(TARGET nudb::core)
-  set(nudb nudb::core)
-elseif(TARGET NuDB::nudb)
-  set(nudb NuDB::nudb)
-else()
-  message(FATAL_ERROR "unknown nudb target")
-endif()
-target_link_libraries(ripple_libs INTERFACE ${nudb})
+###
+if (NOT USE_CONAN)
+  add_subdirectory(src/secp256k1)
+  add_subdirectory(src/ed25519-donna)
+  include(deps/Boost)
+  include(deps/OpenSSL)
+  # include(deps/Secp256k1)
+  # include(deps/Ed25519-donna)
+  include(deps/Lz4)
+  include(deps/Libarchive)
+  include(deps/Sqlite)
+  include(deps/Soci)
+  include(deps/Snappy)
+  include(deps/Rocksdb)
+  include(deps/Nudb)
+  include(deps/date)
+  include(deps/Protobuf)
+  include(deps/gRPC)
+  include(deps/cassandra)
+  include(deps/Postgres)
+  include(deps/WasmEdge)
+else()
+  include(conan/Boost)
+  find_package(OpenSSL 1.1.1 REQUIRED)
+  set_target_properties(OpenSSL::SSL PROPERTIES
+    INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
+  )
+  add_subdirectory(src/secp256k1)
+  add_subdirectory(src/ed25519-donna)
+  find_package(lz4 REQUIRED)
+  # Target names with :: are not allowed in a generator expression.
+  # We need to pull the include directories and imported location properties
+  # from separate targets.
+  find_package(LibArchive REQUIRED)
+  find_package(SOCI REQUIRED)
+  find_package(SQLite3 REQUIRED)
+  find_package(Snappy REQUIRED)
+  find_package(wasmedge REQUIRED)
+  option(rocksdb "Enable RocksDB" ON)
+  if(rocksdb)
+    find_package(RocksDB REQUIRED)
+    set_target_properties(RocksDB::rocksdb PROPERTIES
+      INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
+    )
+    target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
+  endif()
+  find_package(nudb REQUIRED)
+  find_package(date REQUIRED)
+  include(conan/Protobuf)
+  include(conan/gRPC)
+  if(TARGET nudb::core)
+    set(nudb nudb::core)
+  elseif(TARGET NuDB::nudb)
+    set(nudb NuDB::nudb)
+  else()
+    message(FATAL_ERROR "unknown nudb target")
+  endif()
+  target_link_libraries(ripple_libs INTERFACE ${nudb})
 if(reporting)
   find_package(cassandra-cpp-driver REQUIRED)
   find_package(PostgreSQL REQUIRED)
-  target_link_libraries(ripple_libs INTERFACE
-    cassandra-cpp-driver::cassandra-cpp-driver
-    PostgreSQL::PostgreSQL
-  )
-endif()
-target_link_libraries(ripple_libs INTERFACE
-  ed25519::ed25519
-  LibArchive::LibArchive
-  lz4::lz4
-  OpenSSL::Crypto
-  OpenSSL::SSL
-  Ripple::grpc_pbufs
-  Ripple::pbufs
-  secp256k1::secp256k1
-  soci::soci
-  SQLite::SQLite3
-)
+    target_link_libraries(ripple_libs INTERFACE
+      cassandra-cpp-driver::cassandra-cpp-driver
+      PostgreSQL::PostgreSQL
+    )
+  endif()
+  target_link_libraries(ripple_libs INTERFACE
+    ed25519::ed25519
+    LibArchive::LibArchive
+    lz4::lz4
+    OpenSSL::Crypto
+    OpenSSL::SSL
+    Ripple::grpc_pbufs
+    Ripple::pbufs
+    secp256k1::secp256k1
+    soci::soci
+    SQLite::SQLite3
+  )
+endif()
 ###
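A note on the switch itself: USE_CONAN is referenced throughout this hunk but not defined in it. Assuming it is declared as an ordinary CMake option elsewhere in the tree, the legacy Conan flow would be selected with `cmake -DUSE_CONAN=ON ..`, while a plain configure now defaults to the vendored Builds/CMake (NIH) modules added above.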
@@ -5,6 +5,8 @@
 # debugging.
 set -ex
+set -e
 
 echo "START INSIDE CONTAINER - CORE"
 echo "-- BUILD CORES: $3"
@@ -25,8 +27,7 @@ if [[ "$?" -ne "0" ]]; then
   exit 127
 fi
-BUILD_TYPE=Release
+perl -i -pe "s/^(\\s*)-DBUILD_SHARED_LIBS=OFF/\\1-DBUILD_SHARED_LIBS=OFF\\n\\1-DROCKSDB_BUILD_SHARED=OFF/g" Builds/CMake/deps/Rocksdb.cmake &&
 mv Builds/CMake/deps/WasmEdge.cmake Builds/CMake/deps/WasmEdge.old &&
 echo "find_package(LLVM REQUIRED CONFIG)
 message(STATUS \"Found LLVM \${LLVM_PACKAGE_VERSION}\")
@@ -37,30 +38,13 @@ target_link_libraries (ripple_libs INTERFACE wasmedge)
 add_library (wasmedge::wasmedge ALIAS wasmedge)
 message(\"WasmEdge DONE\")
 " > Builds/CMake/deps/WasmEdge.cmake &&
-export LDFLAGS="-static-libstdc++"
-git config --global --add safe.directory /io &&
 git checkout src/ripple/protocol/impl/BuildInfo.cpp &&
-sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)$(if [ -n "$4" ]; then echo "+$4"; fi)\"/g src/ripple/protocol/impl/BuildInfo.cpp &&
-conan export external/snappy snappy/1.1.10@xahaud/stable &&
-conan export external/soci soci/4.0.3@xahaud/stable &&
+sed -i s/\"0.0.0\"/\"$(date +%Y).$(date +%-m).$(date +%-d)-$(git rev-parse --abbrev-ref HEAD)+$4\"/g src/ripple/protocol/impl/BuildInfo.cpp &&
 cd release-build &&
-conan install .. --output-folder . --build missing --settings build_type=$BUILD_TYPE &&
-cmake .. -G Ninja \
-    -DCMAKE_BUILD_TYPE=$BUILD_TYPE \
-    -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-    -DCMAKE_EXE_LINKER_FLAGS="-static-libstdc++" \
-    -DLLVM_DIR=$LLVM_DIR \
-    -DWasmEdge_LIB=$WasmEdge_LIB \
-    -Dxrpld=TRUE \
-    -Dtests=TRUE &&
-ccache -z &&
-ninja -j $3 &&
-ccache -s &&
-strip -s rippled &&
+cmake .. -DCMAKE_BUILD_TYPE=Release -DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DWasmEdge_LIB=/usr/local/lib64/libwasmedge.a &&
+make -j$3 VERBOSE=1 &&
+strip -s rippled &&
 mv rippled xahaud &&
-libcheck xahaud &&
 echo "Build host: `hostname`" > release.info &&
 echo "Build date: `date`" >> release.info &&
 echo "Build md5: `md5sum xahaud`" >> release.info &&
@@ -85,8 +69,8 @@ fi
 cd ..;
 mv src/ripple/net/impl/RegisterSSLCerts.cpp.old src/ripple/net/impl/RegisterSSLCerts.cpp;
+mv Builds/CMake/deps/Rocksdb.cmake.old Builds/CMake/deps/Rocksdb.cmake;
 mv Builds/CMake/deps/WasmEdge.old Builds/CMake/deps/WasmEdge.cmake;
-rm src/certs/certbundle.h;
-git checkout src/ripple/protocol/impl/BuildInfo.cpp;
 echo "END INSIDE CONTAINER - CORE"
@@ -3,6 +3,8 @@
 # processes launched or upon any unbound variable.
 # We use set -x to print commands before running them to help with
 # debugging.
+set -ex
 set -e
 
 echo "START INSIDE CONTAINER - FULL"
@@ -14,6 +16,13 @@ echo "-- GITHUB_RUN_NUMBER: $4"
 umask 0000;
 
+echo "Fixing CentOS 7 EOL"
+sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-*
+sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-*
+yum clean all
+yum-config-manager --disable centos-sclo-sclo
+
 ####
 cd /io;
@@ -64,15 +73,92 @@
 #endif/g" src/ripple/net/impl/RegisterSSLCerts.cpp &&
 sed -i "s/#include <ripple\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/ripple/net/impl/RegisterSSLCerts.cpp
 fi
-# Environment setup moved to Dockerfile in release-builder.sh
-source /opt/rh/gcc-toolset-11/enable
-export PATH=/usr/local/bin:$PATH
-export CC='ccache gcc' &&
-export CXX='ccache g++' &&
+mkdir -p .nih_c;
+mkdir -p .nih_toolchain;
+cd .nih_toolchain &&
+yum install -y wget lz4 lz4-devel git llvm13-static.x86_64 llvm13-devel.x86_64 devtoolset-10-binutils zlib-static ncurses-static -y \
+    devtoolset-7-gcc-c++ \
+    devtoolset-9-gcc-c++ \
+    devtoolset-10-gcc-c++ \
+    snappy snappy-devel \
+    zlib zlib-devel \
+    lz4-devel \
+    libasan &&
+export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-7/g"` &&
+echo "-- Install ZStd 1.1.3 --" &&
+yum install epel-release -y &&
+ZSTD_VERSION="1.1.3" &&
+( wget -nc -q -O zstd-${ZSTD_VERSION}.tar.gz https://github.com/facebook/zstd/archive/v${ZSTD_VERSION}.tar.gz; echo "" ) &&
+tar xzvf zstd-${ZSTD_VERSION}.tar.gz &&
+cd zstd-${ZSTD_VERSION} &&
+make -j$3 install &&
+cd .. &&
+echo "-- Install Cmake 3.23.1 --" &&
+pwd &&
+( wget -nc -q https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1-linux-x86_64.tar.gz; echo "" ) &&
+tar -xzf cmake-3.23.1-linux-x86_64.tar.gz -C /hbb/ &&
+echo "-- Install Boost 1.86.0 --" &&
+pwd &&
+( wget -nc -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz; echo "" ) &&
+tar -xzf boost_1_86_0.tar.gz &&
+cd boost_1_86_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
+cd ../ &&
+echo "-- Install Protobuf 3.20.0 --" &&
+pwd &&
+( wget -nc -q https://github.com/protocolbuffers/protobuf/releases/download/v3.20.0/protobuf-all-3.20.0.tar.gz; echo "" ) &&
+tar -xzf protobuf-all-3.20.0.tar.gz &&
+cd protobuf-3.20.0/ &&
+./autogen.sh && ./configure --prefix=/usr --disable-shared link=static && make -j$3 && make install &&
+cd .. &&
+echo "-- Build LLD --" &&
+pwd &&
+ln /usr/bin/llvm-config-13 /usr/bin/llvm-config &&
+mv /opt/rh/devtoolset-9/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar-9 &&
+ln /opt/rh/devtoolset-10/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar &&
+( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/lld-13.0.1.src.tar.xz; echo "" ) &&
+( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/libunwind-13.0.1.src.tar.xz; echo "" ) &&
+tar -xf lld-13.0.1.src.tar.xz &&
+tar -xf libunwind-13.0.1.src.tar.xz &&
+cp -r libunwind-13.0.1.src/include libunwind-13.0.1.src/src lld-13.0.1.src/ &&
+cd lld-13.0.1.src &&
+rm -rf build CMakeCache.txt &&
+mkdir -p build &&
+cd build &&
+cmake .. -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm13/ -DCMAKE_BUILD_TYPE=Release &&
+make -j$3 install &&
+ln -s /usr/lib64/llvm13/lib/include/lld /usr/include/lld &&
+cp /usr/lib64/llvm13/lib/liblld*.a /usr/local/lib/ &&
+cd ../../ &&
+echo "-- Build WasmEdge --" &&
+( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) &&
+cd WasmEdge-0.11.2 &&
+( mkdir -p build; echo "" ) &&
+cd build &&
+export BOOST_ROOT="/usr/local/src/boost_1_86_0" &&
+export Boost_LIBRARY_DIRS="/usr/local/lib" &&
+export BOOST_INCLUDEDIR="/usr/local/src/boost_1_86_0" &&
+export PATH=`echo $PATH | sed -E "s/devtoolset-7/devtoolset-9/g"` &&
+cmake .. \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DWASMEDGE_BUILD_SHARED_LIB=OFF \
+    -DWASMEDGE_BUILD_STATIC_LIB=ON \
+    -DWASMEDGE_BUILD_AOT_RUNTIME=ON \
+    -DWASMEDGE_FORCE_DISABLE_LTO=ON \
+    -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
+    -DWASMEDGE_LINK_LLVM_STATIC=ON \
+    -DWASMEDGE_BUILD_PLUGINS=OFF \
+    -DWASMEDGE_LINK_TOOLS_STATIC=ON \
+    -DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ &&
+make -j$3 install &&
+export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-10/g"` &&
+cp -r include/api/wasmedge /usr/include/ &&
+cd /io/ &&
 echo "-- Build Rippled --" &&
 pwd &&
-echo "MOVING TO [ build-core.sh ]";
+cp Builds/CMake/deps/Rocksdb.cmake Builds/CMake/deps/Rocksdb.cmake.old &&
+echo "MOVING TO [ build-core.sh ]"
+cd /io;
 printenv > .env.temp;
 cat .env.temp | grep '=' | sed s/\\\(^[^=]\\+=\\\)/\\1\\\"/g|sed s/\$/\\\"/g > .env;
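The closing printenv/sed pair snapshots the container environment into .env, wrapping each VAR=value in double quotes (the first sed inserts the opening quote after the first =, the second appends the closing quote) — presumably so a later stage such as build-core.sh can source the file and reproduce the same toolchain environment.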
@@ -32,8 +32,8 @@ class Xrpl(ConanFile):
         'nudb/2.0.8',
         'openssl/1.1.1u',
         'protobuf/3.21.9',
-        'snappy/1.1.10@xahaud/stable',
-        'soci/4.0.3@xahaud/stable',
+        'snappy/1.1.10',
+        'soci/4.0.3',
         'sqlite3/3.42.0',
         'zlib/1.2.13',
         'wasmedge/0.11.2',
@@ -8,4 +8,4 @@ if [[ "$GITHUB_REPOSITORY" == "" ]]; then
 fi
 
 echo "Mounting $(pwd)/io in ubuntu and running unit tests"
-docker run --rm -i -v $(pwd):/io --platform=linux/amd64 -e BUILD_CORES=$BUILD_CORES ubuntu sh -c '/io/release-build/xahaud --unittest-jobs $BUILD_CORES -u'
+docker run --rm -i -v $(pwd):/io -e BUILD_CORES=$BUILD_CORES ubuntu sh -c '/io/release-build/xahaud --unittest-jobs $BUILD_CORES -u'
@@ -77,14 +77,9 @@ class SnappyConan(ConanFile):
         self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
         # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
         self.cpp_info.components["snappylib"].libs = ["snappy"]
-        # The following block is commented out as a workaround for a bug in the
-        # Conan 1.x CMakeDeps generator. Including system_libs ("m") here
-        # incorrectly triggers a heuristic that adds a dynamic link to `stdc++`
-        # (-lstdc++), preventing a fully static build.
-        # This behavior is expected to be corrected in Conan 2.
-        # if not self.options.shared:
-        #     if self.settings.os in ["Linux", "FreeBSD"]:
-        #         self.cpp_info.components["snappylib"].system_libs.append("m")
+        if not self.options.shared:
+            if self.settings.os in ["Linux", "FreeBSD"]:
+                self.cpp_info.components["snappylib"].system_libs.append("m")
         # TODO: to remove in conan v2 once cmake_find_package* generators removed
         self.cpp_info.names["cmake_find_package"] = "Snappy"
@@ -154,7 +154,7 @@ class SociConan(ConanFile):
         self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
         self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
         if self.options.with_boost:
-            self.cpp_info.components["soci_core"].requires.append("boost::headers")
+            self.cpp_info.components["soci_core"].requires.append("boost::boost")
         # soci_empty
         if self.options.empty:
@@ -1,8 +1,9 @@
#!/bin/bash #!/bin/bash -u
# We use set -e and bash with -u to bail on first non zero exit code of any # We use set -e and bash with -u to bail on first non zero exit code of any
# processes launched or upon any unbound variable. # processes launched or upon any unbound variable.
# We use set -x to print commands before running them to help with # We use set -x to print commands before running them to help with
# debugging. # debugging.
set -ex
echo "START BUILDING (HOST)" echo "START BUILDING (HOST)"
@@ -13,7 +14,7 @@ BUILD_CORES=$(echo "scale=0 ; `nproc` / 1.337" | bc)
if [[ "$GITHUB_REPOSITORY" == "" ]]; then if [[ "$GITHUB_REPOSITORY" == "" ]]; then
#Default #Default
BUILD_CORES=${BUILD_CORES:-8} BUILD_CORES=8
fi fi
# Ensure still works outside of GH Actions by setting these to /dev/null # Ensure still works outside of GH Actions by setting these to /dev/null
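Worked example for the core count above: with scale=0, bc truncates the division, so `nproc` / 1.337 leaves roughly three quarters of the host's cores for the build — a 16-core runner yields BUILD_CORES=11 (16 / 1.337 ≈ 11.97 → 11). Outside GitHub (empty GITHUB_REPOSITORY) the script now pins it to 8 instead.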
@@ -45,166 +46,29 @@ fi
 STATIC_CONTAINER=$(docker ps -a | grep $CONTAINER_NAME |wc -l)
-CACHE_VOLUME_NAME="xahau-release-builder-cache"
 
 # if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
 if false; then
   echo "Static container, execute in static container to have max. cache"
   docker start $CONTAINER_NAME
-  docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-core.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
+  docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
   docker stop $CONTAINER_NAME
 else
   echo "No static container, build on temp container"
   rm -rf release-build;
   mkdir -p release-build;
-  docker volume create $CACHE_VOLUME_NAME
-
-  # Create inline Dockerfile with environment setup for build-full.sh
-  DOCKERFILE_CONTENT=$(cat <<'DOCKERFILE_EOF'
-FROM ghcr.io/phusion/holy-build-box:4.0.1-amd64
-
-ARG BUILD_CORES=8
-
-# Enable repositories and install dependencies
-RUN /hbb_exe/activate-exec bash -c "dnf install -y epel-release && \
-    dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled crb && \
-    dnf install -y --enablerepo=devel \
-    wget git \
-    gcc-toolset-11-gcc-c++ gcc-toolset-11-binutils gcc-toolset-11-libatomic-devel \
-    lz4 lz4-devel \
-    ncurses-static ncurses-devel \
-    snappy snappy-devel \
-    zlib zlib-devel zlib-static \
-    libasan \
-    python3 python3-pip \
-    ccache \
-    ninja-build \
-    patch \
-    glibc-devel glibc-static \
-    libxml2-devel \
-    autoconf \
-    automake \
-    texinfo \
-    libtool \
-    llvm14-static llvm14-devel && \
-    dnf clean all"
-
-# Install Conan and CMake
-RUN /hbb_exe/activate-exec pip3 install "conan==1.66.0" && \
-    /hbb_exe/activate-exec wget -q https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1-linux-x86_64.tar.gz -O cmake.tar.gz && \
-    mkdir cmake && \
-    tar -xzf cmake.tar.gz --strip-components=1 -C cmake && \
-    rm cmake.tar.gz
-
-# Install Boost 1.86.0
-RUN /hbb_exe/activate-exec bash -c "cd /tmp && \
-    wget -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz -O boost.tar.gz && \
-    mkdir boost && \
-    tar -xzf boost.tar.gz --strip-components=1 -C boost && \
-    cd boost && \
-    ./bootstrap.sh && \
-    ./b2 link=static -j${BUILD_CORES} && \
-    ./b2 install && \
-    cd /tmp && \
-    rm -rf boost boost.tar.gz"
-
-ENV BOOST_ROOT=/usr/local/src/boost_1_86_0
-ENV Boost_LIBRARY_DIRS=/usr/local/lib
-ENV BOOST_INCLUDEDIR=/usr/local/src/boost_1_86_0
-ENV CMAKE_EXE_LINKER_FLAGS="-static-libstdc++"
-ENV LLVM_DIR=/usr/lib64/llvm14/lib/cmake/llvm
-ENV WasmEdge_LIB=/usr/local/lib64/libwasmedge.a
-ENV CC='ccache gcc'
-ENV CXX='ccache g++'
-
-# Install LLD
-RUN /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && \
-    cd /tmp && \
-    wget -q https://github.com/llvm/llvm-project/releases/download/llvmorg-14.0.3/lld-14.0.3.src.tar.xz && \
-    wget -q https://github.com/llvm/llvm-project/releases/download/llvmorg-14.0.3/libunwind-14.0.3.src.tar.xz && \
-    tar -xf lld-14.0.3.src.tar.xz && \
-    tar -xf libunwind-14.0.3.src.tar.xz && \
-    cp -r libunwind-14.0.3.src/include libunwind-14.0.3.src/src lld-14.0.3.src/ && \
-    cd lld-14.0.3.src && \
-    mkdir -p build && cd build && \
-    cmake .. \
-      -DLLVM_LIBRARY_DIR=/usr/lib64/llvm14/lib/ \
-      -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm14/ \
-      -DCMAKE_BUILD_TYPE=Release \
-      -DCMAKE_EXE_LINKER_FLAGS=\"\$CMAKE_EXE_LINKER_FLAGS\" && \
-    make -j${BUILD_CORES} install && \
-    ln -s /usr/lib64/llvm14/lib/include/lld /usr/include/lld && \
-    cp /usr/lib64/llvm14/lib/liblld*.a /usr/local/lib/ && \
-    cd /tmp && rm -rf lld-* libunwind-*"
-
-# Build and install WasmEdge (static version)
-# Note: Conan only provides WasmEdge with shared library linking.
-# For a fully static build, we need to manually install:
-#   * Boost: Static C++ libraries for filesystem and system operations (built from source above)
-#   * LLVM: Static LLVM libraries for WebAssembly compilation (installed via llvm14-static package)
-#   * LLD: Static linker to produce the final static binary (built from source above)
-# These were installed above to enable WASMEDGE_LINK_LLVM_STATIC=ON
-RUN cd /tmp && \
-    ( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) && \
-    cd WasmEdge-0.11.2 && \
-    ( mkdir -p build; echo "" ) && \
-    cd build && \
-    /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && \
-    ln -sf /opt/rh/gcc-toolset-11/root/usr/bin/ar /usr/bin/ar && \
-    cmake .. \
-      -DCMAKE_BUILD_TYPE=Release \
-      -DCMAKE_INSTALL_PREFIX=/usr/local \
-      -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
-      -DWASMEDGE_BUILD_SHARED_LIB=OFF \
-      -DWASMEDGE_BUILD_STATIC_LIB=ON \
-      -DWASMEDGE_BUILD_AOT_RUNTIME=ON \
-      -DWASMEDGE_FORCE_DISABLE_LTO=ON \
-      -DWASMEDGE_LINK_LLVM_STATIC=ON \
-      -DWASMEDGE_BUILD_PLUGINS=OFF \
-      -DWASMEDGE_LINK_TOOLS_STATIC=ON \
-      -DBoost_NO_BOOST_CMAKE=ON \
-      -DCMAKE_EXE_LINKER_FLAGS=\"\$CMAKE_EXE_LINKER_FLAGS\" \
-      && \
-    make -j${BUILD_CORES} install" && \
-    cp -r include/api/wasmedge /usr/include/ && \
-    cd /tmp && rm -rf WasmEdge* 0.11.2.zip
-
-# Set environment variables
-ENV PATH=/usr/local/bin:$PATH
-
-# Configure ccache and Conan
-RUN /hbb_exe/activate-exec bash -c "ccache -M 10G && \
-    ccache -o cache_dir=/cache/ccache && \
-    ccache -o compiler_check=content && \
-    conan config set storage.path=/cache/conan && \
-    (conan profile new default --detect || true) && \
-    conan profile update settings.compiler.libcxx=libstdc++11 default && \
-    conan profile update settings.compiler.cppstd=20 default"
-DOCKERFILE_EOF
-)
-
-  # Build custom Docker image
-  IMAGE_NAME="xahaud-builder:latest"
-  echo "Building custom Docker image with dependencies..."
-  echo "$DOCKERFILE_CONTENT" | docker build --build-arg BUILD_CORES="$BUILD_CORES" -t "$IMAGE_NAME" - || exit 1
-
   if [[ "$GITHUB_REPOSITORY" == "" ]]; then
     # Non GH, local building
     echo "Non-GH runner, local building, temp container"
-    docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io -v "$CACHE_VOLUME_NAME":/cache --network host "$IMAGE_NAME" /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-full.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
+    docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
   else
     # GH Action, runner
    echo "GH Action, runner, clean & re-create persistent container"
     docker rm -f $CONTAINER_NAME
     echo "echo 'Stopping container: $CONTAINER_NAME'" >> "$JOB_CLEANUP_SCRIPT"
     echo "docker stop --time=15 \"$CONTAINER_NAME\" || echo 'Failed to stop container or container not running'" >> "$JOB_CLEANUP_SCRIPT"
-    docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io -v "$CACHE_VOLUME_NAME":/cache --network host "$IMAGE_NAME" /hbb_exe/activate-exec bash
-    docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -c "source /opt/rh/gcc-toolset-11/enable && bash -x /io/build-full.sh '$GITHUB_REPOSITORY' '$GITHUB_SHA' '$BUILD_CORES' '$GITHUB_RUN_NUMBER'"
+    docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash
+    docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
     docker stop $CONTAINER_NAME
   fi
 fi
@@ -0,0 +1,851 @@
#ifndef RIPPLE_APP_RDB_BACKEND_FLATMAPDATABASE_H_INCLUDED
#define RIPPLE_APP_RDB_BACKEND_FLATMAPDATABASE_H_INCLUDED
#include <ripple/app/ledger/AcceptedLedger.h>
#include <ripple/app/ledger/LedgerMaster.h>
#include <ripple/app/ledger/TransactionMaster.h>
#include <ripple/app/rdb/backend/SQLiteDatabase.h>
#include <algorithm>
#include <map>
#include <mutex>
#include <optional>
#include <shared_mutex>
#include <vector>
#include <boost/unordered/concurrent_flat_map.hpp>
namespace ripple {
struct base_uint_hasher
{
using result_type = std::size_t;
result_type
operator()(base_uint<256> const& value) const
{
return hardened_hash<>{}(value);
}
result_type
operator()(AccountID const& value) const
{
return hardened_hash<>{}(value);
}
};
class FlatmapDatabase : public SQLiteDatabase
{
private:
struct LedgerData
{
LedgerInfo info;
boost::unordered::
concurrent_flat_map<uint256, AccountTx, base_uint_hasher>
transactions;
};
struct AccountTxData
{
boost::unordered::
concurrent_flat_map<std::pair<uint32_t, uint32_t>, AccountTx>
transactions;
};
Application& app_;
boost::unordered::concurrent_flat_map<LedgerIndex, LedgerData> ledgers_;
boost::unordered::
concurrent_flat_map<uint256, LedgerIndex, base_uint_hasher>
ledgerHashToSeq_;
boost::unordered::concurrent_flat_map<uint256, AccountTx, base_uint_hasher>
transactionMap_;
boost::unordered::
concurrent_flat_map<AccountID, AccountTxData, base_uint_hasher>
accountTxMap_;
public:
FlatmapDatabase(Application& app, Config const& config, JobQueue& jobQueue)
: app_(app)
{
}
std::optional<LedgerIndex>
getMinLedgerSeq() override
{
std::optional<LedgerIndex> minSeq;
ledgers_.visit_all([&minSeq](auto const& pair) {
if (!minSeq || pair.first < *minSeq)
{
minSeq = pair.first;
}
});
return minSeq;
}
std::optional<LedgerIndex>
getTransactionsMinLedgerSeq() override
{
std::optional<LedgerIndex> minSeq;
transactionMap_.visit_all([&minSeq](auto const& pair) {
LedgerIndex seq = pair.second.second->getLgrSeq();
if (!minSeq || seq < *minSeq)
{
minSeq = seq;
}
});
return minSeq;
}
std::optional<LedgerIndex>
getAccountTransactionsMinLedgerSeq() override
{
std::optional<LedgerIndex> minSeq;
accountTxMap_.visit_all([&minSeq](auto const& pair) {
pair.second.transactions.visit_all([&minSeq](auto const& tx) {
if (!minSeq || tx.first.first < *minSeq)
{
minSeq = tx.first.first;
}
});
});
return minSeq;
}
std::optional<LedgerIndex>
getMaxLedgerSeq() override
{
std::optional<LedgerIndex> maxSeq;
ledgers_.visit_all([&maxSeq](auto const& pair) {
if (!maxSeq || pair.first > *maxSeq)
{
maxSeq = pair.first;
}
});
return maxSeq;
}
void
deleteTransactionByLedgerSeq(LedgerIndex ledgerSeq) override
{
ledgers_.visit(ledgerSeq, [this](auto& item) {
item.second.transactions.visit_all([this](auto const& txPair) {
transactionMap_.erase(txPair.first);
});
item.second.transactions.clear();
});
accountTxMap_.visit_all([ledgerSeq](auto& item) {
item.second.transactions.erase_if([ledgerSeq](auto const& tx) {
return tx.first.first == ledgerSeq;
});
});
}
void
deleteBeforeLedgerSeq(LedgerIndex ledgerSeq) override
{
ledgers_.erase_if([this, ledgerSeq](auto const& item) {
if (item.first < ledgerSeq)
{
item.second.transactions.visit_all([this](auto const& txPair) {
transactionMap_.erase(txPair.first);
});
ledgerHashToSeq_.erase(item.second.info.hash);
return true;
}
return false;
});
accountTxMap_.visit_all([ledgerSeq](auto& item) {
item.second.transactions.erase_if([ledgerSeq](auto const& tx) {
return tx.first.first < ledgerSeq;
});
});
}
void
deleteTransactionsBeforeLedgerSeq(LedgerIndex ledgerSeq) override
{
ledgers_.visit_all([this, ledgerSeq](auto& item) {
if (item.first < ledgerSeq)
{
item.second.transactions.visit_all([this](auto const& txPair) {
transactionMap_.erase(txPair.first);
});
item.second.transactions.clear();
}
});
accountTxMap_.visit_all([ledgerSeq](auto& item) {
item.second.transactions.erase_if([ledgerSeq](auto const& tx) {
return tx.first.first < ledgerSeq;
});
});
}
void
deleteAccountTransactionsBeforeLedgerSeq(LedgerIndex ledgerSeq) override
{
accountTxMap_.visit_all([ledgerSeq](auto& item) {
item.second.transactions.erase_if([ledgerSeq](auto const& tx) {
return tx.first.first < ledgerSeq;
});
});
}
std::size_t
getTransactionCount() override
{
return transactionMap_.size();
}
std::size_t
getAccountTransactionCount() override
{
std::size_t count = 0;
accountTxMap_.visit_all([&count](auto const& item) {
count += item.second.transactions.size();
});
return count;
}
CountMinMax
getLedgerCountMinMax() override
{
CountMinMax result{0, 0, 0};
ledgers_.visit_all([&result](auto const& item) {
result.numberOfRows++;
if (result.minLedgerSequence == 0 ||
item.first < result.minLedgerSequence)
{
result.minLedgerSequence = item.first;
}
if (item.first > result.maxLedgerSequence)
{
result.maxLedgerSequence = item.first;
}
});
return result;
}
bool
saveValidatedLedger(
std::shared_ptr<Ledger const> const& ledger,
bool current) override
{
try
{
LedgerData ledgerData;
ledgerData.info = ledger->info();
auto aLedger = std::make_shared<AcceptedLedger>(ledger, app_);
for (auto const& acceptedLedgerTx : *aLedger)
{
auto const& txn = acceptedLedgerTx->getTxn();
auto const& meta = acceptedLedgerTx->getMeta();
auto const& id = txn->getTransactionID();
std::string reason;
auto accTx = std::make_pair(
std::make_shared<ripple::Transaction>(txn, reason, app_),
std::make_shared<ripple::TxMeta>(meta));
ledgerData.transactions.emplace(id, accTx);
transactionMap_.emplace(id, accTx);
                for (auto const& account : meta.getAffectedAccounts())
                {
                    // visit() is a no-op for keys that are not yet present,
                    // so ensure the account has an entry before visiting it;
                    // otherwise first-seen accounts would never be recorded.
                    accountTxMap_.try_emplace(account);
                    accountTxMap_.visit(account, [&](auto& data) {
                        data.second.transactions.emplace(
                            std::make_pair(
                                ledger->info().seq,
                                acceptedLedgerTx->getTxnSeq()),
                            accTx);
                    });
                }
}
ledgers_.emplace(ledger->info().seq, std::move(ledgerData));
ledgerHashToSeq_.emplace(ledger->info().hash, ledger->info().seq);
if (current)
{
auto const cutoffSeq =
ledger->info().seq > app_.config().LEDGER_HISTORY
? ledger->info().seq - app_.config().LEDGER_HISTORY
: 0;
if (cutoffSeq > 0)
{
const std::size_t BATCH_SIZE = 128;
std::size_t deleted = 0;
ledgers_.erase_if([&](auto const& item) {
if (deleted >= BATCH_SIZE)
return false;
if (item.first < cutoffSeq)
{
item.second.transactions.visit_all(
[this](auto const& txPair) {
transactionMap_.erase(txPair.first);
});
ledgerHashToSeq_.erase(item.second.info.hash);
deleted++;
return true;
}
return false;
});
if (deleted > 0)
{
accountTxMap_.visit_all([cutoffSeq](auto& item) {
item.second.transactions.erase_if(
[cutoffSeq](auto const& tx) {
return tx.first.first < cutoffSeq;
});
});
}
app_.getLedgerMaster().clearPriorLedgers(cutoffSeq);
}
}
return true;
}
catch (std::exception const&)
{
deleteTransactionByLedgerSeq(ledger->info().seq);
return false;
}
}
std::optional<LedgerInfo>
getLedgerInfoByIndex(LedgerIndex ledgerSeq) override
{
std::optional<LedgerInfo> result;
ledgers_.visit(ledgerSeq, [&result](auto const& item) {
result = item.second.info;
});
return result;
}
std::optional<LedgerInfo>
getNewestLedgerInfo() override
{
std::optional<LedgerInfo> result;
ledgers_.visit_all([&result](auto const& item) {
if (!result || item.second.info.seq > result->seq)
{
result = item.second.info;
}
});
return result;
}
std::optional<LedgerInfo>
getLimitedOldestLedgerInfo(LedgerIndex ledgerFirstIndex) override
{
std::optional<LedgerInfo> result;
ledgers_.visit_all([&](auto const& item) {
if (item.first >= ledgerFirstIndex &&
(!result || item.first < result->seq))
{
result = item.second.info;
}
});
return result;
}
std::optional<LedgerInfo>
getLimitedNewestLedgerInfo(LedgerIndex ledgerFirstIndex) override
{
std::optional<LedgerInfo> result;
ledgers_.visit_all([&](auto const& item) {
if (item.first >= ledgerFirstIndex &&
(!result || item.first > result->seq))
{
result = item.second.info;
}
});
return result;
}
std::optional<LedgerInfo>
getLedgerInfoByHash(uint256 const& ledgerHash) override
{
std::optional<LedgerInfo> result;
ledgerHashToSeq_.visit(ledgerHash, [this, &result](auto const& item) {
ledgers_.visit(item.second, [&result](auto const& item) {
result = item.second.info;
});
});
return result;
}
uint256
getHashByIndex(LedgerIndex ledgerIndex) override
{
uint256 result;
ledgers_.visit(ledgerIndex, [&result](auto const& item) {
result = item.second.info.hash;
});
return result;
}
std::optional<LedgerHashPair>
getHashesByIndex(LedgerIndex ledgerIndex) override
{
std::optional<LedgerHashPair> result;
ledgers_.visit(ledgerIndex, [&result](auto const& item) {
result = LedgerHashPair{
item.second.info.hash, item.second.info.parentHash};
});
return result;
}
std::map<LedgerIndex, LedgerHashPair>
getHashesByIndex(LedgerIndex minSeq, LedgerIndex maxSeq) override
{
std::map<LedgerIndex, LedgerHashPair> result;
ledgers_.visit_all([&](auto const& item) {
if (item.first >= minSeq && item.first <= maxSeq)
{
result[item.first] = LedgerHashPair{
item.second.info.hash, item.second.info.parentHash};
}
});
return result;
}
std::variant<AccountTx, TxSearched>
getTransaction(
uint256 const& id,
std::optional<ClosedInterval<std::uint32_t>> const& range,
error_code_i& ec) override
{
std::variant<AccountTx, TxSearched> result = TxSearched::unknown;
transactionMap_.visit(id, [&](auto const& item) {
auto const& tx = item.second;
if (!range ||
(range->lower() <= tx.second->getLgrSeq() &&
tx.second->getLgrSeq() <= range->upper()))
{
result = tx;
}
else
{
result = TxSearched::all;
}
});
return result;
}
bool
ledgerDbHasSpace(Config const& config) override
{
return true; // In-memory database always has space
}
bool
transactionDbHasSpace(Config const& config) override
{
return true; // In-memory database always has space
}
std::uint32_t
getKBUsedAll() override
{
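        // Rough lower bound: entries are costed at sizeof() only, so heap
        // memory owned by the blobs and shared_ptrs is not counted.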
std::uint32_t size = sizeof(*this);
size += ledgers_.size() * (sizeof(LedgerIndex) + sizeof(LedgerData));
size +=
ledgerHashToSeq_.size() * (sizeof(uint256) + sizeof(LedgerIndex));
size += transactionMap_.size() * (sizeof(uint256) + sizeof(AccountTx));
accountTxMap_.visit_all([&size](auto const& item) {
size += sizeof(AccountID) + sizeof(AccountTxData);
size += item.second.transactions.size() * sizeof(AccountTx);
});
return size / 1024; // Convert to KB
}
std::uint32_t
getKBUsedLedger() override
{
std::uint32_t size =
ledgers_.size() * (sizeof(LedgerIndex) + sizeof(LedgerData));
size +=
ledgerHashToSeq_.size() * (sizeof(uint256) + sizeof(LedgerIndex));
return size / 1024;
}
std::uint32_t
getKBUsedTransaction() override
{
std::uint32_t size =
transactionMap_.size() * (sizeof(uint256) + sizeof(AccountTx));
accountTxMap_.visit_all([&size](auto const& item) {
size += sizeof(AccountID) + sizeof(AccountTxData);
size += item.second.transactions.size() * sizeof(AccountTx);
});
return size / 1024;
}
void
closeLedgerDB() override
{
// No-op for in-memory database
}
void
closeTransactionDB() override
{
// No-op for in-memory database
}
~FlatmapDatabase()
{
// Concurrent maps need visit_all
accountTxMap_.visit_all(
[](auto& pair) { pair.second.transactions.clear(); });
accountTxMap_.clear();
transactionMap_.clear();
ledgers_.visit_all(
[](auto& pair) { pair.second.transactions.clear(); });
ledgers_.clear();
ledgerHashToSeq_.clear();
}
std::vector<std::shared_ptr<Transaction>>
getTxHistory(LedgerIndex startIndex) override
{
std::vector<std::shared_ptr<Transaction>> result;
transactionMap_.visit_all([&](auto const& item) {
if (item.second.second->getLgrSeq() >= startIndex)
{
result.push_back(item.second.first);
}
});
std::sort(
result.begin(), result.end(), [](auto const& a, auto const& b) {
return a->getLedger() > b->getLedger();
});
if (result.size() > 20)
{
result.resize(20);
}
return result;
}
// Helper function to handle limits
template <typename Container>
void
applyLimit(Container& container, std::size_t limit, bool bUnlimited)
{
if (!bUnlimited && limit > 0 && container.size() > limit)
{
container.resize(limit);
}
}
AccountTxs
getOldestAccountTxs(AccountTxOptions const& options) override
{
AccountTxs result;
accountTxMap_.visit(options.account, [&](auto const& item) {
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
result.push_back(tx.second);
}
});
});
std::sort(
result.begin(), result.end(), [](auto const& a, auto const& b) {
return a.second->getLgrSeq() < b.second->getLgrSeq();
});
applyLimit(result, options.limit, options.bUnlimited);
return result;
}
AccountTxs
getNewestAccountTxs(AccountTxOptions const& options) override
{
AccountTxs result;
accountTxMap_.visit(options.account, [&](auto const& item) {
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
result.push_back(tx.second);
}
});
});
std::sort(
result.begin(), result.end(), [](auto const& a, auto const& b) {
return a.second->getLgrSeq() > b.second->getLgrSeq();
});
applyLimit(result, options.limit, options.bUnlimited);
return result;
}
MetaTxsList
getOldestAccountTxsB(AccountTxOptions const& options) override
{
MetaTxsList result;
accountTxMap_.visit(options.account, [&](auto const& item) {
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
result.emplace_back(
tx.second.first->getSTransaction()
->getSerializer()
.peekData(),
tx.second.second->getAsObject()
.getSerializer()
.peekData(),
tx.first.first);
}
});
});
std::sort(
result.begin(), result.end(), [](auto const& a, auto const& b) {
return std::get<2>(a) < std::get<2>(b);
});
applyLimit(result, options.limit, options.bUnlimited);
return result;
}
MetaTxsList
getNewestAccountTxsB(AccountTxOptions const& options) override
{
MetaTxsList result;
accountTxMap_.visit(options.account, [&](auto const& item) {
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
result.emplace_back(
tx.second.first->getSTransaction()
->getSerializer()
.peekData(),
tx.second.second->getAsObject()
.getSerializer()
.peekData(),
tx.first.first);
}
});
});
std::sort(
result.begin(), result.end(), [](auto const& a, auto const& b) {
return std::get<2>(a) > std::get<2>(b);
});
applyLimit(result, options.limit, options.bUnlimited);
return result;
}
std::pair<AccountTxs, std::optional<AccountTxMarker>>
oldestAccountTxPage(AccountTxPageOptions const& options) override
{
AccountTxs result;
std::optional<AccountTxMarker> marker;
accountTxMap_.visit(options.account, [&](auto const& item) {
std::vector<std::pair<std::pair<uint32_t, uint32_t>, AccountTx>>
txs;
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
txs.emplace_back(tx);
}
});
std::sort(txs.begin(), txs.end(), [](auto const& a, auto const& b) {
return a.first < b.first;
});
auto it = txs.begin();
if (options.marker)
{
it = std::find_if(txs.begin(), txs.end(), [&](auto const& tx) {
return tx.first.first == options.marker->ledgerSeq &&
tx.first.second == options.marker->txnSeq;
});
if (it != txs.end())
++it;
}
for (; it != txs.end() &&
(options.limit == 0 || result.size() < options.limit);
++it)
{
result.push_back(it->second);
}
if (it != txs.end())
{
marker = AccountTxMarker{it->first.first, it->first.second};
}
});
return {result, marker};
}
std::pair<AccountTxs, std::optional<AccountTxMarker>>
newestAccountTxPage(AccountTxPageOptions const& options) override
{
AccountTxs result;
std::optional<AccountTxMarker> marker;
accountTxMap_.visit(options.account, [&](auto const& item) {
std::vector<std::pair<std::pair<uint32_t, uint32_t>, AccountTx>>
txs;
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
txs.emplace_back(tx);
}
});
std::sort(txs.begin(), txs.end(), [](auto const& a, auto const& b) {
return a.first > b.first;
});
auto it = txs.begin();
if (options.marker)
{
it = std::find_if(txs.begin(), txs.end(), [&](auto const& tx) {
return tx.first.first == options.marker->ledgerSeq &&
tx.first.second == options.marker->txnSeq;
});
if (it != txs.end())
++it;
}
for (; it != txs.end() &&
(options.limit == 0 || result.size() < options.limit);
++it)
{
result.push_back(it->second);
}
if (it != txs.end())
{
marker = AccountTxMarker{it->first.first, it->first.second};
}
});
return {result, marker};
}
std::pair<MetaTxsList, std::optional<AccountTxMarker>>
oldestAccountTxPageB(AccountTxPageOptions const& options) override
{
MetaTxsList result;
std::optional<AccountTxMarker> marker;
accountTxMap_.visit(options.account, [&](auto const& item) {
std::vector<std::tuple<uint32_t, uint32_t, AccountTx>> txs;
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
txs.emplace_back(
tx.first.first, tx.first.second, tx.second);
}
});
std::sort(txs.begin(), txs.end());
auto it = txs.begin();
if (options.marker)
{
it = std::find_if(txs.begin(), txs.end(), [&](auto const& tx) {
return std::get<0>(tx) == options.marker->ledgerSeq &&
std::get<1>(tx) == options.marker->txnSeq;
});
if (it != txs.end())
++it;
}
for (; it != txs.end() &&
(options.limit == 0 || result.size() < options.limit);
++it)
{
                const auto& [lgrSeq, txSeq, tx] = *it;
                result.emplace_back(
                    tx.first->getSTransaction()->getSerializer().peekData(),
                    tx.second->getAsObject().getSerializer().peekData(),
                    lgrSeq);
}
if (it != txs.end())
{
marker = AccountTxMarker{std::get<0>(*it), std::get<1>(*it)};
}
});
return {result, marker};
}
std::pair<MetaTxsList, std::optional<AccountTxMarker>>
newestAccountTxPageB(AccountTxPageOptions const& options) override
{
MetaTxsList result;
std::optional<AccountTxMarker> marker;
accountTxMap_.visit(options.account, [&](auto const& item) {
std::vector<std::tuple<uint32_t, uint32_t, AccountTx>> txs;
item.second.transactions.visit_all([&](auto const& tx) {
if (tx.first.first >= options.minLedger &&
tx.first.first <= options.maxLedger)
{
txs.emplace_back(
tx.first.first, tx.first.second, tx.second);
}
});
std::sort(txs.begin(), txs.end(), std::greater<>());
auto it = txs.begin();
if (options.marker)
{
it = std::find_if(txs.begin(), txs.end(), [&](auto const& tx) {
return std::get<0>(tx) == options.marker->ledgerSeq &&
std::get<1>(tx) == options.marker->txnSeq;
});
if (it != txs.end())
++it;
}
for (; it != txs.end() &&
(options.limit == 0 || result.size() < options.limit);
++it)
{
                const auto& [lgrSeq, txSeq, tx] = *it;
                result.emplace_back(
                    tx.first->getSTransaction()->getSerializer().peekData(),
                    tx.second->getAsObject().getSerializer().peekData(),
                    lgrSeq);
}
if (it != txs.end())
{
marker = AccountTxMarker{std::get<0>(*it), std::get<1>(*it)};
}
});
return {result, marker};
}
};
// Factory function
std::unique_ptr<SQLiteDatabase>
getFlatmapDatabase(Application& app, Config const& config, JobQueue& jobQueue)
{
return std::make_unique<FlatmapDatabase>(app, config, jobQueue);
}
} // namespace ripple
#endif // RIPPLE_APP_RDB_BACKEND_FLATMAPDATABASE_H_INCLUDED
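A note on the container API this file leans on: boost::unordered::concurrent_flat_map (available since Boost 1.83) exposes no iterators, so every read, aggregate, and prune above goes through visit()/visit_all()/erase_if() callbacks. A minimal, self-contained sketch of that pattern — toy types, not rippled code:

#include <boost/unordered/concurrent_flat_map.hpp>
#include <cstdint>
#include <iostream>
#include <optional>

int
main()
{
    // Toy stand-in for ledgers_: ledger sequence -> payload.
    boost::unordered::concurrent_flat_map<std::uint32_t, int> ledgers;
    ledgers.emplace(5, 50);
    ledgers.emplace(9, 90);

    // visit() runs the callback under an internal lock for that element
    // and is a no-op when the key is absent.
    ledgers.visit(5, [](auto& kv) { kv.second += 1; });

    // visit_all() is the only way to traverse; this mirrors how
    // getMinLedgerSeq() computes its aggregate above.
    std::optional<std::uint32_t> minSeq;
    ledgers.visit_all([&](auto const& kv) {
        if (!minSeq || kv.first < *minSeq)
            minSeq = kv.first;
    });

    // erase_if() is the pruning primitive used by deleteBeforeLedgerSeq().
    ledgers.erase_if([](auto const& kv) { return kv.first < 6; });

    std::cout << "min=" << *minSeq << " size=" << ledgers.size() << '\n';
}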
@@ -28,8 +28,9 @@ private:
     struct AccountTxData
     {
-        std::map<uint32_t, AccountTxs>
-            ledgerTxMap;  // ledgerSeq -> vector of AccountTx
+        AccountTxs transactions;
+        std::map<uint32_t, std::map<uint32_t, size_t>>
+            ledgerTxMap;  // ledgerSeq -> txSeq -> index in transactions
     };
 
     Application& app_;
@@ -64,12 +65,9 @@ public:
             return {};
         std::shared_lock<std::shared_mutex> lock(mutex_);
-        for (const auto& [ledgerSeq, ledgerData] : ledgers_)
-        {
-            if (!ledgerData.transactions.empty())
-                return ledgerSeq;
-        }
-        return std::nullopt;
+        if (transactionMap_.empty())
+            return std::nullopt;
+        return transactionMap_.begin()->second.second->getLgrSeq();
     }
 
     std::optional<LedgerIndex>
@@ -165,6 +163,14 @@
                 {
                     txIt = accountData.ledgerTxMap.erase(txIt);
                 }
+                accountData.transactions.erase(
+                    std::remove_if(
+                        accountData.transactions.begin(),
+                        accountData.transactions.end(),
+                        [ledgerSeq](const AccountTx& tx) {
+                            return tx.second->getLgrSeq() < ledgerSeq;
+                        }),
+                    accountData.transactions.end());
             }
         }
 
     std::size_t
@@ -187,10 +193,7 @@
         std::size_t count = 0;
         for (const auto& [_, accountData] : accountTxMap_)
         {
-            for (const auto& [_, txVector] : accountData.ledgerTxMap)
-            {
-                count += txVector.size();
-            }
+            count += accountData.transactions.size();
         }
         return count;
     }
@@ -290,7 +293,10 @@
                         accountTxMap_[account] = AccountTxData();
                     auto& accountData = accountTxMap_[account];
-                    accountData.ledgerTxMap[seq].push_back(accTx);
+                    accountData.transactions.push_back(accTx);
+                    accountData
+                        .ledgerTxMap[seq][acceptedLedgerTx->getTxnSeq()] =
+                        accountData.transactions.size() - 1;
                 }
             app_.getMasterTransaction().inLedger(
@@ -304,46 +310,6 @@
         // Overwrite Current Ledger
         ledgers_[seq] = std::move(ledgerData);
         ledgerHashToSeq_[ledger->info().hash] = seq;
-
-        // Automatic cleanup based on LEDGER_HISTORY (ported from
-        // FlatmapDatabase)
-        if (current)
-        {
-            auto const cutoffSeq =
-                ledger->info().seq > app_.config().LEDGER_HISTORY
-                ? ledger->info().seq - app_.config().LEDGER_HISTORY
-                : 0;
-            if (cutoffSeq > 0)
-            {
-                // Delete old ledgers before cutoff
-                auto it = ledgers_.begin();
-                while (it != ledgers_.end() && it->first < cutoffSeq)
-                {
-                    // Clean up transactions from this ledger
-                    for (const auto& [txHash, _] : it->second.transactions)
-                    {
-                        transactionMap_.erase(txHash);
-                    }
-                    ledgerHashToSeq_.erase(it->second.info.hash);
-                    it = ledgers_.erase(it);
-                }
-                // Clean up account transactions before cutoff
-                for (auto& [_, accountData] : accountTxMap_)
-                {
-                    auto txIt = accountData.ledgerTxMap.begin();
-                    while (txIt != accountData.ledgerTxMap.end() &&
-                           txIt->first < cutoffSeq)
-                    {
-                        txIt = accountData.ledgerTxMap.erase(txIt);
-                    }
-                }
-                app_.getLedgerMaster().clearPriorLedgers(cutoffSeq);
-            }
-        }
         return true;
     }
@@ -497,9 +463,11 @@
         for (const auto& [_, accountData] : accountTxMap_)
         {
             size += sizeof(AccountID) + sizeof(AccountTxData);
-            for (const auto& [_, txVector] : accountData.ledgerTxMap)
+            size += accountData.transactions.size() * sizeof(AccountTx);
+            for (const auto& [_, innerMap] : accountData.ledgerTxMap)
             {
-                size += sizeof(uint32_t) + txVector.size() * sizeof(AccountTx);
+                size += sizeof(uint32_t) +
+                    innerMap.size() * (sizeof(uint32_t) + sizeof(size_t));
             }
         }
         return size / 1024;
@@ -528,9 +496,11 @@
         for (const auto& [_, accountData] : accountTxMap_)
         {
             size += sizeof(AccountID) + sizeof(AccountTxData);
-            for (const auto& [_, txVector] : accountData.ledgerTxMap)
+            size += accountData.transactions.size() * sizeof(AccountTx);
+            for (const auto& [_, innerMap] : accountData.ledgerTxMap)
             {
-                size += sizeof(uint32_t) + txVector.size() * sizeof(AccountTx);
+                size += sizeof(uint32_t) +
+                    innerMap.size() * (sizeof(uint32_t) + sizeof(size_t));
             }
         }
         return size / 1024;
@@ -635,13 +605,14 @@
              (options.bUnlimited || result.size() < options.limit);
              ++txIt)
         {
-            for (const auto& accountTx : txIt->second)
+            for (const auto& [txSeq, txIndex] : txIt->second)
             {
                 if (skipped < options.offset)
                 {
                     ++skipped;
                     continue;
                 }
+                AccountTx const accountTx = accountData.transactions[txIndex];
                 std::uint32_t const inLedger = rangeCheckedCast<std::uint32_t>(
                     accountTx.second->getLgrSeq());
                 accountTx.first->setStatus(COMMITTED);
@@ -686,7 +657,8 @@
                     ++skipped;
                     continue;
                 }
-                const AccountTx& accountTx = *innerRIt;
+                AccountTx const accountTx =
+                    accountData.transactions[innerRIt->second];
                 std::uint32_t const inLedger = rangeCheckedCast<std::uint32_t>(
                     accountTx.second->getLgrSeq());
                 accountTx.first->setLedger(inLedger);
@@ -720,14 +692,14 @@
              (options.bUnlimited || result.size() < options.limit);
              ++txIt)
         {
-            for (const auto& accountTx : txIt->second)
+            for (const auto& [txSeq, txIndex] : txIt->second)
             {
                 if (skipped < options.offset)
                 {
                     ++skipped;
                     continue;
                 }
-                const auto& [txn, txMeta] = accountTx;
+                const auto& [txn, txMeta] = accountData.transactions[txIndex];
                 result.emplace_back(
                     txn->getSTransaction()->getSerializer().peekData(),
                     txMeta->getAsObject().getSerializer().peekData(),
@@ -771,7 +743,8 @@
                     ++skipped;
                     continue;
                 }
-                const auto& [txn, txMeta] = *innerRIt;
+                const auto& [txn, txMeta] =
+                    accountData.transactions[innerRIt->second];
                 result.emplace_back(
                     txn->getSTransaction()->getSerializer().peekData(),
                     txMeta->getAsObject().getSerializer().peekData(),
@@ -843,9 +816,11 @@
         for (; txIt != txEnd; ++txIt)
         {
             std::uint32_t const ledgerSeq = txIt->first;
-            for (size_t txnSeq = 0; txnSeq < txIt->second.size(); ++txnSeq)
+            for (auto seqIt = txIt->second.begin();
+                 seqIt != txIt->second.end();
+                 ++seqIt)
             {
-                const auto& accountTx = txIt->second[txnSeq];
+                const auto& [txnSeq, index] = *seqIt;
                 if (lookingForMarker)
                 {
                     if (findLedger == ledgerSeq && findSeq == txnSeq)
@@ -858,15 +833,16 @@
                 else if (numberOfResults == 0)
                 {
                     newmarker = {
-                        rangeCheckedCast<std::uint32_t>(ledgerSeq),
-                        static_cast<std::uint32_t>(txnSeq)};
+                        rangeCheckedCast<std::uint32_t>(ledgerSeq), txnSeq};
                     return {newmarker, total};
                 }
-                Blob rawTxn = accountTx.first->getSTransaction()
+                Blob rawTxn = accountData.transactions[index]
+                                  .first->getSTransaction()
                                   ->getSerializer()
                                   .peekData();
-                Blob rawMeta = accountTx.second->getAsObject()
+                Blob rawMeta = accountData.transactions[index]
+                                   .second->getAsObject()
                                    .getSerializer()
                                    .peekData();
@@ -895,10 +871,11 @@
         for (; rtxIt != rtxEnd; ++rtxIt)
         {
             std::uint32_t const ledgerSeq = rtxIt->first;
-            for (int txnSeq = rtxIt->second.size() - 1; txnSeq >= 0;
-                 --txnSeq)
+            for (auto innerRIt = rtxIt->second.rbegin();
+                 innerRIt != rtxIt->second.rend();
+                 ++innerRIt)
             {
-                const auto& accountTx = rtxIt->second[txnSeq];
+                const auto& [txnSeq, index] = *innerRIt;
                 if (lookingForMarker)
                 {
                     if (findLedger == ledgerSeq && findSeq == txnSeq)
@@ -911,15 +888,16 @@
                 else if (numberOfResults == 0)
                 {
                     newmarker = {
-                        rangeCheckedCast<std::uint32_t>(ledgerSeq),
-                        static_cast<std::uint32_t>(txnSeq)};
+                        rangeCheckedCast<std::uint32_t>(ledgerSeq), txnSeq};
                     return {newmarker, total};
                 }
-                Blob rawTxn = accountTx.first->getSTransaction()
+                Blob rawTxn = accountData.transactions[index]
+                                  .first->getSTransaction()
                                   ->getSerializer()
                                   .peekData();
-                Blob rawMeta = accountTx.second->getAsObject()
+                Blob rawMeta = accountData.transactions[index]
+                                   .second->getAsObject()
                                    .getSerializer()
                                    .peekData();
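To summarize the reshaped layout on the new side of this diff: each account's transactions now live exactly once in a flat AccountTxs vector, and ledgerTxMap only maps (ledgerSeq, txnSeq) to an index into that vector — the ordered walk the pagination loops above perform. A toy sketch (hypothetical names, std::string standing in for AccountTx):

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Toy stand-in for the new AccountTxData: the flat vector owns the
// transactions; the nested map orders them by (ledgerSeq, txnSeq) and
// resolves each pair to an index into the vector.
struct ToyAccountTxData
{
    std::vector<std::string> transactions;
    std::map<std::uint32_t, std::map<std::uint32_t, std::size_t>> ledgerTxMap;

    void
    add(std::uint32_t ledgerSeq, std::uint32_t txnSeq, std::string tx)
    {
        transactions.push_back(std::move(tx));
        ledgerTxMap[ledgerSeq][txnSeq] = transactions.size() - 1;
    }
};

int
main()
{
    ToyAccountTxData data;
    data.add(7, 0, "tx-a");
    data.add(7, 1, "tx-b");
    data.add(9, 0, "tx-c");

    // Ordered forward walk, as in oldestAccountTxPage(): iterate ledgers,
    // then transaction sequence numbers, and fetch through the index.
    for (auto const& [ledgerSeq, bySeq] : data.ledgerTxMap)
        for (auto const& [txnSeq, index] : bySeq)
            std::cout << ledgerSeq << '/' << txnSeq << " -> "
                      << data.transactions[index] << '\n';
}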
@@ -19,6 +19,7 @@
 #include <ripple/app/main/Application.h>
 #include <ripple/app/rdb/RelationalDatabase.h>
+#include <ripple/app/rdb/backend/FlatmapDatabase.h>
 #include <ripple/app/rdb/backend/RWDBDatabase.h>
 #include <ripple/core/ConfigSections.h>
 #include <ripple/nodestore/DatabaseShard.h>
@@ -40,6 +41,7 @@ RelationalDatabase::init(
     bool use_sqlite = false;
     bool use_postgres = false;
     bool use_rwdb = false;
+    bool use_flatmap = false;
     if (config.reporting())
     {
@@ -58,6 +60,10 @@ RelationalDatabase::init(
         {
             use_rwdb = true;
         }
+        else if (boost::iequals(get(rdb_section, "backend"), "flatmap"))
+        {
+            use_flatmap = true;
+        }
         else
         {
             Throw<std::runtime_error>(
@@ -83,6 +89,10 @@ RelationalDatabase::init(
     {
         return getRWDBDatabase(app, config, jobQueue);
     }
+    else if (use_flatmap)
+    {
+        return getFlatmapDatabase(app, config, jobQueue);
+    }
     return std::unique_ptr<RelationalDatabase>();
 }
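
The hunks above extend the existing selection pattern: parse the backend name
from the [relational_db] section into a use_* flag, then dispatch to exactly
one factory, falling through to an empty pointer. A condensed sketch of that
shape (hypothetical simplified types; exact-match lookup where the real code
compares case-insensitively via boost::iequals):

#include <functional>
#include <map>
#include <memory>
#include <stdexcept>
#include <string>

struct RelationalDatabase { virtual ~RelationalDatabase() = default; };
struct SQLiteDB : RelationalDatabase {};
struct RWDB : RelationalDatabase {};
struct FlatmapDB : RelationalDatabase {};

std::unique_ptr<RelationalDatabase>
makeDatabase(std::string const& backend)
{
    // One entry per supported backend; adding "flatmap" here mirrors the
    // new use_flatmap branch in RelationalDatabase::init.
    static std::map<std::string,
                    std::function<std::unique_ptr<RelationalDatabase>()>> const
        factories{
            {"sqlite", [] { return std::make_unique<SQLiteDB>(); }},
            {"rwdb", [] { return std::make_unique<RWDB>(); }},
            {"flatmap", [] { return std::make_unique<FlatmapDB>(); }},
        };
    if (auto const it = factories.find(backend); it != factories.end())
        return it->second();
    throw std::runtime_error("Invalid rdb backend: " + backend);
}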

View File

@@ -363,7 +363,7 @@ public:
         (!section("node_db").empty() &&
          (boost::beast::iequals(get(section("node_db"), "type"), "rwdb") ||
           boost::beast::iequals(
-              get(section("node_db"), "type"), "memory")));
+              get(section("node_db"), "type"), "flatmap")));
     // RHNOTE: memory type is not selected for here because it breaks
     // tests
     return isMem;
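
The condition above is an ASCII case-insensitive compare, so swapping
"memory" for "flatmap" means any capitalization of a node_db type of flatmap
now satisfies the in-memory check (per the RHNOTE, "memory" itself stays
excluded because it breaks tests). A tiny sketch of boost::beast::iequals:

#include <boost/beast/core/string.hpp>
#include <iostream>

int main()
{
    std::cout << boost::beast::iequals("Flatmap", "flatmap") << '\n';  // 1
    std::cout << boost::beast::iequals("rwdb", "RWDB") << '\n';        // 1
    std::cout << boost::beast::iequals("nudb", "flatmap") << '\n';     // 0
}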

View File

@@ -0,0 +1,235 @@
#include <ripple/basics/contract.h>
#include <ripple/nodestore/Factory.h>
#include <ripple/nodestore/Manager.h>
#include <ripple/nodestore/impl/DecodedBlob.h>
#include <ripple/nodestore/impl/EncodedBlob.h>
#include <ripple/nodestore/impl/codec.h>
#include <boost/beast/core/string.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/unordered/concurrent_flat_map.hpp>
#include <memory>
#include <mutex>
namespace ripple {
namespace NodeStore {
class FlatmapBackend : public Backend
{
private:
std::string name_;
beast::Journal journal_;
bool isOpen_{false};
struct base_uint_hasher
{
using result_type = std::size_t;
result_type
operator()(base_uint<256> const& value) const
{
return hardened_hash<>{}(value);
}
};
using DataStore = boost::unordered::concurrent_flat_map<
uint256,
std::vector<std::uint8_t>, // Store compressed blob data
base_uint_hasher>;
DataStore table_;
public:
FlatmapBackend(
size_t keyBytes,
Section const& keyValues,
beast::Journal journal)
: name_(get(keyValues, "path")), journal_(journal)
{
boost::ignore_unused(journal_);
if (name_.empty())
name_ = "node_db";
}
~FlatmapBackend() override
{
close();
}
std::string
getName() override
{
return name_;
}
void
open(bool createIfMissing) override
{
if (isOpen_)
Throw<std::runtime_error>("already open");
isOpen_ = true;
}
bool
isOpen() override
{
return isOpen_;
}
void
close() override
{
table_.clear();
isOpen_ = false;
}
Status
fetch(void const* key, std::shared_ptr<NodeObject>* pObject) override
{
if (!isOpen_)
return notFound;
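        // Decompress and decode entirely inside the visit() callback, so
        // only the reconstructed NodeObject (or nullptr on corrupt data)
        // escapes the map's internal lock.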
uint256 const hash(uint256::fromVoid(key));
bool found = table_.visit(hash, [&](const auto& key_value_pair) {
nudb::detail::buffer bf;
auto const result = nodeobject_decompress(
key_value_pair.second.data(), key_value_pair.second.size(), bf);
DecodedBlob decoded(hash.data(), result.first, result.second);
if (!decoded.wasOk())
{
*pObject = nullptr;
return;
}
*pObject = decoded.createObject();
});
return found ? (*pObject ? ok : dataCorrupt) : notFound;
}
std::pair<std::vector<std::shared_ptr<NodeObject>>, Status>
fetchBatch(std::vector<uint256 const*> const& hashes) override
{
std::vector<std::shared_ptr<NodeObject>> results;
results.reserve(hashes.size());
for (auto const& h : hashes)
{
std::shared_ptr<NodeObject> nObj;
Status status = fetch(h->begin(), &nObj);
if (status != ok)
results.push_back({});
else
results.push_back(nObj);
}
return {results, ok};
}
void
store(std::shared_ptr<NodeObject> const& object) override
{
if (!isOpen_)
return;
if (!object)
return;
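        // Serialize, then compress with the shared nodestore codec; the map
        // stores only the compressed bytes.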
EncodedBlob encoded(object);
nudb::detail::buffer bf;
auto const result =
nodeobject_compress(encoded.getData(), encoded.getSize(), bf);
std::vector<std::uint8_t> compressed(
static_cast<const std::uint8_t*>(result.first),
static_cast<const std::uint8_t*>(result.first) + result.second);
table_.insert_or_assign(object->getHash(), std::move(compressed));
}
void
storeBatch(Batch const& batch) override
{
for (auto const& e : batch)
store(e);
}
void
sync() override
{
}
void
for_each(std::function<void(std::shared_ptr<NodeObject>)> f) override
{
if (!isOpen_)
return;
table_.visit_all([&f](const auto& entry) {
nudb::detail::buffer bf;
auto const result = nodeobject_decompress(
entry.second.data(), entry.second.size(), bf);
DecodedBlob decoded(
entry.first.data(), result.first, result.second);
if (decoded.wasOk())
f(decoded.createObject());
});
}
int
getWriteLoad() override
{
return 0;
}
void
setDeletePath() override
{
close();
}
int
fdRequired() const override
{
return 0;
}
private:
size_t
size() const
{
return table_.size();
}
};
class FlatmapFactory : public Factory
{
public:
FlatmapFactory()
{
Manager::instance().insert(*this);
}
~FlatmapFactory() override
{
Manager::instance().erase(*this);
}
std::string
getName() const override
{
return "Flatmap";
}
std::unique_ptr<Backend>
createInstance(
size_t keyBytes,
Section const& keyValues,
std::size_t burstSize,
Scheduler& scheduler,
beast::Journal journal) override
{
return std::make_unique<FlatmapBackend>(keyBytes, keyValues, journal);
}
};
static FlatmapFactory flatmapFactory;
} // namespace NodeStore
} // namespace ripple
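
FlatmapBackend is a thin wrapper over boost::unordered::concurrent_flat_map
(Boost 1.83+), which never hands out iterators or references: every read runs
inside a visit() callback under the map's internal synchronization. A
self-contained sketch of the three access patterns the backend relies on:

#include <boost/unordered/concurrent_flat_map.hpp>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    boost::unordered::concurrent_flat_map<std::string,
                                          std::vector<std::uint8_t>> table;

    // insert_or_assign, as in store(): last write for a key wins.
    table.insert_or_assign("abc", std::vector<std::uint8_t>{1, 2, 3});

    // visit() returns the number of elements visited (0 or 1 for a map);
    // fetch() converts that to found / notFound.
    bool const found = table.visit("abc", [](auto const& kv) {
        std::cout << "value bytes: " << kv.second.size() << '\n';
    });
    std::cout << (found ? "found" : "not found") << '\n';

    // visit_all() underlies for_each(): it walks every entry under the
    // map's synchronization.
    table.visit_all([](auto const& kv) { std::cout << kv.first << '\n'; });
}

Storing pre-compressed byte vectors (via nodeobject_compress, the same codec
the NuDB backend uses on disk) keeps the value type cheap to move, at the cost
of a decompress on every fetch.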

View File

@@ -216,10 +216,6 @@ public:
         }
         BEAST_EXPECT(store.getLastRotated() == lastRotated);
-        SQLiteDatabase* const db =
-            dynamic_cast<SQLiteDatabase*>(&env.app().getRelationalDatabase());
-        BEAST_EXPECT(*db->getTransactionsMinLedgerSeq() == 3);
         for (auto i = 3; i < deleteInterval + lastRotated; ++i)
         {
             ledgers.emplace(