Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-20 18:45:55 +00:00)
revert docker dependency cache
@@ -3,12 +3,8 @@ name: Release - SH Runner
 on:
   push:
     branches: ["dev", "candidate", "release"]
-  workflow_dispatch:
-    inputs:
-      reason:
-        description: 'Release Build'
-        required: false
-        default: 'Manual PR testing'
+  pull_request:
+    branches: ["dev", "candidate", "release"]
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -19,7 +15,6 @@ env:
 
 jobs:
   checkout:
-    if: github.event_name == 'push' || github.event_name == 'workflow_dispatch'
     runs-on: [self-hosted, vanity]
     outputs:
       checkout_path: ${{ steps.vars.outputs.checkout_path }}
@@ -4,7 +4,6 @@
 
 install (
   TARGETS
-    ed25519-donna
     common
     opts
     ripple_syslibs
@@ -16,17 +15,6 @@ install (
   RUNTIME DESTINATION bin
   INCLUDES DESTINATION include)
 
-if(${INSTALL_SECP256K1})
-  install (
-    TARGETS
-      secp256k1
-    EXPORT RippleExports
-    LIBRARY DESTINATION lib
-    ARCHIVE DESTINATION lib
-    RUNTIME DESTINATION bin
-    INCLUDES DESTINATION include)
-endif()
-
 install (EXPORT RippleExports
   FILE RippleTargets.cmake
   NAMESPACE Ripple::
@@ -36,6 +36,22 @@ if(thread_safety_analysis)
   add_link_options("-stdlib=libc++")
 endif()
 
+option(USE_CONAN "Use Conan package manager for dependencies" OFF)
+# Then, auto-detect if conan_toolchain.cmake is being used
+if(CMAKE_TOOLCHAIN_FILE)
+  # Check if the toolchain file path contains "conan_toolchain"
+  if(CMAKE_TOOLCHAIN_FILE MATCHES "conan_toolchain")
+    set(USE_CONAN ON CACHE BOOL "Using Conan detected from toolchain file" FORCE)
+    message(STATUS "Conan toolchain detected: ${CMAKE_TOOLCHAIN_FILE}")
+    message(STATUS "Building with Conan dependencies")
+  endif()
+endif()
+
+if (NOT USE_CONAN)
+  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
+  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
+endif()
+
 include (CheckCXXCompilerFlag)
 include (FetchContent)
 include (ExternalProject)
@@ -48,6 +64,9 @@ endif ()
 include(RippledSanity)
 include(RippledVersion)
 include(RippledSettings)
+if (NOT USE_CONAN)
+  include(RippledNIH)
+endif()
 # this check has to remain in the top-level cmake
 # because of the early return statement
 if (packages_only)
@@ -60,19 +79,7 @@ include(RippledCompiler)
 include(RippledInterface)
 
 ###
-option(USE_CONAN "Use Conan package manager for dependencies" OFF)
-# Then, auto-detect if conan_toolchain.cmake is being used
-if(CMAKE_TOOLCHAIN_FILE)
-  # Check if the toolchain file path contains "conan_toolchain"
-  if(CMAKE_TOOLCHAIN_FILE MATCHES "conan_toolchain")
-    set(USE_CONAN ON CACHE BOOL "Using Conan detected from toolchain file" FORCE)
-    message(STATUS "Conan toolchain detected: ${CMAKE_TOOLCHAIN_FILE}")
-    message(STATUS "Building with Conan dependencies")
-  endif()
-endif()
 if (NOT USE_CONAN)
-  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
-  list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
   add_subdirectory(src/secp256k1)
   add_subdirectory(src/ed25519-donna)
   include(deps/Boost)
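Note: the project-level CMake hunks above move the USE_CONAN option and its auto-detection earlier in the file. USE_CONAN stays OFF by default and is only forced ON when the configured toolchain file path matches "conan_toolchain"; without Conan, CMAKE_MODULE_PATH falls back to Builds/CMake and Builds/CMake/deps and RippledNIH is included. A minimal sketch of the two configure modes this implies (the build directory and toolchain path below are illustrative, not taken from the commit):

    # Conan-driven configure: USE_CONAN flips to ON because the toolchain path matches "conan_toolchain"
    cmake -B build -DCMAKE_TOOLCHAIN_FILE=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release
    # Legacy configure: no Conan toolchain, so the NIH module paths under Builds/CMake are used
    cmake -B build -DCMAKE_BUILD_TYPE=Release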
@@ -27,10 +27,8 @@ if [[ "$?" -ne "0" ]]; then
   exit 127
 fi
 
-#!/bin/bash
 perl -i -pe "s/^(\\s*)-DBUILD_SHARED_LIBS=OFF/\\1-DBUILD_SHARED_LIBS=OFF\\n\\1-DROCKSDB_BUILD_SHARED=OFF/g" Builds/CMake/deps/Rocksdb.cmake &&
 mv Builds/CMake/deps/WasmEdge.cmake Builds/CMake/deps/WasmEdge.old &&
-cp Builds/CMake/deps/Rocksdb.cmake Builds/CMake/deps/Rocksdb.cmake.old &&
 echo "find_package(LLVM REQUIRED CONFIG)
 message(STATUS \"Found LLVM \${LLVM_PACKAGE_VERSION}\")
 message(STATUS \"Using LLVMConfig.cmake in: \${LLVM_DIR}\")
@@ -75,4 +73,4 @@ mv Builds/CMake/deps/Rocksdb.cmake.old Builds/CMake/deps/Rocksdb.cmake;
 mv Builds/CMake/deps/WasmEdge.old Builds/CMake/deps/WasmEdge.cmake;
 
 
 echo "END INSIDE CONTAINER - CORE"
build-deps.sh → build-full.sh (114 changed lines; executable file → normal file)
@@ -5,12 +5,14 @@
 # debugging.
 set -ex
 
+set -e
 
 echo "START INSIDE CONTAINER - FULL"
 
-# Set default BUILD_CORES if not defined
-: ${BUILD_CORES:=8}
-
-echo "-- BUILD CORES: $BUILD_CORES"
+echo "-- BUILD CORES: $3"
+echo "-- GITHUB_REPOSITORY: $1"
+echo "-- GITHUB_SHA: $2"
+echo "-- GITHUB_RUN_NUMBER: $4"
 
 umask 0000;
@@ -23,11 +25,56 @@ yum-config-manager --disable centos-sclo-sclo
 
 ####
 
-mkdir -p /io
-cd /io
-
-mkdir -p .nih_c
-mkdir -p .nih_toolchain
+cd /io;
+mkdir -p src/certs;
+curl --silent -k https://raw.githubusercontent.com/RichardAH/rippled-release-builder/main/ca-bundle/certbundle.h -o src/certs/certbundle.h;
+if [ "`grep certbundle.h src/ripple/net/impl/RegisterSSLCerts.cpp | wc -l`" -eq "0" ]
+then
+    cp src/ripple/net/impl/RegisterSSLCerts.cpp src/ripple/net/impl/RegisterSSLCerts.cpp.old
+    perl -i -pe "s/^{/{
+#ifdef EMBEDDED_CA_BUNDLE
+    BIO *cbio = BIO_new_mem_buf(ca_bundle.data(), ca_bundle.size());
+    X509_STORE *cts = SSL_CTX_get_cert_store(ctx.native_handle());
+    if(!cts || !cbio)
+        JLOG(j.warn())
+            << \"Failed to create cts\/cbio when loading embedded certs.\";
+    else
+    {
+        X509_INFO *itmp;
+        int i, count = 0, type = X509_FILETYPE_PEM;
+        STACK_OF(X509_INFO) *inf = PEM_X509_INFO_read_bio(cbio, NULL, NULL, NULL);
+
+        if (!inf)
+        {
+            BIO_free(cbio);
+            JLOG(j.warn())
+                << \"Failed to read cbio when loading embedded certs.\";
+        }
+        else
+        {
+            for (i = 0; i < sk_X509_INFO_num(inf); i++)
+            {
+                itmp = sk_X509_INFO_value(inf, i);
+                if (itmp->x509)
+                {
+                    X509_STORE_add_cert(cts, itmp->x509);
+                    count++;
+                }
+                if (itmp->crl)
+                {
+                    X509_STORE_add_crl(cts, itmp->crl);
+                    count++;
+                }
+            }
+            sk_X509_INFO_pop_free(inf, X509_INFO_free);
+            BIO_free(cbio);
+        }
+    }
+#endif/g" src/ripple/net/impl/RegisterSSLCerts.cpp &&
+sed -i "s/#include <ripple\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/ripple/net/impl/RegisterSSLCerts.cpp
+fi
+mkdir -p .nih_c;
+mkdir -p .nih_toolchain;
 cd .nih_toolchain &&
 yum install -y wget lz4 lz4-devel git llvm13-static.x86_64 llvm13-devel.x86_64 devtoolset-10-binutils zlib-static ncurses-static -y \
   devtoolset-7-gcc-c++ \
@@ -44,7 +91,7 @@ ZSTD_VERSION="1.1.3" &&
 ( wget -nc -q -O zstd-${ZSTD_VERSION}.tar.gz https://github.com/facebook/zstd/archive/v${ZSTD_VERSION}.tar.gz; echo "" ) &&
 tar xzvf zstd-${ZSTD_VERSION}.tar.gz &&
 cd zstd-${ZSTD_VERSION} &&
-make -j$BUILD_CORES install &&
+make -j$3 install &&
 cd .. &&
 echo "-- Install Cmake 3.23.1 --" &&
 pwd &&
@@ -54,23 +101,20 @@ echo "-- Install Boost 1.86.0 --" &&
 pwd &&
 ( wget -nc -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz; echo "" ) &&
 tar -xzf boost_1_86_0.tar.gz &&
-cd boost_1_86_0 && ./bootstrap.sh && ./b2 link=static -j$BUILD_CORES && ./b2 install &&
+cd boost_1_86_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
 cd ../ &&
-# Copy Boost to the expected location
-mkdir -p /usr/local/src/ &&
-cp -r boost_1_86_0 /usr/local/src/ &&
 echo "-- Install Protobuf 3.20.0 --" &&
 pwd &&
 ( wget -nc -q https://github.com/protocolbuffers/protobuf/releases/download/v3.20.0/protobuf-all-3.20.0.tar.gz; echo "" ) &&
 tar -xzf protobuf-all-3.20.0.tar.gz &&
 cd protobuf-3.20.0/ &&
-./autogen.sh && ./configure --prefix=/usr --disable-shared link=static && make -j$BUILD_CORES && make install &&
+./autogen.sh && ./configure --prefix=/usr --disable-shared link=static && make -j$3 && make install &&
 cd .. &&
 echo "-- Build LLD --" &&
 pwd &&
-ln -sf /usr/bin/llvm-config-13 /usr/bin/llvm-config &&
+ln /usr/bin/llvm-config-13 /usr/bin/llvm-config &&
 mv /opt/rh/devtoolset-9/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar-9 &&
-ln -sf /opt/rh/devtoolset-10/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar &&
+ln /opt/rh/devtoolset-10/root/usr/bin/ar /opt/rh/devtoolset-9/root/usr/bin/ar &&
 ( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/lld-13.0.1.src.tar.xz; echo "" ) &&
 ( wget -nc -q https://github.com/llvm/llvm-project/releases/download/llvmorg-13.0.1/libunwind-13.0.1.src.tar.xz; echo "" ) &&
 tar -xf lld-13.0.1.src.tar.xz &&
@@ -81,12 +125,12 @@ rm -rf build CMakeCache.txt &&
 mkdir -p build &&
 cd build &&
 cmake .. -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm13/ -DCMAKE_BUILD_TYPE=Release &&
-make -j$BUILD_CORES install &&
+make -j$3 install &&
 ln -s /usr/lib64/llvm13/lib/include/lld /usr/include/lld &&
 cp /usr/lib64/llvm13/lib/liblld*.a /usr/local/lib/ &&
 cd ../../ &&
 echo "-- Build WasmEdge --" &&
-( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; echo ""; unzip -o 0.11.2.zip; ) &&
+( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) &&
 cd WasmEdge-0.11.2 &&
 ( mkdir -p build; echo "" ) &&
 cd build &&
@@ -105,35 +149,33 @@ cmake .. \
   -DWASMEDGE_BUILD_PLUGINS=OFF \
   -DWASMEDGE_LINK_TOOLS_STATIC=ON \
   -DBoost_NO_BOOST_CMAKE=ON -DLLVM_DIR=/usr/lib64/llvm13/lib/cmake/llvm/ -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ &&
-make -j$BUILD_CORES install &&
+make -j$3 install &&
 export PATH=`echo $PATH | sed -E "s/devtoolset-9/devtoolset-10/g"` &&
 cp -r include/api/wasmedge /usr/include/ &&
 cd /io/ &&
 echo "-- Build Rippled --" &&
+pwd &&
+cp Builds/CMake/deps/Rocksdb.cmake Builds/CMake/deps/Rocksdb.cmake.old &&
 
 echo "MOVING TO [ build-core.sh ]"
+cd /io;
 
-cd /io
-# Save current environment to .env file
-printenv > .env.temp
-cat .env.temp | grep '=' | sed s/\\\(^[^=]\\+=\\\)/\\1\\\"/g|sed s/\$/\\\"/g > .env
-rm .env.temp
+printenv > .env.temp;
+cat .env.temp | grep '=' | sed s/\\\(^[^=]\\+=\\\)/\\1\\\"/g|sed s/\$/\\\"/g > .env;
+rm .env.temp;
 
 echo "Persisting ENV:"
 cat .env
 
-# Create a deps summary
-mkdir -p /usr/local
-DEPS_SUMMARY="/usr/local/deps-summary.txt"
-echo "Dependencies built at: $(date)" > $DEPS_SUMMARY
-echo "Boost dir: $(ls -la /usr/local/src/boost_1_86_0 2>/dev/null || echo 'NOT FOUND')" >> $DEPS_SUMMARY
-echo "ZStd version: $(zstd --version | head -n 1 || echo 'NOT FOUND')" >> $DEPS_SUMMARY
-echo "Protobuf version: $(protoc --version || echo 'NOT FOUND')" >> $DEPS_SUMMARY
-echo "CMAKE version: $(/hbb/cmake-3.23.1-linux-x86_64/bin/cmake --version | head -n 1 || echo 'NOT FOUND')" >> $DEPS_SUMMARY
+./build-core.sh "$1" "$2" "$3" "$4"
 
-echo "-------- DEPENDENCY SUMMARY --------"
-cat $DEPS_SUMMARY
-echo "------------------------------------"
+echo $?
+if [[ "$?" -ne "0" ]]; then
+  echo "ERR build-core.sh non 0 exit code"
+  exit 127
+fi
 
+echo "END [ build-core.sh ]"
+
 echo "END INSIDE CONTAINER - FULL"
 
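Note: after the rename, build-full.sh reads its parameters positionally instead of from environment defaults: $1 is the GitHub repository, $2 the commit SHA, $3 the number of build cores, and $4 the run number, and the same four values are forwarded to build-core.sh at the end. A hedged example of invoking it directly inside the build container, with placeholder values rather than anything taken from the commit:

    bash build-full.sh "Xahau/xahaud" "0123abcd" 8 42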
@@ -1,20 +0,0 @@
-
-# docker build -t xahaud-hbb:latest build.hbb.dockerfile .
-FROM ghcr.io/foobarwidget/holy-build-box-x64
-
-# Set environment variables
-ENV BUILD_CORES=8
-
-# Create /io directory for mounting the working directory
-RUN mkdir -p /io
-
-# Copy dependency build script
-COPY build-deps.sh /hbb/build-deps.sh
-RUN chmod +x /hbb/build-deps.sh
-
-# Run the dependency build script with proper error handling
-RUN /hbb_exe/activate-exec bash -ex /hbb/build-deps.sh || (echo "ERROR: Dependency build failed" && exit 1)
-
-# Set the entrypoint to activate the HBB environment and load our env settings
-ENTRYPOINT ["/hbb_exe/activate-exec"]
-CMD ["bash"]
@@ -11,24 +11,32 @@ echo "Cleaning previously built binary"
 rm -f release-build/xahaud
 
 BUILD_CORES=$(echo "scale=0 ; `nproc` / 1.337" | bc)
-GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-""}
-GITHUB_SHA=${GITHUB_SHA:-"local"}
-GITHUB_RUN_NUMBER=${GITHUB_RUN_NUMBER:-"0"}
-GITHUB_WORKFLOW=${GITHUB_WORKFLOW:-"local"}
-GITHUB_REF=${GITHUB_REF:-"local"}
 
 if [[ "$GITHUB_REPOSITORY" == "" ]]; then
   #Default
   BUILD_CORES=8
 fi
 
+EXIT_IF_CONTAINER_RUNNING=${EXIT_IF_CONTAINER_RUNNING:-1}
 # Ensure still works outside of GH Actions by setting these to /dev/null
 # GA will run this script and then delete it at the end of the job
 JOB_CLEANUP_SCRIPT=${JOB_CLEANUP_SCRIPT:-/dev/null}
 NORMALIZED_WORKFLOW=$(echo "$GITHUB_WORKFLOW" | tr -c 'a-zA-Z0-9' '-')
 NORMALIZED_REF=$(echo "$GITHUB_REF" | tr -c 'a-zA-Z0-9' '-')
 CONTAINER_NAME="xahaud_cached_builder_${NORMALIZED_WORKFLOW}-${NORMALIZED_REF}"
-DEPENDENCY_IMAGE="xahaud-hbb:latest"
+
+# Check if the container is already running
+if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
+  echo "⚠️ A running container (${CONTAINER_NAME}) was detected."
+
+  if [[ "$EXIT_IF_CONTAINER_RUNNING" -eq 1 ]]; then
+    echo "❌ EXIT_IF_CONTAINER_RUNNING is set. Exiting."
+    exit 1
+  else
+    echo "🛑 Stopping the running container: ${CONTAINER_NAME}"
+    docker stop "${CONTAINER_NAME}"
+  fi
+fi
 
 echo "-- BUILD CORES: $BUILD_CORES"
 echo "-- GITHUB_REPOSITORY: $GITHUB_REPOSITORY"
@@ -50,51 +58,31 @@ then
   exit 1
 fi
 
-# Check if dependency image exists
-if ! docker image inspect "$DEPENDENCY_IMAGE" &> /dev/null; then
-  echo "Dependency image doesn't exist. Building it now..."
-
-  # Create a temporary directory for dependency build files
-  TMP_DIR=$(mktemp -d)
-
-  # Copy the dependency build files
-  cp ./build-deps.sh "$TMP_DIR/"
-  cp ./build.hbb.dockerfile "$TMP_DIR/Dockerfile"
-
-  # Build the dependency image
-  docker build -t "$DEPENDENCY_IMAGE" "$TMP_DIR"
-
-  # Clean up
-  rm -rf "$TMP_DIR"
-
-  echo "Dependency image built successfully."
-fi
-
 STATIC_CONTAINER=$(docker ps -a | grep $CONTAINER_NAME |wc -l)
 
-# if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
+#if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
 if false; then
   echo "Static container, execute in static container to have max. cache"
   docker start $CONTAINER_NAME
-  docker exec -i $CONTAINER_NAME bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
+  docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
   docker stop $CONTAINER_NAME
 else
-  echo "No static container, build using dependency image"
+  echo "No static container, build on temp container"
   rm -rf release-build;
   mkdir -p release-build;
 
   if [[ "$GITHUB_REPOSITORY" == "" ]]; then
    # Non GH, local building
    echo "Non-GH runner, local building, temp container"
-    docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io --network host "$DEPENDENCY_IMAGE" bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
+    docker run -i --user 0:$(id -g) --rm -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
   else
    # GH Action, runner
    echo "GH Action, runner, clean & re-create create persistent container"
    docker rm -f $CONTAINER_NAME
    echo "echo 'Stopping container: $CONTAINER_NAME'" >> "$JOB_CLEANUP_SCRIPT"
    echo "docker stop --time=15 \"$CONTAINER_NAME\" || echo 'Failed to stop container or container not running'" >> "$JOB_CLEANUP_SCRIPT"
-    docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io --network host "$DEPENDENCY_IMAGE" bash
-    docker exec -i $CONTAINER_NAME bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
+    docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash
+    docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
    docker stop $CONTAINER_NAME
   fi
 fi
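Note: with the dependency image removed, the builder script in the last two hunks once again pulls ghcr.io/foobarwidget/holy-build-box-x64 directly and runs build-full.sh inside it via /hbb_exe/activate-exec. A rough sketch of a local, non-GitHub-Actions run under the branches shown above (the script name is an assumption; leaving GITHUB_REPOSITORY unset selects the temporary-container path, and EXIT_IF_CONTAINER_RUNNING=0 lets it stop a leftover container instead of aborting):

    # assumed invocation from the repository root, with Docker available
    EXIT_IF_CONTAINER_RUNNING=0 ./release-builder.sh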