chore: Cache files downloaded from cache #10950

Open · wants to merge 2 commits into base: master
6 changes: 4 additions & 2 deletions .gitignore
```diff
@@ -25,10 +25,12 @@ terraform.tfstate*
 .secret
 .bb_tmp
 
-
 # tmux
 tmux-client-*.log
 .supermavenignore
 
 # parallel
-joblog.txt
\ No newline at end of file
+joblog.txt
+
+# bootstrap cache
+*.cache
```
7 changes: 4 additions & 3 deletions Earthfile
```diff
@@ -264,12 +264,13 @@ rollup-verifier-contract-with-cache:
     FROM +bootstrap
     ENV CI=1
     ENV USE_CACHE=1
-    LET artifact=rollup-verifier-contract-$(./noir-projects/bootstrap.sh hash).tar.gz
+    LET artifact_hash=$(./noir-projects/bootstrap.sh hash)
     # Running this directly in the 'if' means files are not permanent
-    RUN ci3/cache_download rollup-verifier-contract-3e3a78f9a68f1f1e04240acf0728522d87a313ac-linux-gnu-x86_64 || true
+    # TODO(palla/cache): Shouldn't the hash below be the artifact_hash?
+    RUN ci3/cache_download rollup-verifier-contract 3e3a78f9a68f1f1e04240acf0728522d87a313ac-linux-gnu-x86_64 || true
     IF ! [ -d /usr/src/bb ]
         COPY --dir +rollup-verifier-contract/usr/src/bb /usr/src
-        RUN ci3/cache_upload $artifact bb
+        RUN ci3/cache_upload rollup-verifier-contract $artifact_hash bb
     END
     SAVE ARTIFACT /usr/src/bb /usr/src/bb
```
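Taken together, these changes swap the old single-argument interface, where each caller assembled the full `*.tar.gz` object name itself, for a two-argument key/hash interface. A minimal before/after sketch; the `my-artifact` key and `0123abcd` hash are illustrative, not taken from the PR:

```sh
# Old interface: the caller builds the full object name itself.
cache_download my-artifact-0123abcd.tar.gz
cache_upload my-artifact-0123abcd.tar.gz build/bin

# New interface: key and hash travel separately; the scripts derive
# "$key-$hash.tar.gz" internally and track downloads in "$key.cache".
cache_download my-artifact 0123abcd
cache_upload my-artifact 0123abcd build/bin
```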
5 changes: 2 additions & 3 deletions avm-transpiler/bootstrap.sh
```diff
@@ -8,10 +8,9 @@ hash=$(cache_content_hash ../noir/.rebuild_patterns .rebuild_patterns)
 
 function build {
   github_group "avm-transpiler build"
-  artifact=avm-transpiler-$hash.tar.gz
-  if ! cache_download $artifact; then
+  if ! cache_download avm-transpiler $hash; then
     denoise ./scripts/bootstrap_native.sh
-    cache_upload $artifact target/release
+    cache_upload avm-transpiler $hash target/release
   fi
   github_endgroup
 }
```
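This is the download-or-build-then-upload idiom that every bootstrap script touched by the PR (avm-transpiler, barretenberg, bb.js, l1-contracts, noir-projects) now follows. A standalone sketch of the pattern, assuming the ci3 helpers are on PATH and using a hypothetical `my-project` key and a placeholder build command:

```sh
#!/usr/bin/env bash
set -eu

# Hash the inputs that should invalidate the artifact (ci3 helper).
hash=$(cache_content_hash .rebuild_patterns)

# Try the cache first; on a miss, build and repopulate the cache.
if ! cache_download my-project $hash; then  # hypothetical key
  ./scripts/build.sh                        # placeholder build step
  cache_upload my-project $hash target/release
fi
```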
16 changes: 8 additions & 8 deletions barretenberg/cpp/bootstrap.sh
```diff
@@ -23,43 +23,43 @@ hash=$(cache_content_hash .rebuild_patterns)
 
 function build_native {
   set -eu
-  if ! cache_download barretenberg-release-$hash.tar.gz; then
+  if ! cache_download barretenberg-release $hash; then
     rm -f build/CMakeCache.txt
     echo "Building with preset: $preset"
     cmake --preset $preset -Bbuild
     cmake --build build --target bb
-    cache_upload barretenberg-release-$hash.tar.gz build/bin
+    cache_upload barretenberg-release $hash build/bin
   fi
 
   (cd src/barretenberg/world_state_napi && yarn --frozen-lockfile --prefer-offline)
-  if ! cache_download barretenberg-release-world-state-$hash.tar.gz; then
+  if ! cache_download barretenberg-release-world-state $hash; then
     rm -f build-pic/CMakeCache.txt
     cmake --preset $pic_preset -DCMAKE_BUILD_TYPE=RelWithAssert
     cmake --build --preset $pic_preset --target world_state_napi
-    cache_upload barretenberg-release-world-state-$hash.tar.gz build-pic/lib/world_state_napi.node
+    cache_upload barretenberg-release-world-state $hash build-pic/lib/world_state_napi.node
   fi
 }
 
 function build_wasm {
   set -eu
-  if ! cache_download barretenberg-wasm-$hash.tar.gz; then
+  if ! cache_download barretenberg-wasm $hash; then
     rm -f build-wasm/CMakeCache.txt
     cmake --preset wasm
     cmake --build --preset wasm
     /opt/wasi-sdk/bin/llvm-strip ./build-wasm/bin/barretenberg.wasm
-    cache_upload barretenberg-wasm-$hash.tar.gz build-wasm/bin
+    cache_upload barretenberg-wasm $hash build-wasm/bin
   fi
   (cd ./build-wasm/bin && gzip barretenberg.wasm -c > barretenberg.wasm.gz)
 }
 
 function build_wasm_threads {
   set -eu
-  if ! cache_download barretenberg-wasm-threads-$hash.tar.gz; then
+  if ! cache_download barretenberg-wasm-threads $hash; then
     rm -f build-wasm-threads/CMakeCache.txt
     cmake --preset wasm-threads
     cmake --build --preset wasm-threads
     /opt/wasi-sdk/bin/llvm-strip ./build-wasm-threads/bin/barretenberg.wasm
-    cache_upload barretenberg-wasm-threads-$hash.tar.gz build-wasm-threads/bin
+    cache_upload barretenberg-wasm-threads $hash build-wasm-threads/bin
  fi
   (cd ./build-wasm-threads/bin && gzip barretenberg.wasm -c > barretenberg.wasm.gz)
 }
```
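Note that a single content hash keys four distinct artifacts here (release, world-state, wasm, wasm-threads); each variant gets its own `$key-$hash.tar.gz` object and, after this PR, its own `$key.cache` marker. A sketch of that fan-out; the loop is illustrative, the real script calls each variant from its own build function:

```sh
hash=$(cache_content_hash .rebuild_patterns)
for key in barretenberg-release barretenberg-wasm barretenberg-wasm-threads; do
  # Each variant hits or misses the cache independently under the same hash.
  cache_download "$key" "$hash" || echo "cache miss for $key, build needed" >&2
done
```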
4 changes: 2 additions & 2 deletions barretenberg/ts/bootstrap.sh
```diff
@@ -7,12 +7,12 @@ hash=$(cache_content_hash ../cpp/.rebuild_patterns .rebuild_patterns)
 
 function build {
   github_group "bb.js build"
-  if ! cache_download bb.js-$hash.tar.gz; then
+  if ! cache_download bb.js $hash; then
     denoise yarn install
     find . -exec touch -d "@0" {} + 2>/dev/null || true
 
     denoise yarn build
-    cache_upload bb.js-$hash.tar.gz dest
+    cache_upload bb.js $hash dest
   else
     denoise yarn install
   fi
```
12 changes: 7 additions & 5 deletions build-system/s3-cache-scripts/earthly-s3-cache.sh
```diff
@@ -8,34 +8,36 @@
 set -eu
 
 # definitions
-FILE="$prefix-$(cat .content-hash).tar.gz"
+KEY="$prefix"
+HASH="$(cat .content-hash)"
+
 function s3_download() {
   if [ "${S3_BUILD_CACHE_DOWNLOAD:-true}" = "false" ] || [ "${AWS_ACCESS_KEY_ID}" == "" ] ; then
     return 1 # require a rebuild
   fi
-  /usr/src/build-system/s3-cache-scripts/cache_download "$FILE"
+  /usr/src/build-system/s3-cache-scripts/cache_download "$KEY" "$HASH"
 }
 function s3_upload() {
   if [ "${S3_BUILD_CACHE_UPLOAD:-true}" = "false" ] || [ "${AWS_ACCESS_KEY_ID}" == "" ] ; then
     return 0 # exit silently
   fi
-  /usr/src/build-system/s3-cache-scripts/cache_upload "$FILE" $build_artifacts || echo "WARNING: S3 upload failed!" >&2
+  /usr/src/build-system/s3-cache-scripts/cache_upload "$KEY" "$HASH" $build_artifacts || echo "WARNING: S3 upload failed!" >&2
 }
 function minio_download() {
   if [ -z "$S3_BUILD_CACHE_MINIO_URL" ] ; then
     return 1 # require rebuild
   fi
   # minio is S3-compatible
   S3_BUILD_CACHE_AWS_PARAMS="--endpoint-url $S3_BUILD_CACHE_MINIO_URL" AWS_SECRET_ACCESS_KEY=minioadmin AWS_ACCESS_KEY_ID=minioadmin \
-    /usr/src/build-system/s3-cache-scripts/cache_download "$FILE"
+    /usr/src/build-system/s3-cache-scripts/cache_download "$KEY" "$HASH"
 }
 function minio_upload() {
   if [ -z "$S3_BUILD_CACHE_MINIO_URL" ] ; then
     return 0 # exit silently
   fi
   # minio is S3-compatible
   S3_BUILD_CACHE_AWS_PARAMS="--endpoint-url $S3_BUILD_CACHE_MINIO_URL" AWS_SECRET_ACCESS_KEY=minioadmin AWS_ACCESS_KEY_ID=minioadmin \
-    /usr/src/build-system/s3-cache-scripts/cache_upload "$FILE" $build_artifacts || echo "WARNING Minio upload failed!" >&2
+    /usr/src/build-system/s3-cache-scripts/cache_upload "$KEY" "$HASH" $build_artifacts || echo "WARNING Minio upload failed!" >&2
 }
 
 # commands
```
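In this wrapper, `KEY` comes from the `$prefix` environment variable and `HASH` from a `.content-hash` file, both provided by the calling Earthly target. A sketch of an equivalent manual invocation, with all values illustrative rather than taken from the PR:

```sh
# Assumed inputs, normally injected by the Earthly target:
export prefix=barretenberg-wasm        # becomes KEY
echo 0123abcd > .content-hash          # becomes HASH
export build_artifacts=build-wasm/bin  # passed through to cache_upload

# With those set, the wrapper resolves to calls equivalent to:
/usr/src/build-system/s3-cache-scripts/cache_download "$prefix" "$(cat .content-hash)"
/usr/src/build-system/s3-cache-scripts/cache_upload "$prefix" "$(cat .content-hash)" $build_artifacts
```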
20 changes: 15 additions & 5 deletions ci3/cache_download
```diff
@@ -2,8 +2,8 @@
 set -eu -o pipefail
 [ "${BUILD_SYSTEM_DEBUG:-}" = 1 ] && set -x
 
-if [ "$#" -lt 1 ]; then
-  echo "Usage: $0 <tar.gz_file_to_download_and_extract>" >&2
+if [ "$#" -lt 2 ]; then
+  echo "Usage: $0 <key> <hash>" >&2
   exit 1
 fi
 
@@ -12,18 +12,28 @@ if [ "${USE_CACHE:-0}" -lt 1 ]; then
   echo "Not using cache for $1 because USE_CACHE=0."
   exit 1
 fi
-# Get the tar.gz file name from the argument
-TAR_FILE="$1"
-OUT_DIR="${2:-.}"
+
+KEY="$1"
+HASH="$2"
+OUT_DIR="${3:-.}"
+
+TAR_FILE="$KEY-$HASH.tar.gz"
+CACHE_FILE="$OUT_DIR/$KEY.cache"
 
 mkdir -p "$OUT_DIR"
 # Extract endpoint URL if S3_BUILD_CACHE_AWS_PARAMS is set
 if [[ -n "${S3_BUILD_CACHE_AWS_PARAMS:-}" ]]; then
   aws $S3_BUILD_CACHE_AWS_PARAMS s3 cp "s3://aztec-ci-artifacts/build-cache/$TAR_FILE" "-" | tar -xzf - -C "$OUT_DIR" 2>/dev/null
 else
+  # Do not go to S3 if we have already downloaded this same file
+  if [[ -z "${SKIP_LOCAL_CACHE_FILE:-}" && -f "$CACHE_FILE" && "$(cat "$CACHE_FILE")" == $HASH ]]; then
+    echo "File $TAR_FILE is already downloaded according to $CACHE_FILE" >&2 && exit 0
+  fi
   # Default to AWS S3 URL if no custom endpoint is set
   S3_ENDPOINT="http://aztec-ci-artifacts.s3.amazonaws.com"
   # Attempt to download and extract the cache file
   (curl -s -f "$S3_ENDPOINT/build-cache/$TAR_FILE" | tar -xzf - -C "$OUT_DIR" 2>/dev/null) || (echo "Cache download of $TAR_FILE failed." >&2 && exit 1)
+  # Record locally that we have downloaded this file
+  echo $HASH > $CACHE_FILE
 fi
 echo "Cache download and extraction of $TAR_FILE complete." >&2
```
20 changes: 15 additions & 5 deletions ci3/cache_upload
```diff
@@ -3,22 +3,32 @@
 set -eu
 
 if [ "$#" -lt 2 ]; then
-  echo "Usage: $0 <my-artifact.tar.gz> <binary_paths_to_tar_gz_and_upload...>" >&2
+  echo "Usage: $0 <key> <hash> <binary_paths_to_tar_gz_and_upload...>" >&2
   exit 1
 fi
 
-# Name, intended to have .tar.gz ending
-name="$1"
+# Key and hash, intended to form the full tar file name
+key="$1"
+hash="$2"
+name="$key-$hash.tar.gz"
 
 # Now $@ = our binary path args
-shift 1
+shift 2
 
 if [ -z ${S3_FORCE_UPLOAD:-} ] && aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 ls "s3://aztec-ci-artifacts/build-cache/$name" >/dev/null 2>&1; then
   echo "Skipping upload, already exists: $name" >&2
   exit 0
 fi
 # Pipe tar directly to AWS S3 cp
 if tar -czf - "$@" | aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 cp - "s3://aztec-ci-artifacts/build-cache/$name" >&2 ; then
-  echo "Cache upload of $name complete." >&2
+  # Record locally that we have this file version, so we don't re-download
+  if [ -n "${SKIP_LOCAL_CACHE_FILE:-}" ]; then
+    echo "Cache upload of $name complete" >&2
+  else
+    echo "$hash" > "$key.cache"
+    echo "Cache upload of $name complete and registered locally in $key.cache" >&2
+  fi
   exit 0
 else
   echo "Cache upload of $name failed." >&2
   exit 0
```
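Upload mirrors download: after a successful push it registers the hash locally, so a later `cache_download` of the same key and hash is a no-op. A usage sketch; key, hash, and paths are illustrative:

```sh
# Tar build/bin, upload it as my-artifact-0123abcd.tar.gz, and write
# 0123abcd to ./my-artifact.cache so later downloads can skip S3.
ci3/cache_upload my-artifact 0123abcd build/bin

# Upload without writing the local marker:
SKIP_LOCAL_CACHE_FILE=1 ci3/cache_upload my-artifact 0123abcd build/bin

# Re-upload even if the object already exists in the bucket:
S3_FORCE_UPLOAD=1 ci3/cache_upload my-artifact 0123abcd build/bin
```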
5 changes: 2 additions & 3 deletions l1-contracts/bootstrap.sh
```diff
@@ -7,8 +7,7 @@ export hash=$(cache_content_hash .rebuild_patterns)
 
 function build {
   github_group "l1-contracts build"
-  local artifact=l1-contracts-$hash.tar.gz
-  if ! cache_download $artifact; then
+  if ! cache_download l1-contracts $hash; then
     # Clean
     rm -rf broadcast cache out serve
 
@@ -21,7 +20,7 @@ function build {
     # Compile contracts
     forge build
 
-    cache_upload $artifact out
+    cache_upload l1-contracts $hash out
   fi
   github_endgroup
 }
```
8 changes: 4 additions & 4 deletions noir-projects/noir-contracts/bootstrap.sh
```diff
@@ -69,11 +69,11 @@ function process_function() {
   # Build hash, check if in cache.
   # If it's in the cache it's extracted to $tmp_dir/$hash
   hash=$((echo "$BB_HASH"; echo "$bytecode_b64") | sha256sum | tr -d ' -')
-  if ! cache_download vk-$hash.tar.gz &> /dev/null; then
+  if ! cache_download vk $hash &> /dev/null; then
     # It's not in the cache. Generate the vk file and upload it to the cache.
     echo_stderr "Generating vk for function: $name..."
     echo "$bytecode_b64" | base64 -d | gunzip | $BB write_vk_for_ivc -b - -o $tmp_dir/$hash 2>/dev/null
-    cache_upload vk-$hash.tar.gz $tmp_dir/$hash &> /dev/null
+    cache_upload vk $hash $tmp_dir/$hash &> /dev/null
   fi
 
   # Return (echo) json containing the base64 encoded verification key.
@@ -104,10 +104,10 @@ function compile {
     "^noir-projects/noir-contracts/contracts/$contract/" \
     "^noir-projects/aztec-nr/" \
   )"
-  if ! cache_download contract-$contract_hash.tar.gz &> /dev/null; then
+  if ! cache_download contract $contract_hash &> /dev/null; then
     $NARGO compile --package $contract --silence-warnings --inliner-aggressiveness 0
     $TRANSPILER $json_path $json_path
-    cache_upload contract-$contract_hash.tar.gz $json_path &> /dev/null
+    cache_upload contract $contract_hash $json_path &> /dev/null
   fi
 
   # Pipe each contract function, one per line (jq -c), into parallel calls of process_function.
```
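The `vk` cache key is derived per function from the prover identity (`$BB_HASH`) plus the function's bytecode, so identical functions in different contracts resolve to the same cache entry. A sketch of the derivation with placeholder values, written with a space after `$(` to sidestep the `$((` arithmetic-expansion ambiguity the original relies on bash to resolve:

```sh
BB_HASH=deadbeef          # placeholder prover/binary hash
bytecode_b64="H4sIA..."   # placeholder base64-encoded, gzipped bytecode

# Same recipe as the script: hash prover identity plus bytecode, then strip
# sha256sum's trailing " -" so only the hex digest remains.
hash=$( (echo "$BB_HASH"; echo "$bytecode_b64") | sha256sum | tr -d ' -')

echo "vk cache object: vk-$hash.tar.gz"
```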