🐧🧹 HealthChecks 🖳🗑️ #8198
name: 🐧🧹 HealthChecks 🖳🗑️
#MAX_RUNTIME: 02 Minutes */10 * * * *
on:
  #push:
  workflow_dispatch:
  schedule:
    # - cron: "45 02 * * *" # 02:45 AM UTC --> 08:30 AM Morning NPT
    - cron: "0 */2 * * *" # Every 2 Hours (120 Mins)
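  # Note: GitHub Actions evaluates cron schedules in UTC, and the minutes field only accepts 0-59,
  # so an "every 120 minutes" cadence has to be expressed through the hours field ("0 */2 * * *").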
env:
  RCLONE_CF_R2_PUB: "${{ secrets.RCLONE_CF_R2_PUB }}"
#------------------------------------------------------------------------------------#
jobs:
  check:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Debloat
        run: |
          ##presets
          set +x ; set +e
          #-------------#
          #bash <(curl -qfsSL "https://pub.ajam.dev/repos/Azathothas/Arsenal/misc/Github/Runners/Ubuntu/debloat.sh")
          bash <(curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Arsenal/main/misc/Github/Runners/Ubuntu/debloat.sh")
        continue-on-error: true
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          path: main
          filter: "blob:none" #https://github.blog/2020-12-21-get-up-to-speed-with-partial-clone-and-shallow-clone/
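          # 'filter: blob:none' requests a partial (blobless) clone: commits and trees are fetched up
          # front while file contents are downloaded on demand, which keeps this checkout small.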
      - name: Setup Env
        run: |
          ##presets
          set +x ; set +e
          #-------------#
          ##CoreUtils
          sudo apt update -y
          sudo apt install bc coreutils curl dos2unix fdupes jq moreutils wget -y
          sudo apt-get install apt-transport-https apt-utils ca-certificates coreutils dos2unix gnupg2 jq moreutils p7zip-full rename rsync software-properties-common texinfo tmux util-linux wget -y 2>/dev/null ; sudo apt-get update -y 2>/dev/null
          #binutils
          sudo apt install binutils-aarch64-linux-gnu -y
          #libguestfs-tools
          sudo apt install libguestfs-tools -y
          #qemu
          sudo apt install qemu qemu-user-static qemu-utils -y
          #temp
          SYSTMP="$(dirname $(mktemp -u))" && export SYSTMP="$SYSTMP"
          echo "SYSTMP=$SYSTMP" >> "$GITHUB_ENV"
          #In case of removed/privated GH repos
          # https://git-scm.com/docs/git#Documentation/git.txt-codeGITTERMINALPROMPTcode
          export GIT_TERMINAL_PROMPT="0" ; echo "GIT_TERMINAL_PROMPT=$GIT_TERMINAL_PROMPT" >> "$GITHUB_ENV"
          #https://git-scm.com/docs/git#Documentation/git.txt-codeGITASKPASScode
          export GIT_ASKPASS="/bin/echo" ; echo "GIT_ASKPASS=$GIT_ASKPASS" >> "$GITHUB_ENV"
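          # With prompts disabled and askpass pointed at /bin/echo, a clone/fetch of a removed or
          # private repo fails fast instead of hanging on a credential prompt, e.g. (sketch, placeholder repo):
          #   git clone "https://github.com/some-owner/some-removed-repo" || echo "[-] skipped"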
          ##Setup rClone
          mkdir -p "$HOME/.config/rclone"
          echo "${{ secrets.RCLONE_CF_R2_PUB }}" > "$HOME/.config/rclone/rclone.conf"
          export RCLONE_STATS="120s" ; echo "RCLONE_STATS=$RCLONE_STATS" >> "$GITHUB_ENV"
          ##User-Agent
          USER_AGENT="$(curl -qfsSL 'https://pub.ajam.dev/repos/Azathothas/Wordlists/Misc/User-Agents/ua_chrome_macos_latest.txt')" && export USER_AGENT="$USER_AGENT"
          echo "USER_AGENT=$USER_AGENT" >> "$GITHUB_ENV"
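          # If the wordlist fetch fails, USER_AGENT ends up empty (the step keeps going under 'set +e');
          # an optional fallback could be (sketch): export USER_AGENT="${USER_AGENT:-Mozilla/5.0}"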
        continue-on-error: true
      - name: Install Addons
        run: |
          ##presets
          set +x ; set +e
          #-------------#
          #bash <(curl -qfsSL "https://pub.ajam.dev/repos/Azathothas/Arsenal/misc/Linux/install_bb_tools.sh")
          bash <(curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Arsenal/main/misc/Linux/install_bb_tools.sh")
        continue-on-error: true
      - name: Fetch gh workflows
        run: |
          #Presets
          set +x ; set +e
          #--------------#
          #Runners
          mkdir -p "$GITHUB_WORKSPACE/main/misc/Github/Runners/macOS"
          mkdir -p "$GITHUB_WORKSPACE/main/misc/Github/Runners/Ubuntu"
          mkdir -p "$GITHUB_WORKSPACE/main/misc/Github/Runners/Windows"
          #macOS
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Runners/main/.github/workflows/0xf_gh_runner_macos_debug.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/macOS/0xf_gh_runner_macos_debug.yaml"
          #Ubuntu
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Runners/main/.github/workflows/0xf_gh_runner_ubuntu_debug.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Ubuntu/0xf_gh_runner_ubuntu_debug.yaml"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Runners/main/.github/workflows/0xf_gh_runner_ubuntu_toolpacker.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Ubuntu/0xf_gh_runner_ubuntu_toolpacker.yaml"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Runners/main/.github/workflows/0xf_gh_runner_ubuntu_vanilla.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Ubuntu/0xf_gh_runner_ubuntu_vanilla.yaml"
          curl -qfsSL "https://raw.githubusercontent.com/Aseem0xff/Aseem0xff-Runners/main/.github/workflows/0xf_gh_runner_ubuntu_ng_no_ts_debug.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Ubuntu/0xf_gh_runner_ubuntu_ng_no_ts_debug.yaml"
          #Windows
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Runners/main/.github/workflows/0xf_gh_runner_windows_debug.yaml" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Windows/0xf_gh_runner_windows_debug.yaml"
          ##Self-Hosted
          mkdir -p "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/aarch64-ubuntu.dockerfile" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/aarch64-ubuntu.dockerfile"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/x86_64-ubuntu.dockerfile" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/x86_64-ubuntu.dockerfile"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/ubuntu-systemd-base.dockerfile" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/ubuntu-systemd-base.dockerfile"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/.env-example" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/.env-example"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/cron/README.md" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/cron_README.md"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/README.md" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/README.md"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/run.sh" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/run.sh"
          curl -qfsSL "https://raw.githubusercontent.com/Azathothas/Toolpacks/main/.github/runners/startup.sh" -o "$GITHUB_WORKSPACE/main/misc/Github/Runners/Self-Hosted/manager.sh"
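          # Note: under 'set +e' a failed download just leaves a missing or empty file behind; a quick
          # sanity check could be (sketch): find "$GITHUB_WORKSPACE/main/misc/Github/Runners" -type f -empty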
        continue-on-error: true
      - name: Dos2Unix Everything
        run: |
          #Presets
          set +x ; set +e
          #--------------#
          cd "$GITHUB_WORKSPACE/main"
          find . -type f ! -path "./.git/*" -exec dos2unix {} \; 2>/dev/null
        continue-on-error: true
      - name: ActionLint
        run: |
          #Presets
          set +x ; set +e
          #--------------#
          cd "$GITHUB_WORKSPACE/main"
          find ".github/workflows" -type f -name "*ml" -exec actionlint {} \;
        continue-on-error: true
      - name: Generate Repo Metadata (git-sizer)
        run: |
          #Presets
          set +x ; set +e
          #--------------#
          cd "$GITHUB_WORKSPACE/main"
          #Dust sizes
          echo '```mathematica' > "$GITHUB_WORKSPACE/main/.github/SIZE.md"
          dust -b -c -i -r -n 99999999 "$GITHUB_WORKSPACE/main" | tee -a "$GITHUB_WORKSPACE/main/.github/SIZE.md"
          dust -b -c -i -r -n 99999999 "$GITHUB_WORKSPACE/main" | tee "$GITHUB_WORKSPACE/main/.github/SIZE.txt"
          echo '```' >> "$GITHUB_WORKSPACE/main/.github/SIZE.md"
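          # 'dust' and 'actionlint' are expected to be provided by the "Install Addons" script; if either
          # is missing, the command fails quietly under 'set +e' and the step simply continues.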
        continue-on-error: true
      - name: Git Pull
        run: |
          cd "$GITHUB_WORKSPACE/main" && git pull origin main
        continue-on-error: true
      - name: Ensure Healthy Commit Nums (RESET ON >= 1000)
        run: |
          #Presets
          set +x ; set +e
          #--------------#
          pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --filter="blob:none" "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY"
          cd "$(find . -maxdepth 1 -type d -exec basename {} \; | grep -Ev '^\.$' | xargs -I {} realpath {})"
          CLONED_DIR="$(realpath .)" && export CLONED_DIR="$CLONED_DIR"
          COMMIT_NUMS="$(git rev-list --count HEAD)" && export COMMIT_NUMS="$COMMIT_NUMS"
          popd >/dev/null 2>&1 ; rm -rf "$CLONED_DIR" 2>/dev/null
          cd "$GITHUB_WORKSPACE/main"
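          # Reset strategy: 'git checkout --orphan temp' starts a branch with no parent commits, so
          # committing the current tree and force-pushing it over 'main' collapses history to one commit.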
          if [ "$COMMIT_NUMS" -gt 1000 ]; then
            echo -e "\n[+] Total number of commits exceeds 1000. ($COMMIT_NUMS)\n"
            cd "$GITHUB_WORKSPACE/main"
            git config "user.name" "Azathothas"
            git config "user.email" "[email protected]"
            git checkout --orphan temp
            git add --all --verbose && git commit -m "Purge (Re:Init)"
            git branch -D "main"
            git branch -m "main"
            git push --set-upstream origin main --force
            echo "RESET_HISTORY=YES" >> "$GITHUB_ENV"
          else
            echo -e "\n[+] Total number of commits looks healthy. ($COMMIT_NUMS)\n"
            echo "RESET_HISTORY=NO" >> "$GITHUB_ENV"
          fi
          pushd "$GITHUB_WORKSPACE/main" >/dev/null 2>&1 && git pull origin main && popd >/dev/null 2>&1
        continue-on-error: true
      - name: Get DateTime
        run: |
          # Date Time
          NEPALI_TIME="$(TZ='Asia/Kathmandu' date +'%Y-%m-%d (%I:%M:%S %p)')"
          echo "NEPALI_TIME=$NEPALI_TIME" >> "$GITHUB_ENV"
        continue-on-error: true
      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          repository: ./main
          commit_user_name: Azathothas # defaults to "github-actions[bot]"
          commit_user_email: [email protected] # defaults to "41898282+github-actions[bot]@users.noreply.github.com"
          commit_message: "✅ 🐧🧹 🖳🗑️"
          #push_options: '--force'
        continue-on-error: true
      #------------------------------------------------------------------------------------#
      - name: rClone BackUp Repo ("https://pub.ajam.dev/repos/$GITHUB_REPOSITORY")
        run: |
          # Presets
          set +x ; set +e
          #--------------#
          #touch "$HOME/.rclone.conf"
          echo "${{ secrets.RCLONE_CF_R2_PUB }}" > "$HOME/.rclone.conf"
          #chdir to Repo
          cd "$GITHUB_WORKSPACE/main"
          #Git pull
          git pull origin main --no-edit 2>/dev/null
          #Del Bloat
          rm -rf "$(pwd)/.git"
          #Upload to Pub
          echo -e "[+] Syncing $GITHUB_REPOSITORY to pub.ajam.dev/repos/$GITHUB_REPOSITORY \n"
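          # --check-first runs all checks before any transfers start, and --checksum decides what to copy
          # by comparing checksums rather than mod-time+size, so unchanged files are skipped on re-runs.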
          rclone sync "." "r2:/pub/repos/$GITHUB_REPOSITORY/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
        continue-on-error: true
      #------------------------------------------------------------------------------------#
      - name: rClone BackUp Repo (Alts)
        run: |
          # Presets
          set +x ; set +e
          #--------------#
          #touch "$HOME/.rclone.conf"
          echo "${{ secrets.RCLONE_CF_R2_PUB }}" > "$HOME/.rclone.conf"
          ##metis-os/hysp-pkgs
          pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" "https://github.com/metis-os/hysp-pkgs" && cd "./hysp-pkgs"
          GITHUB_REPOSITORY="$(git config --get remote.origin.url | awk -F'/' '{print $(NF-1) "/" $NF}' | sed 's/\.git$//')" && export GITHUB_REPOSITORY="$GITHUB_REPOSITORY"
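          # e.g. "https://github.com/metis-os/hysp-pkgs" --> "metis-os/hysp-pkgs"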
          rm -rf "$(pwd)/.git"
          echo -e "\n[+] Syncing $GITHUB_REPOSITORY to https://pub.ajam.dev/repos/$GITHUB_REPOSITORY\n" && du -sh "$(pwd)"
          rclone sync "." "r2:/pub/repos/$GITHUB_REPOSITORY/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress ; popd >/dev/null 2>&1
          ##PasseiDireto/gh-runner
          pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" "https://github.com/PasseiDireto/gh-runner" && cd "./gh-runner"
          GITHUB_REPOSITORY="$(git config --get remote.origin.url | awk -F'/' '{print $(NF-1) "/" $NF}' | sed 's/\.git$//')" && export GITHUB_REPOSITORY="$GITHUB_REPOSITORY"
          rm -rf "$(pwd)/.git"
          echo -e "\n[+] Syncing $GITHUB_REPOSITORY to https://pub.ajam.dev/repos/$GITHUB_REPOSITORY\n" && du -sh "$(pwd)"
          rclone sync "." "r2:/pub/repos/$GITHUB_REPOSITORY/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress ; popd >/dev/null 2>&1
          ##xplshn/Handyscripts
          pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" "https://github.com/xplshn/Handyscripts" && cd "./Handyscripts"
          GITHUB_REPOSITORY="$(git config --get remote.origin.url | awk -F'/' '{print $(NF-1) "/" $NF}' | sed 's/\.git$//')" && export GITHUB_REPOSITORY="$GITHUB_REPOSITORY"
          rm -rf "$(pwd)/.git"
          echo -e "\n[+] Syncing $GITHUB_REPOSITORY to https://pub.ajam.dev/repos/$GITHUB_REPOSITORY\n" && du -sh "$(pwd)"
          rclone sync "." "r2:/pub/repos/$GITHUB_REPOSITORY/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress ; popd >/dev/null 2>&1
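          # The three blocks above differ only in the source URL; they could be folded into a small helper
          # (sketch only, 'backup_repo' is a hypothetical function name):
          #   backup_repo() {
          #     local REPO_URL="$1"
          #     pushd "$(mktemp -d)" >/dev/null 2>&1 && git clone --depth="1" --filter="blob:none" "$REPO_URL" && cd "$(basename "$REPO_URL")"
          #     GITHUB_REPOSITORY="$(git config --get remote.origin.url | awk -F'/' '{print $(NF-1) "/" $NF}' | sed 's/\.git$//')"
          #     rm -rf "$(pwd)/.git"
          #     rclone sync "." "r2:/pub/repos/$GITHUB_REPOSITORY/" --user-agent="$USER_AGENT" --check-first --checksum --copy-links --fast-list --progress
          #     popd >/dev/null 2>&1
          #   }
          #   backup_repo "https://github.com/metis-os/hysp-pkgs"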
        continue-on-error: true
      #------------------------------------------------------------------------------------#
      # - name: rClone Generate Dynamic (https://pub.ajam.dev/utils/)
      #   run: |
      #     # Presets
      #     set +x ; set +e
      #     #--------------#
      #     ##Mullvad
      #     #arm64 deb
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     wget --quiet --show-progress "$(curl -qfsSL "https://api.github.com/repos/mullvad/mullvadvpn-app/releases" | jq -r '.[] | select(.prerelease == true) | .assets[] | select(.name | test("arm64.deb")) | .browser_download_url' | grep -iv '\.asc$' | head -n 1)" -O "./mullvad.deb"
      #     rclone copyto "./mullvad.deb" "r2:/pub/utils/mullvad/mullvad_linux_arm64.deb" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1
      #     #x86_64 deb
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     wget --quiet --show-progress "$(curl -qfsSL "https://api.github.com/repos/mullvad/mullvadvpn-app/releases" | jq -r '.[] | select(.prerelease == true) | .assets[] | select(.name | test("amd64.deb")) | .browser_download_url' | grep -iv '\.asc$' | head -n 1)" -O "./mullvad.deb"
      #     rclone copyto "./mullvad.deb" "r2:/pub/utils/mullvad/mullvad_linux_x86_64.deb" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     #Install
      #     sudo dpkg -i "./mullvad.deb"
      #     #Generate relays file
      #     mullvad relay update && sleep 10
      #     mullvad relay list > "./relays_cli.txt"
      #     rclone copyto "./relays_cli.txt" "r2:/pub/utils/mullvad/relays_cli.txt" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     cat "/opt/Mullvad VPN/resources/relays.json" | jq . > "./relays_cli.json"
      #     rclone copyto "./relays_cli.json" "r2:/pub/utils/mullvad/relays_cli.json" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     #From api
      #     curl -qfsSL "https://api.mullvad.net/app/v1/relays" | jq . > "./relays_api.json"
      #     rclone copyto "./relays_api.json" "r2:/pub/utils/mullvad/relays_api.json" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     #Proxies : https://mullvad.net/en/help/socks5-proxy (https://github.com/maximko/mullvad-socks-list)
      #     curl -qfsSL "https://raw.githubusercontent.com/maximko/mullvad-socks-list/list/mullvad-socks-list.txt" -o "./proxies.txt"
      #     rclone copyto "./proxies.txt" "r2:/pub/utils/mullvad/proxies.txt" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1
      #     #--------------#
      #   continue-on-error: true
      # #------------------------------------------------------------------------------------#
      # - name: rClone Generate Dynamic (https://pub.ajam.dev/utils/debian)
      #   run: |
      #     # Presets
      #     set +x ; set +e
      #     #--------------#
      #     ##Fetch (Cloud Images: https://cloud.debian.org/images/cloud/)
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     CLOUD_DIR_DEBIAN="$(realpath .)" && export CLOUD_DIR_DEBIAN="$CLOUD_DIR_DEBIAN"
      #     ##amd64
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.json" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-amd64.json" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.qcow2" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-amd64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.raw" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-amd64.raw" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.tar.xz" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-amd64.tar.xz" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.json" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-amd64.json" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-amd64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.raw" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-amd64.raw" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.tar.xz" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-amd64.tar.xz" &
      #     ##arm64
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.json" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-arm64.json" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.qcow2" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-arm64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.raw" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-arm64.raw" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.tar.xz" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-arm64.tar.xz" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.json" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-arm64.json" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.qcow2" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-arm64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.raw" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-arm64.raw" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.tar.xz" -O "$CLOUD_DIR_DEBIAN/debian-12-cloud-baremetal-arm64.tar.xz" &
      #     ##Info
      #     echo -e "[+] Source: https://cloud.debian.org/images/cloud/" > "$CLOUD_DIR_DEBIAN/INFO.txt"
      #     echo -e "cloud --> Cloud Image without drivers for physical hardware" >> "$CLOUD_DIR_DEBIAN/INFO.txt"
      #     echo -e "cloud-baremetal --> Cloud Image including drivers for physical hardware" >> "$CLOUD_DIR_DEBIAN/INFO.txt"
      #     wait ; echo ; du -sh * ; du -sh "$CLOUD_DIR_DEBIAN"
      #     cd "$CLOUD_DIR_DEBIAN" && rclone sync "." "r2:/pub/utils/debian/cloud" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1 ; rm -rf "$CLOUD_DIR_DEBIAN"
      #     #--------------#
      #   continue-on-error: true
      # #------------------------------------------------------------------------------------#
      # - name: rClone Generate Dynamic (https://pub.ajam.dev/utils/ubuntu)
      #   run: |
      #     # Presets
      #     set +x ; set +e
      #     #--------------#
      #     ##Fetch (Cloud Images: https://cloud-images.ubuntu.com/noble/current/)
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     CLOUD_DIR_UBUNTU="$(realpath .)" && export CLOUD_DIR_UBUNTU="$CLOUD_DIR_UBUNTU"
      #     ##amd64
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.manifest" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-amd64.txt" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-amd64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.tar.gz" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-amd64.tar.gz" &
      #     ##arm64
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-arm64.manifest" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-arm64.txt" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-arm64.img" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-arm64.qcow2" &
      #     wget --quiet --show-progress --progress="dot:giga" --user-agent="$USER_AGENT" "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-arm64.tar.gz" -O "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-arm64.tar.gz" &
      #     ##Info
      #     echo -e "[+] Source: https://cloud-images.ubuntu.com/noble/current/" > "$CLOUD_DIR_UBUNTU/INFO.txt"
      #     echo -e "Distro --> Ubuntu Server 24.04 LTS (Noble Numbat)" >> "$CLOUD_DIR_UBUNTU/INFO.txt"
      #     wait ; echo
      #     qemu-img convert -p -f qcow2 "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-amd64.qcow2" -O raw "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-amd64.raw"
      #     qemu-img convert -p -f qcow2 "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-arm64.qcow2" -O raw "$CLOUD_DIR_UBUNTU/ubuntu-24.04-lts-arm64.raw"
      #     du -sh * ; du -sh "$CLOUD_DIR_UBUNTU"
      #     cd "$CLOUD_DIR_UBUNTU" && rclone sync "." "r2:/pub/utils/ubuntu/cloud" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1 ; rm -rf "$CLOUD_DIR_UBUNTU"
      #     #--------------#
      #   continue-on-error: true
      # #------------------------------------------------------------------------------------#
      # - name: rClone Generate Dynamic (https://pub.ajam.dev/utils/kata-containers)
      #   run: |
      #     # Presets
      #     set +x ; set +e
      #     #--------------#
      #     ##Fetch (amd64)
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     eget "https://github.com/kata-containers/kata-containers" --asset "static" --asset "amd64" --asset "tar" --download-only
      #     ouch decompress "./"* --yes
      #     find . -type d -name '*bin*' ! -name 'build-bins' -print0 | xargs -0 -I {} sh -c 'mkdir -p ./build-bins && cp -r {}/* ./build-bins/'
      #     find . -type f -exec strip {} \; 2>/dev/null
      #     file "./build-bins/"* && du -sh "./build-bins/"*
      #     #copy
      #     rsync -av --copy-links "./build-bins/" "./kata-bins"
      #     rsync -av --copy-links "$(find . -type d -path '*/opt/kata' -print -quit)/" "./kata-x86_64-Linux"
      #     #Archive
      #     7z a -ttar -mx="9" -mmt="$(($(nproc)+1))" -bt "$SYSTMP/kata-bins.tar" "./kata-bins" 2>/dev/null
      #     7z a -ttar -mx="9" -mmt="$(($(nproc)+1))" -bt "$SYSTMP/kata-x86_64-Linux.tar" "./kata-x86_64-Linux" 2>/dev/null
      #     #rClone
      #     rclone sync "./kata-x86_64-Linux/" "r2:/pub/utils/kata-x86_64-Linux/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     rclone copyto "$SYSTMP/kata-bins.tar" "r2:/pub/utils/kata-x86_64-Linux/kata-bins.tar" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     rclone copyto "$SYSTMP/kata-x86_64-Linux.tar" "r2:/pub/utils/kata-x86_64-Linux/kata-x86_64-Linux.tar" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1 ; find "$SYSTMP" -type d -name '*kata*' -exec rm -rf {} \; 2>/dev/null
      #     #--------------#
      #     ##Fetch (arm64)
      #     pushd "$(mktemp -d)" >/dev/null 2>&1
      #     eget "https://github.com/kata-containers/kata-containers" --asset "static" --asset "arm64" --asset "tar" --download-only
      #     ouch decompress "./"* --yes
      #     find . -type d -name '*bin*' ! -name 'build-bins' -print0 | xargs -0 -I {} sh -c 'mkdir -p ./build-bins && cp -r {}/* ./build-bins/'
      #     find . -type f -exec aarch64-linux-gnu-strip {} \; 2>/dev/null
      #     file "./build-bins/"* && du -sh "./build-bins/"*
      #     #copy
      #     rsync -av --copy-links "./build-bins/" "./kata-bins"
      #     rsync -av --copy-links "$(find . -type d -path '*/opt/kata' -print -quit)/" "./kata-aarch64-Linux"
      #     #Archive
      #     7z a -ttar -mx="9" -mmt="$(($(nproc)+1))" -bt "$SYSTMP/kata-bins.tar" "./kata-bins" 2>/dev/null
      #     7z a -ttar -mx="9" -mmt="$(($(nproc)+1))" -bt "$SYSTMP/kata-aarch64-Linux.tar" "./kata-aarch64-Linux" 2>/dev/null
      #     #rClone
      #     rclone sync "./kata-aarch64-Linux/" "r2:/pub/utils/kata-aarch64-Linux/" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     rclone copyto "$SYSTMP/kata-bins.tar" "r2:/pub/utils/kata-aarch64-Linux/kata-bins.tar" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     rclone copyto "$SYSTMP/kata-aarch64-Linux.tar" "r2:/pub/utils/kata-aarch64-Linux/kata-aarch64-Linux.tar" --user-agent="$USER_AGENT" --buffer-size="100M" --s3-upload-concurrency="500" --s3-chunk-size="100M" --multi-thread-streams="500" --checkers="2000" --transfers="1000" --check-first --checksum --copy-links --fast-list --progress
      #     popd >/dev/null 2>&1 ; find "$SYSTMP" -type d -name '*kata*' -exec rm -rf {} \; 2>/dev/null
      #     #--------------#
      #   continue-on-error: true
      #------------------------------------------------------------------------------------#