🌴 CV - 8338 - Data Ingestion #526

Workflow file for this run

name: CI
on: [push, pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
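      # Toolchain setup: Ruby for the Middleman site build, Node + pnpm for
      # front-end dependencies.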
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: "3.3"
          bundler-cache: true
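      # (bundler-cache above also runs "bundle install", so gems are ready
      # before the build step.)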
      - uses: pnpm/action-setup@v4
        with:
          version: 8
      - uses: actions/setup-node@v4
        with:
          node-version: "20"
          cache: pnpm
      # Node dependencies (installed with pnpm)
      - name: pnpm Install
        run: pnpm install
      # Build: pushes to main build with the production environment;
      # every other ref builds with staging.
      - name: Set production DEPLOY_ENV
        if: github.ref == 'refs/heads/main'
        run: echo "DEPLOY_ENV=production" >> $GITHUB_ENV
      - name: Set staging DEPLOY_ENV
        if: github.ref != 'refs/heads/main'
        run: echo "DEPLOY_ENV=staging" >> $GITHUB_ENV
      - name: Build
        env:
          DOCS_API_KEY: ${{ secrets.DOCS_API_KEY }}
        run: bundle exec middleman build --clean --verbose --environment=${{ env.DEPLOY_ENV }}
      # Lint
      - name: Lint
        run: bundle exec rake lint
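      # The built site is handed off to the deploy job as an artifact, but
      # only on refs that can deploy (main and staging).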
      # Artifact
      - uses: actions/upload-artifact@v4
        if: success() && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging')
        with:
          name: build
          path: ./build
      # Push production builds to the "build-history" branch to maintain a
      # history of built and deployed versions.
      - name: Commit Build History
        if: success() && github.ref == 'refs/heads/main'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: yarn run gh-pages --dist ./build/ --branch build-history --user "$GITHUB_ACTOR <[email protected]>" --repo "https://${GITHUB_ACTOR}:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"
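      # Note: "yarn run" here resolves the gh-pages CLI from node_modules/.bin,
      # so gh-pages is assumed to be a dev dependency installed by "pnpm install".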
  deploy:
    if: success() && github.repository_owner == 'NREL' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging')
    needs: build
    runs-on: ubuntu-latest
    container:
      image: rclone/rclone:1.57.0
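    # Running in a pinned rclone image keeps sync behavior identical from
    # run to run.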
    steps:
      # Deploy to S3 bucket.
      - uses: actions/download-artifact@v4
        with:
          name: build
          path: ./build
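      # S3_DEST uses rclone's connection-string syntax
      # (":s3,env_auth=true:<bucket>/<path>") to define an on-the-fly S3 remote;
      # env_auth=true makes rclone read credentials from the AWS_* variables,
      # so no rclone.conf is required.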
      - name: Set production S3_DEST
        if: github.ref == 'refs/heads/main'
        run: echo "S3_DEST=:s3,env_auth=true:${{ secrets.PRODUCTION_BUCKET_NAME }}/public/production/" >> $GITHUB_ENV
      - name: Set staging S3_DEST
        if: github.ref != 'refs/heads/main'
        run: echo "S3_DEST=:s3,env_auth=true:${{ secrets.STAGING_BUCKET_NAME }}/public/staging/" >> $GITHUB_ENV
      - name: Deploy
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
        run: |
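          # Flags used throughout: --checksum compares by hash rather than
          # modtime/size, --no-update-modtime skips rewriting object modtimes,
          # and --s3-no-check-bucket / --s3-no-head avoid extra S3 API calls
          # since the bucket already exists.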
          # Sync all cache-busted assets with long cache-control expirations.
          rclone \
            copy \
            --verbose \
            --checksum \
            --no-update-modtime \
            --s3-no-check-bucket \
            --s3-no-head \
            --header-upload "Cache-Control: public, max-age=31536000, immutable" \
            --include "/assets/**" \
            ./build/ \
            "$S3_DEST"
          # Sync the remaining files, disallowing caching on those.
          rclone \
            copy \
            --verbose \
            --checksum \
            --no-update-modtime \
            --s3-no-check-bucket \
            --s3-no-head \
            --header-upload "Cache-Control: no-cache, max-age=0, must-revalidate" \
            ./build/ \
            "$S3_DEST"
          # Run the sync one more time to delete old files. Keep old asset
          # files around, so that if old HTML pages continue to load for a few
          # minutes, they can still load older assets.
          rclone \
            sync \
            --verbose \
            --checksum \
            --no-update-modtime \
            --s3-no-check-bucket \
            --s3-no-head \
            --exclude "/assets/**" \
            ./build/ \
            "$S3_DEST"