crawler test #13

Workflow file for this run:

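# Crawls GT course data on every push to the crawler branch and on a
# 3-hour schedule, then copies the resulting ./data folder into the
# gt-course-vis repository.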
name: Crawling
on:
  push:
    branches:
      - crawler
  schedule:
    - cron: "0 */3 * * *"
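# "0 */3 * * *" fires at minute 0 of every third hour (UTC). The
# concurrency group below keeps crawl runs from overlapping: while one
# run is in progress, GitHub holds at most one newer pending run in
# the group and cancels any older pending runs.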
concurrency:
  group: crawling
jobs:
  crawling:
    runs-on: ubuntu-latest
    steps:
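      # NOTE: the checkout/install/crawl/revise/upload steps below are
      # currently disabled; only the Copy Course Data step runs.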
      # - name: Checkout
      #   uses: actions/checkout@v2
      #   with:
      #     ref: crawler
      #     persist-credentials: false
      # - name: Checkout data
      #   uses: actions/checkout@v2
      #   with:
      #     persist-credentials: false
      #     ref: crawler-gh-pages
      #     path: ./data
      # - name: Install
      #   run: yarn install --frozen-lockfile
      # - name: Pip
      #   uses: actions/setup-python@v4
      #   with:
      #     python-version: "3.9"
      #     cache: "pip" # caching pip dependencies
      # - name: Pip Install
      #   run: pip install -r requirements.txt
      # - name: Crawling
      #   run: yarn start
      #   env:
      #     LOG_FORMAT: json
      #     NUM_TERMS: 4
      #     ALWAYS_SCRAPE_CURRENT_TERM: 1
      #     DETAILS_CONCURRENCY: 256
      #     DATA_FOLDER: ./data
      #     NODE_EXTRA_CA_CERTS: ${{ github.workspace }}/intermediate.pem
      # - name: Revision
      #   run: python ./src/Revise.py
      # - name: Upload
      #   uses: JamesIves/github-pages-deploy-action@releases/v4
      #   with:
      #     branch: crawler-gh-pages
      #     folder: ./data
      #     clean: true
      #     single-commit: true
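      # Pushes ./data from this workspace to rohan-bansal/gt-course-vis
      # (main branch, under src/lib/data). The personal access token in
      # CRAWLER_DEPLOY_PERSONAL_ACCESS_TOKEN needs write access to the
      # destination repository.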
      - name: Copy Course Data
        uses: nkoppel/[email protected]
        env:
          API_TOKEN_GITHUB: ${{ secrets.CRAWLER_DEPLOY_PERSONAL_ACCESS_TOKEN }}
        with:
          source-files: "./data"
          destination-username: "rohan-bansal"
          destination-repository: "gt-course-vis"
          destination-branch: "main"
          destination-directory: "src/lib/data"
          commit-email: "[email protected]"