diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 10ba039..e0fb706 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -1,11 +1,11 @@
 name: CI
 
 on:
-  push:
-    branches:
-      - dev
-  # schedule:
-  #   - cron: "0 2 * * 1"
+  # push:
+  #   branches:
+  #     - dev
+  schedule:
+    - cron: "0 20 * * 1"
 
 jobs:
   build:
@@ -33,7 +33,11 @@ jobs:
       - name: 'Archive results, logs, report'
         working-directory: ${{runner.workspace}}/test_suite
         run: |
-          zip -r artifact_"`date +"%m_%d_%Y"`" report_generation/files/logs report_generation/files/results report.txt
+          zip -r report_"`date +"%m_%d_%Y"`" \
+            report_generation/files/logs \
+            report_generation/files/results \
+            report_generation/files/bmdb_models \
+            report.txt
 
       - name: Create Release and Upload
         id: create_release
@@ -41,8 +45,8 @@
         env:
           GITHUB_TOKEN: ${{ secrets.PAT }}
         with:
-          tag_name: artifact_${{steps.date.outputs.format}}
-          release_name: artifact_${{steps.date.outputs.format}}
+          tag_name: report_${{steps.date.outputs.format}}
+          release_name: report_${{steps.date.outputs.format}}
           draft: false
           prerelease: false
           upload_url_type: application/zip
@@ -54,14 +58,7 @@
         env:
           GITHUB_TOKEN: ${{ secrets.PAT }}
         with:
           upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./artifact_${{steps.date.outputs.format}}.zip
-          asset_name: artifact_${{steps.date.outputs.format}}.zip
+          asset_path: ./report_${{steps.date.outputs.format}}.zip
+          asset_name: report_${{steps.date.outputs.format}}.zip
           asset_content_type: application/zip
-      # - name: 'Upload artifacts'
-      #   uses: actions/upload-artifact@v2
-      #   with:
-      #     name: artifact_${{steps.date.outputs.format}}
-      #     path: artifact_${{steps.date.outputs.format}}.zip
-      #     if-no-files-found: warn
-
diff --git a/report_generation/sbml/sbml_fetcher.py b/report_generation/sbml/sbml_fetcher.py
index 627ec8f..ce06532 100644
--- a/report_generation/sbml/sbml_fetcher.py
+++ b/report_generation/sbml/sbml_fetcher.py
@@ -37,7 +37,7 @@ def soup_scraper(model, headers):
     return preview_url
 
 def download_sbml():
-    for model in create_model_list(10, 0, -1):
+    for model in create_model_list(1000, 0, -1):
         try:
             sbml_file_link = Config.BASE_URL + soup_scraper(model, headers=headers)
             urllib.request.urlretrieve(sbml_file_link, os.path.join(