diff --git a/.github/workflows/pylint.yaml b/.github/workflows/pylint.yaml
deleted file mode 100644
index 77b07b8..0000000
--- a/.github/workflows/pylint.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Run pylint
-
-on: [push, pull_request]
-
-jobs:
-  pylint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v1
-      - name: Set up Python 3.6
-        uses: actions/setup-python@v1
-        with:
-          python-version: 3.6
-      - uses: actions/cache@v2
-        with:
-          path: ~/pip-cache
-          key: pip-3.6-${{ github.sha }}
-          # allow cache hits from previous runs of the current branch,
-          # parent branch, then upstream branches, in that order
-          restore-keys: |
-            pip-3.6-
-      - name: Install Requirements
-        run: |
-          python -m pip install --upgrade pip
-          pip --cache-dir ~/pip-cache install pylint
-      - name: Run Pylint
-        run: |
-          cd ./src/scripts/ && pylint --errors-only *.py
diff --git a/.github/workflows/python-linters.yaml b/.github/workflows/python-linters.yaml
new file mode 100644
index 0000000..dd60301
--- /dev/null
+++ b/.github/workflows/python-linters.yaml
@@ -0,0 +1,74 @@
+name: Run Python linters
+
+on: [push, pull_request]
+
+jobs:
+  python-files:
+    runs-on: ubuntu-latest
+    outputs:
+      filelist: ${{ steps.python-files.outputs.filelist }}
+    steps:
+      - uses: actions/checkout@v2
+      - id: python-files
+        run: |
+          echo "::set-output name=filelist::$(find . -type f -exec awk ' /^#!.*python/{print FILENAME} {nextfile}' {} + | tr '\n' ' ')"
+
+  pylint:
+    runs-on: ubuntu-latest
+    needs: [python-files]
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Set up Python 3.6
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.6
+
+      - uses: actions/cache@v2
+        with:
+          path: ~/pip-cache
+          key: pip-3.6-${{ github.sha }}
+          # allow cache hits from previous runs of the current branch,
+          # parent branch, then upstream branches, in that order
+          restore-keys: |
+            pip-3.6-
+      - name: Install Requirements
+        run: |
+          python -m pip install --upgrade pip
+          pip --cache-dir ~/pip-cache install pylint
+      - name: Run Pylint
+        env:
+          PYTHON_FILES: ${{ needs.python-files.outputs.filelist }}
+        run: |
+          export PYTHONPATH=$PYTHONPATH:$PWD/src/scripts
+          pylint --errors-only $PYTHON_FILES
+
+  flake8:
+    runs-on: ubuntu-latest
+    needs: [python-files]
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Set up Python 3.6
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.6
+
+      - uses: actions/cache@v2
+        with:
+          path: ~/pip-cache
+          key: pip-3.6-${{ github.sha }}
+          # allow cache hits from previous runs of the current branch,
+          # parent branch, then upstream branches, in that order
+          restore-keys: |
+            pip-3.6-
+      - name: Install Requirements
+        run: |
+          python -m pip install --upgrade pip
+          pip --cache-dir ~/pip-cache install flake8
+      - name: Run flake8
+        env:
+          PYTHON_FILES: ${{ needs.python-files.outputs.filelist }}
+        run: |
+          export PYTHONPATH=$PYTHONPATH:$PWD/src/scripts
+          flake8 --select F $PYTHON_FILES
diff --git a/src/scripts/lsf_status.py b/src/scripts/lsf_status.py
index bed31e5..771c789 100755
--- a/src/scripts/lsf_status.py
+++ b/src/scripts/lsf_status.py
@@ -48,7 +48,6 @@
 import csv
 
 sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 
-import blah
 
 cache_timeout = 60
@@ -253,7 +252,7 @@ def bjobs(jobid=""):
 
     starttime = time.time()
     log("Starting bjobs.")
-    if jobid is not "":
+    if jobid != "":
         bjobs_process = subprocess.Popen(("%s -UF %s" % (bjobs, jobid)), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, shell=True)
     else:
         bjobs_process = subprocess.Popen(("%s -UF -a" % bjobs), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, shell=True)
@@ -262,9 +261,9 @@ def bjobs(jobid=""):
 
     bjobs_process_stdout = to_str(bjobs_process_stdout)
     bjobs_process_stderr = to_str(bjobs_process_stderr)
-    if bjobs_process_stderr is "":
+    if bjobs_process_stderr == "":
         result = parse_bjobs_fd(bjobs_process_stdout.splitlines())
-    elif jobid is not "":
+    elif jobid != "":
         result = {jobid: {'BatchJobId': '"%s"' % jobid, 'JobStatus': '3', 'ExitCode': ' 0'}}
     else:
         result = {}
@@ -276,7 +275,7 @@ def bjobs(jobid=""):
         raise Exception("bjobs failed with exit code %s" % str(exit_code))
 
     # If the job has completed...
-    if jobid is not "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
+    if jobid != "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
         # Get the finished job stats and update the result
         finished_job_stats = get_finished_job_stats(jobid)
         result[jobid].update(finished_job_stats)
diff --git a/src/scripts/pbs_status.py b/src/scripts/pbs_status.py
index cfe2e2f..6887fa3 100755
--- a/src/scripts/pbs_status.py
+++ b/src/scripts/pbs_status.py
@@ -257,7 +257,7 @@ def qstat(jobid=""):
         raise Exception("qstat failed with exit code %s" % str(qstat_proc.returncode))
 
     # If the job has completed...
-    if jobid is not "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
+    if jobid != "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
         # Get the finished job stats and update the result
         finished_job_stats = get_finished_job_stats(jobid)
         result[jobid].update(finished_job_stats)
@@ -340,13 +340,13 @@ def get_finished_job_stats(jobid):
     # Slurm can return more than 1 row, for some odd reason.
     # so sum up relevant values
     for row in reader:
-        if row["AveCPU"] is not "":
+        if row["AveCPU"] != "":
             try:
                 return_dict['RemoteUserCpu'] += convert_cpu_to_seconds(row["AveCPU"]) * int(row["AllocCPUS"])
             except:
                 log("Failed to parse CPU usage for job id %s: %s, %s" % (jobid, row["AveCPU"], row["AllocCPUS"]))
                 raise
-        if row["MaxRSS"] is not "":
+        if row["MaxRSS"] != "":
             # Remove the trailing [KMGTP] and scale the value appropriately
             # Note: We assume that all values will have a suffix, and we
             # want the value in kilos.
@@ -365,7 +365,7 @@ def get_finished_job_stats(jobid):
             except:
                 log("Failed to parse memory usage for job id %s: %s" % (jobid, row["MaxRSS"]))
                 raise
-        if row["ExitCode"] is not "":
+        if row["ExitCode"] != "":
             try:
                 return_dict["ExitCode"] = int(row["ExitCode"].split(":")[0])
             except:
diff --git a/src/scripts/slurm_status.py b/src/scripts/slurm_status.py
index 83e29d4..ac4c1a6 100755
--- a/src/scripts/slurm_status.py
+++ b/src/scripts/slurm_status.py
@@ -259,7 +259,7 @@ def call_squeue(jobid="", cluster=""):
         raise Exception("squeue failed with exit code %s" % str(squeue_proc.returncode))
 
     # If the job has completed...
-    if jobid is not "" and jobid in result and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
+    if jobid != "" and jobid in result and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
         # Get the finished job stats and update the result
         finished_job_stats = get_finished_job_stats(jobid, cluster)
         result[jobid].update(finished_job_stats)
@@ -339,20 +339,20 @@ def get_finished_job_stats(jobid, cluster):
     # Slurm can return more than 1 row, for some odd reason.
     # so sum up relevant values
     for row in reader:
-        if row["UserCPU"] is not "":
+        if row["UserCPU"] != "":
             try:
                 return_dict['RemoteUserCpu'] += convert_cpu_to_seconds(row["UserCPU"])
             except:
                 log("Failed to parse CPU usage for job id %s: %s" % (jobid, row["UserCPU"]))
                 raise
-        if row["SystemCPU"] is not "":
+        if row["SystemCPU"] != "":
             try:
                 return_dict['RemoteSysCpu'] += convert_cpu_to_seconds(row["SystemCPU"])
             except:
                 log("Failed to parse CPU usage for job id %s: %s" % (jobid, row["SystemCPU"]))
                 raise
 
-        if row["MaxRSS"] is not "":
+        if row["MaxRSS"] != "":
             # Remove the trailing [KMGTP] and scale the value appropriately
             # Note: We assume that all values will have a suffix, and we
             # want the value in kilos.
@@ -377,7 +377,7 @@ def get_finished_job_stats(jobid, cluster):
             except:
                 log("Failed to parse memory usage for job id %s: %s" % (jobid, row["MaxRSS"]))
                 raise
-        if row["ExitCode"] is not "":
+        if row["ExitCode"] != "":
             try:
                 return_dict["ExitCode"] = int(row["ExitCode"].split(":")[0])
             except: