Skip to content

Commit

Permalink
Merge pull request htcondor#58 from yongleyuan/HTCONDOR-522.update-pylint-gha
Browse files Browse the repository at this point in the history

Update the blahp Python linting GitHub Action (HTCONDOR-522)
  • Loading branch information
brianhlin authored Jul 8, 2021
2 parents 0c3064c + 0b55aca commit bef1de5
Show file tree
Hide file tree
Showing 5 changed files with 87 additions and 42 deletions.
28 changes: 0 additions & 28 deletions .github/workflows/pylint.yaml

This file was deleted.

74 changes: 74 additions & 0 deletions .github/workflows/python-linters.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# CI workflow: run Python linters (pylint and flake8) on every push and PR.
# A discovery job finds all Python scripts by shebang; the two linter jobs
# consume that file list in parallel.
name: Run Python linters

on: [push, pull_request]

jobs:
  # Discover Python files by their shebang line and publish the list as a
  # job output for the linter jobs below.
  python-files:
    runs-on: ubuntu-latest
    outputs:
      filelist: ${{ steps.python-files.outputs.filelist }}
    steps:
      # NOTE(review): actions/checkout@v2 (and setup-python@v2 / cache@v2
      # below) run on a deprecated Node runtime; upgrade to the current major
      # versions when the pinned Python 3.6 requirement is revisited.
      - uses: actions/checkout@v2
      - id: python-files
        # List every file whose shebang mentions python, space-separated.
        # FIX: the "::set-output" workflow command is deprecated and disabled
        # by GitHub; append "name=value" to $GITHUB_OUTPUT instead.
        run: |
          echo "filelist=$(find . -type f -exec awk ' /^#!.*python/{print FILENAME} {nextfile}' {} + | tr '\n' ' ')" >> "$GITHUB_OUTPUT"

  pylint:
    runs-on: ubuntu-latest
    needs: [python-files]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 3.6
        uses: actions/setup-python@v2
        with:
          python-version: 3.6

      - uses: actions/cache@v2
        with:
          path: ~/pip-cache
          key: pip-3.6-${{ github.sha }}
          # allow cache hits from previous runs of the current branch,
          # parent branch, then upstream branches, in that order
          restore-keys: |
            pip-3.6-

      - name: Install Requirements
        run: |
          python -m pip install --upgrade pip
          pip --cache-dir ~/pip-cache install pylint

      - name: Run Pylint
        env:
          PYTHON_FILES: ${{ needs.python-files.outputs.filelist }}
        run: |
          export PYTHONPATH=$PYTHONPATH:$PWD/src/scripts
          pylint --errors-only $PYTHON_FILES

  flake8:
    runs-on: ubuntu-latest
    needs: [python-files]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python 3.6
        uses: actions/setup-python@v2
        with:
          python-version: 3.6

      - uses: actions/cache@v2
        with:
          path: ~/pip-cache
          key: pip-3.6-${{ github.sha }}
          # allow cache hits from previous runs of the current branch,
          # parent branch, then upstream branches, in that order
          restore-keys: |
            pip-3.6-

      - name: Install Requirements
        run: |
          python -m pip install --upgrade pip
          pip --cache-dir ~/pip-cache install flake8

      - name: Run flake8
        env:
          PYTHON_FILES: ${{ needs.python-files.outputs.filelist }}
        run: |
          export PYTHONPATH=$PYTHONPATH:$PWD/src/scripts
          # --select F: report only pyflakes error-class findings, matching
          # the pylint job's errors-only policy.
          flake8 --select F $PYTHON_FILES
9 changes: 4 additions & 5 deletions src/scripts/lsf_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@
import csv

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
import blah

cache_timeout = 60

Expand Down Expand Up @@ -253,7 +252,7 @@ def bjobs(jobid=""):
starttime = time.time()

log("Starting bjobs.")
if jobid is not "":
if jobid != "":
bjobs_process = subprocess.Popen(("%s -UF %s" % (bjobs, jobid)), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, shell=True)
else:
bjobs_process = subprocess.Popen(("%s -UF -a" % bjobs), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, shell=True)
Expand All @@ -262,9 +261,9 @@ def bjobs(jobid=""):
bjobs_process_stdout = to_str(bjobs_process_stdout)
bjobs_process_stderr = to_str(bjobs_process_stderr)

if bjobs_process_stderr is "":
if bjobs_process_stderr == "":
result = parse_bjobs_fd(bjobs_process_stdout.splitlines())
elif jobid is not "":
elif jobid != "":
result = {jobid: {'BatchJobId': '"%s"' % jobid, 'JobStatus': '3', 'ExitCode': ' 0'}}
else:
result = {}
Expand All @@ -276,7 +275,7 @@ def bjobs(jobid=""):
raise Exception("bjobs failed with exit code %s" % str(exit_code))

# If the job has completed...
if jobid is not "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
if jobid != "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
# Get the finished job stats and update the result
finished_job_stats = get_finished_job_stats(jobid)
result[jobid].update(finished_job_stats)
Expand Down
8 changes: 4 additions & 4 deletions src/scripts/pbs_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ def qstat(jobid=""):
raise Exception("qstat failed with exit code %s" % str(qstat_proc.returncode))

# If the job has completed...
if jobid is not "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
if jobid != "" and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
# Get the finished job stats and update the result
finished_job_stats = get_finished_job_stats(jobid)
result[jobid].update(finished_job_stats)
Expand Down Expand Up @@ -340,13 +340,13 @@ def get_finished_job_stats(jobid):
# Slurm can return more than 1 row, for some odd reason.
# so sum up relevant values
for row in reader:
if row["AveCPU"] is not "":
if row["AveCPU"] != "":
try:
return_dict['RemoteUserCpu'] += convert_cpu_to_seconds(row["AveCPU"]) * int(row["AllocCPUS"])
except:
log("Failed to parse CPU usage for job id %s: %s, %s" % (jobid, row["AveCPU"], row["AllocCPUS"]))
raise
if row["MaxRSS"] is not "":
if row["MaxRSS"] != "":
# Remove the trailing [KMGTP] and scale the value appropriately
# Note: We assume that all values will have a suffix, and we
# want the value in kilos.
Expand All @@ -365,7 +365,7 @@ def get_finished_job_stats(jobid):
except:
log("Failed to parse memory usage for job id %s: %s" % (jobid, row["MaxRSS"]))
raise
if row["ExitCode"] is not "":
if row["ExitCode"] != "":
try:
return_dict["ExitCode"] = int(row["ExitCode"].split(":")[0])
except:
Expand Down
10 changes: 5 additions & 5 deletions src/scripts/slurm_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def call_squeue(jobid="", cluster=""):
raise Exception("squeue failed with exit code %s" % str(squeue_proc.returncode))

# If the job has completed...
if jobid is not "" and jobid in result and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
if jobid != "" and jobid in result and "JobStatus" in result[jobid] and (result[jobid]["JobStatus"] == '4' or result[jobid]["JobStatus"] == '3'):
# Get the finished job stats and update the result
finished_job_stats = get_finished_job_stats(jobid, cluster)
result[jobid].update(finished_job_stats)
Expand Down Expand Up @@ -339,20 +339,20 @@ def get_finished_job_stats(jobid, cluster):
# Slurm can return more than 1 row, for some odd reason.
# so sum up relevant values
for row in reader:
if row["UserCPU"] is not "":
if row["UserCPU"] != "":
try:
return_dict['RemoteUserCpu'] += convert_cpu_to_seconds(row["UserCPU"])
except:
log("Failed to parse CPU usage for job id %s: %s" % (jobid, row["UserCPU"]))
raise

if row["SystemCPU"] is not "":
if row["SystemCPU"] != "":
try:
return_dict['RemoteSysCpu'] += convert_cpu_to_seconds(row["SystemCPU"])
except:
log("Failed to parse CPU usage for job id %s: %s" % (jobid, row["SystemCPU"]))
raise
if row["MaxRSS"] is not "":
if row["MaxRSS"] != "":
# Remove the trailing [KMGTP] and scale the value appropriately
# Note: We assume that all values will have a suffix, and we
# want the value in kilos.
Expand All @@ -377,7 +377,7 @@ def get_finished_job_stats(jobid, cluster):
except:
log("Failed to parse memory usage for job id %s: %s" % (jobid, row["MaxRSS"]))
raise
if row["ExitCode"] is not "":
if row["ExitCode"] != "":
try:
return_dict["ExitCode"] = int(row["ExitCode"].split(":")[0])
except:
Expand Down

0 comments on commit bef1de5

Please sign in to comment.