Docstring inheritance using docrep
guillaumeeb committed Mar 26, 2018
1 parent ad2ddfa commit b0cae6d
Showing 4 changed files with 15 additions and 61 deletions.
3 changes: 3 additions & 0 deletions dask_jobqueue/core.py
@@ -4,15 +4,18 @@
 import socket
 import os
 import sys
+import docrep
 
 from distributed.utils import tmpfile, ignoring, get_ip_interface, parse_bytes
 from distributed import LocalCluster
 
 dirname = os.path.dirname(sys.executable)
 
 logger = logging.getLogger(__name__)
+docstrings = docrep.DocstringProcessor()
 
 
+@docstrings.get_sectionsf('JobQueueCluster')
 class JobQueueCluster(object):
     """ Base class to launch Dask Clusters for Job queues
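This pattern is all docrep needs: `get_sectionsf` records the numpydoc sections of the decorated docstring under the given name, and `with_indent` later substitutes `%(<name>.parameters)s` placeholders at the stated indentation. A minimal, self-contained sketch of the same mechanism (the `Base`/`Child` classes here are illustrative stand-ins, not the real dask-jobqueue classes):

import docrep

docstrings = docrep.DocstringProcessor()


# Record the numpydoc sections of this docstring under the key prefix
# 'Base'; the Parameters section becomes available as %(Base.parameters)s.
@docstrings.get_sectionsf('Base')
class Base(object):
    """Base class.

    Parameters
    ----------
    threads : int
        Number of threads per process.
    memory : str
        Bytes of memory that the worker can use, e.g. "7GB".
    """


# Substitute the recorded sections into this docstring, re-indented by
# 4 spaces to match their position inside the class body.
@docstrings.with_indent(4)
class Child(Base):
    """Child class.

    Parameters
    ----------
    queue : str
        Destination queue for each worker job.
    %(Base.parameters)s
    """


print(Child.__doc__)  # threads and memory now appear after queue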
37 changes: 6 additions & 31 deletions dask_jobqueue/pbs.py
@@ -2,19 +2,17 @@
 import os
 import math
 
-from .core import JobQueueCluster
+from .core import JobQueueCluster, docstrings
 
 logger = logging.getLogger(__name__)
 
 
+@docstrings.with_indent(4)
 class PBSCluster(JobQueueCluster):
     """ Launch Dask on a PBS cluster
 
     Parameters
     ----------
-    name : str
-        Name of worker jobs and Dask workers. Passed to `$PBS -N` option.
     queue : str
         Destination queue for each worker job. Passed to `#PBS -q` option.
     project : str
@@ -29,30 +27,7 @@ class PBSCluster(JobQueueCluster):
         List of other PBS options, for example -j oe. Each option will be prepended with the #PBS prefix.
     local_directory : str
         Dask worker local directory for file spilling.
-    kwargs : dict
-        Additional keyword arguments to pass to `JobQueueCluster` and `LocalCluster`
-
-    Inherited parameters from JobQueueCluster
-    -----------------------------------------
-    name : str
-        Name of Dask workers.
-    threads : int
-        Number of threads per process.
-    processes : int
-        Number of processes per node.
-    memory : str
-        Bytes of memory that the worker can use. This should be a string
-        like "7GB" that can be interpretted both by PBS and Dask.
-    interface : str
-        Network interface like 'eth0' or 'ib0'.
-    death_timeout : float
-        Seconds to wait for a scheduler before closing workers
-    local_directory : str
-        Dask worker local directory for file spilling.
-    extra : str
-        Additional arguments to pass to `dask-worker`
-    kwargs : dict
-        Additional keyword arguments to pass to `LocalCluster`
+    %(JobQueueCluster.parameters)s
 
     Examples
     --------
@@ -79,7 +54,6 @@ class PBSCluster(JobQueueCluster):
     cancel_command = 'qdel'
 
     def __init__(self,
-                 name='dask-worker',
                  queue=None,
                  project=None,
                  resource_spec=None,
@@ -88,13 +62,14 @@ def __init__(self,
                  **kwargs):
 
         #Instantiate args and parameters from parent abstract class
-        super(PBSCluster, self).__init__(name=name, **kwargs)
+        super(PBSCluster, self).__init__(**kwargs)
 
         #Try to find a project name from environment variable
         project = project or os.environ.get('PBS_ACCOUNT')
 
         #PBS header build
-        header_lines = ['#PBS -N %s' % name]
+        if self.name is not None:
+            header_lines = ['#PBS -N %s' % self.name]
         if queue is not None:
             header_lines.append('#PBS -q %s' % queue)
         if project is not None:
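Because the substitution happens at class-creation time, the inherited options are visible in the usual places. A quick check, assuming this change plus docrep are installed and that `PBSCluster` is re-exported from the package top level:

from dask_jobqueue import PBSCluster

# The placeholder has been expanded, so the combined parameter list shows
# up in interactive help and in the raw docstring.
help(PBSCluster)
assert '%(JobQueueCluster.parameters)s' not in PBSCluster.__doc__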
35 changes: 5 additions & 30 deletions dask_jobqueue/slurm.py
@@ -2,13 +2,14 @@
 import os
 import sys
 
-from .core import JobQueueCluster
+from .core import JobQueueCluster, docstrings
 
 logger = logging.getLogger(__name__)
 
 dirname = os.path.dirname(sys.executable)
 
 
+@docstrings.with_indent(4)
 class SLURMCluster(JobQueueCluster):
     """ Launch Dask on a SLURM cluster
@@ -32,7 +33,6 @@ class SLURMCluster(JobQueueCluster):
     cancel_command = 'scancel'
 
     def __init__(self,
-                 name='dask',
                  queue='',
                  project=None,
                  processes=8,
@@ -43,8 +43,6 @@ def __init__(self,
         Parameters
         ----------
-        name : str
-            Name of worker jobs. Passed to `#SBATCH -J` option.
         queue : str
             Destination queue for each worker job.
             Passed to `#SBATCH -p` option.
@@ -58,33 +56,10 @@ def __init__(self,
             like "7GB" that can be interpretted both by PBS and Dask.
         walltime : str
             Walltime for each worker job.
-        kwargs : dict
-            Additional keyword arguments to pass to `JobQueueCluster` and `LocalCluster`
-
-        Inherited parameters from JobQueueCluster
-        -----------------------------------------
-        name : str
-            Name of Dask workers.
-        threads : int
-            Number of threads per process.
-        processes : int
-            Number of processes per node.
-        memory : str
-            Bytes of memory that the worker can use. This should be a string
-            like "7GB" that can be interpretted both by PBS and Dask.
-        interface : str
-            Network interface like 'eth0' or 'ib0'.
-        death_timeout : float
-            Seconds to wait for a scheduler before closing workers
-        local_directory : str
-            Dask worker local directory for file spilling.
-        extra : str
-            Additional arguments to pass to `dask-worker`
-        kwargs : dict
-            Additional keyword arguments to pass to `LocalCluster`
+        %(JobQueueCluster.parameters)s
         """
 
-        super(SLURMCluster, self).__init__(name=name, processes=processes, **kwargs)
+        super(SLURMCluster, self).__init__(processes=processes, **kwargs)
 
         self._header_template = """
 #SBATCH -J %(name)s
@@ -101,7 +76,7 @@ def __init__(self,
 """.lstrip()
 
         memory = memory.replace(' ', '')
-        self.config = {'name': name,
+        self.config = {'name': self.name,
                        'queue': queue,
                        'project': project,
                        'processes': processes,
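Note that the `#SBATCH` header is still rendered with plain %-style string formatting; the only change is that the worker name now flows from the parent class as `self.name`. A standalone sketch of that rendering step, with illustrative values in place of the real `__init__` arguments:

# Illustrative stand-ins for the values SLURMCluster collects in __init__
# (the real `name` now arrives via JobQueueCluster as self.name).
config = {'name': 'dask-worker', 'queue': 'debug', 'project': 'myproj'}

header_template = """
#SBATCH -J %(name)s
#SBATCH -p %(queue)s
#SBATCH -A %(project)s
""".lstrip()

print(header_template % config)
# #SBATCH -J dask-worker
# #SBATCH -p debug
# #SBATCH -A myproj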
1 change: 1 addition & 0 deletions setup.py
@@ -10,4 +10,5 @@
       license='BSD 3-Clause',
       packages=['dask_jobqueue'],
       long_description=(open('README.rst').read() if exists('README.rst') else ''),
+      install_requires=['docrep'],
       zip_safe=False)
