Merge pull request #564 from apjanke/typos-2019-12-31
Fix typos in helpstrings and comments
effigies authored Jan 6, 2020
2 parents 5dd2275 + c6e85c7 commit a23fd41
Showing 12 changed files with 36 additions and 35 deletions.
4 changes: 2 additions & 2 deletions bids/analysis/auto_model.py
@@ -27,7 +27,7 @@ def auto_model(layout, scan_length=None, one_vs_rest=False):
layout : :obj:`bids.layout.BIDSLayout`
A BIDSLayout instance
scan_length : int
- Scan length for loading event varibles in cases
+ Scan length for loading event variables in cases
where the scan length can not be read from the nifti.
Primarily for testing.
one_vs_rest : bool
@@ -76,7 +76,7 @@ def auto_model(layout, scan_length=None, one_vs_rest=False):
run["Model"] = run_model

if one_vs_rest:
- # if there are multiple trial types, build contrasts
+ # If there are multiple trial types, build contrasts
contrasts = []
for i, tt in enumerate(trial_types):
cdict = OrderedDict()
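For context on the docstring fixed above, here is a minimal sketch of how auto_model might be called, assuming pybids is installed, a BIDS dataset lives at the placeholder path /data/bids, and that the function returns a list of auto-generated model dictionaries (the return shape is not shown in this excerpt).

from bids import BIDSLayout
from bids.analysis.auto_model import auto_model  # module path as it appears in this diff

layout = BIDSLayout('/data/bids')   # placeholder path to any valid BIDS dataset

# scan_length is only needed when run duration cannot be read from the NIfTI
# header (e.g., stub images in tests), per the corrected docstring above.
models = auto_model(layout, scan_length=480, one_vs_rest=True)
print(len(models), 'auto-generated model(s)')
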
12 changes: 6 additions & 6 deletions bids/analysis/hrf.py
@@ -207,7 +207,7 @@ def glover_dispersion_derivative(tr, oversampling=50, time_length=32.,


def _sample_condition(exp_condition, frame_times, oversampling=50,
- min_onset=-24):
+ min_onset=-24):
"""Make a possibly oversampled event regressor from condition information.
Parameters
@@ -217,7 +217,7 @@ def _sample_condition(exp_condition, frame_times, oversampling=50,
(onsets, durations, amplitudes) triplet
frame_times : array of shape(n_scans)
sample time points
- over_sampling : int, optional
+ oversampling : int, optional
factor for oversampling event regressor
min_onset : float, optional
minimal onset relative to frame_times[0] (in seconds)
@@ -305,7 +305,7 @@ def _orthogonalize(X):
Notes
-----
- X is changed in place. The columns are not normalized
+ X is changed in place. The columns are not normalized.
"""
if X.size == X.shape[0]:
return X
@@ -324,7 +324,7 @@ def _regressor_names(con_name, hrf_model, fir_delays=None):
con_name: string
identifier of the condition
hrf_model: string or None,
- hrf model chosen
+ hrf model chosen
fir_delays: 1D array_like, optional,
Delays used in case of an FIR model
@@ -455,7 +455,7 @@ def compute_regressor(exp_condition, hrf_model, frame_times, con_id='cond',
hr_regressor, hr_frame_times = _sample_condition(
exp_condition, frame_times, oversampling, min_onset)

- # 2. create the hrf model(s)
+ # 2. create the hrf model(s)
hkernel = _hrf_kernel(hrf_model, tr, oversampling, fir_delays)

# 3. convolve the regressor and hrf, and downsample the regressor
@@ -466,7 +466,7 @@ def compute_regressor(exp_condition, hrf_model, frame_times, con_id='cond',
computed_regressors = _resample_regressor(
conv_reg, hr_frame_times, frame_times)

- # 5. ortogonalize the regressors
+ # 5. orthogonalize the regressors
if hrf_model != 'fir':
computed_regressors = _orthogonalize(computed_regressors)

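The compute_regressor signature visible in the hunks above takes an (onsets, durations, amplitudes) triplet and an array of frame times. A hedged usage sketch follows; the two-value return (regressor array, regressor names) is assumed from this module's nistats lineage rather than shown in the excerpt.

import numpy as np
from bids.analysis.hrf import compute_regressor  # module shown in this diff

tr, n_scans = 2.0, 160
frame_times = np.arange(n_scans) * tr              # sample time points, shape (n_scans,)

onsets = np.array([10., 70., 130.])
durations = np.array([5., 5., 5.])
amplitudes = np.array([1., 1., 1.])
exp_condition = (onsets, durations, amplitudes)    # the documented triplet

# 'glover' is a standard hrf_model choice; oversampling matches the default above.
regressor, names = compute_regressor(exp_condition, 'glover', frame_times,
                                     con_id='stimulus', oversampling=50)
print(regressor.shape, names)
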
4 changes: 2 additions & 2 deletions bids/config.py
@@ -72,12 +72,12 @@ def from_file(filenames, error_on_missing=True):


def reset_options(update_from_file=False):
- ''' Reset all options to the package defaults.
+ """ Reset all options to the package defaults.
Args:
update_from_file (bool): If True, re-applies any config files found in
standard locations.
- '''
+ """
global _settings
_settings = _default_settings.copy()
if update_from_file:
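A quick illustration of the reset_options call whose docstring quoting was changed above, assuming the module is importable as bids.config:

from bids import config

# Restore package defaults, then re-apply any config files found in standard locations.
config.reset_options(update_from_file=True)
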
2 changes: 1 addition & 1 deletion bids/layout/index.py
@@ -129,7 +129,7 @@ def _index_dir(self, path, config, default_action=None):
d = os.path.join(dirpath, d)
self._index_dir(d, list(config), default_action=default)

- # prevent subdirectory traversal
+ # Prevent subdirectory traversal
break

def _index_file(self, f, dirpath, entities, default_action=None):
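The "Prevent subdirectory traversal" comment refers to breaking out of os.walk after its first iteration so that recursion stays explicit. A standalone sketch of that pattern (not pybids code, purely illustrative):

import os

def index_one_level(path):
    """Visit only the immediate children of `path`, recursing manually."""
    for dirpath, dirnames, filenames in os.walk(path):
        for d in dirnames:
            index_one_level(os.path.join(dirpath, d))  # recurse explicitly, like _index_dir
        for f in filenames:
            print(os.path.join(dirpath, f))            # stand-in for per-file indexing
        break  # prevent os.walk from also descending into subdirectories on its own
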
22 changes: 11 additions & 11 deletions bids/layout/layout.py
@@ -392,8 +392,8 @@ def _sanitize_init_args(**kwargs):

def _init_db(self, database_path=None, reset_database=False):
database_file, database_sidecar = self._make_db_paths(database_path)
- # Reset database if needed and return whether or not it was reset
- # determining if the database needs resetting must be done prior
+ # Reset database if needed and return whether or not it was reset.
+ # Determining if the database needs resetting must be done prior
# to setting the session (which creates the empty database file)
reset_database = (
reset_database or # Manual Request
@@ -559,7 +559,7 @@ def save(self, database_path, replace_connection=True):
"""Save the current index as a SQLite3 DB at the specified location.
Note: This is only necessary if a database_path was not specified
- at initalization, and the user now wants to save the index.
+ at initialization, and the user now wants to save the index.
If a database_path was specified originally, there is no need to
re-save using this method.
@@ -714,7 +714,7 @@ def add_derivatives(self, path, parent_database_path=None, **kwargs):
parent_database_path : str
If not None, use the pipeline name from the dataset_description.json
file as the database folder name to nest within the parent database
- folder name to write out derivatie index to.
+ folder name to write out derivative index to.
kwargs : dict
Optional keyword arguments to pass on to
BIDSLayout() when initializing each of the derivative datasets.
@@ -850,10 +850,10 @@ def get(self, return_type='object', target=None, scope='all',
nodes/directories that match the specified scope will be
searched. Possible values include:
'all' (default): search all available directories.
- 'derivatives': search all derivatives directories
- 'raw': search only BIDS-Raw directories
- 'self': search only the directly called BIDSLayout
- <PipelineName>: the name of a BIDS-Derivatives pipeline
+ 'derivatives': search all derivatives directories.
+ 'raw': search only BIDS-Raw directories.
+ 'self': search only the directly called BIDSLayout.
+ <PipelineName>: the name of a BIDS-Derivatives pipeline.
regex_search : bool or None, optional
Whether to require exact matching
(False) or regex search (True) when comparing the query string
@@ -1072,7 +1072,7 @@ def get_collections(self, level, types=None, variables=None, merge=False,
observation is handled separately, and the result is returned
as a list.
sampling_rate : int or str
- If level='run', the sampling rate to pass onto the returned
+ If level='run', the sampling rate to pass on to the returned
:obj:`bids.variables.kollekshuns.BIDSRunVariableCollection`.
skip_empty : bool
Whether or not to skip empty Variables (i.e., where there are no
@@ -1491,7 +1491,7 @@ def copy_files(self, files=None, path_patterns=None, symbolic_links=True,
'fail': raises an exception
'skip' does nothing
'overwrite': overwrites the existing file
- 'append': adds a suffix to each file copy, starting with 1
+ 'append': adds a suffix to each file copy, starting with 1
kwargs : dict
Optional key word arguments to pass into a get() query.
"""
@@ -1533,7 +1533,7 @@ def write_contents_to_file(self, entities, path_patterns=None,
'fail': raises an exception
'skip' does nothing
'overwrite': overwrites the existing file
- 'append': adds a suffix to each file copy, starting with 1
+ 'append': adds a suffix to each file copy, starting with 1
strict : bool
If True, all entities must be matched inside a
pattern in order to be a valid match. If False, extra entities
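Several of the docstrings touched above belong to the public BIDSLayout API. A minimal sketch of how they fit together, assuming a dataset at the placeholder path /data/bids with an fMRIPrep derivative folder; argument values are illustrative only.

from bids import BIDSLayout

layout = BIDSLayout('/data/bids')                          # placeholder dataset path
layout.add_derivatives('/data/bids/derivatives/fmriprep')  # index a derivative pipeline too

# scope narrows the search: 'all', 'raw', 'derivatives', 'self', or a pipeline name.
raw_bold = layout.get(suffix='bold', scope='raw', return_type='file')
preproc = layout.get(suffix='bold', scope='derivatives', return_type='file')

# Persist the index for later sessions; per the docstring above, this is only
# needed when no database_path was given at initialization.
layout.save('/data/bids/.pybids_index')
print(len(raw_bold), len(preproc))
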
4 changes: 2 additions & 2 deletions bids/layout/models.py
@@ -250,7 +250,7 @@ def copy(self, path_patterns, symbolic_link=False, root=None,
Parameters
----------
path_patterns : list
- List of patterns use to construct the new
+ List of patterns used to construct the new
filename. See :obj:`build_path` documentation for details.
symbolic_link : bool
If True, use a symbolic link to point to the
@@ -537,7 +537,7 @@ def _astype(self, val):


class Tag(Base):
- """Represents an association between a File and and Entity.
+ """Represents an association between a File and an Entity.
Parameters
----------
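BIDSFile.copy, whose docstring is corrected above, builds the destination name from build_path-style patterns. A hedged sketch follows; the paths are placeholders and the pattern string is illustrative, assuming the usual {entity} placeholder syntax.

from bids import BIDSLayout

layout = BIDSLayout('/data/bids')                        # placeholder dataset path
bf = layout.get(suffix='bold', return_type='object')[0]  # a BIDSFile object

# Copy (here, symlink) the file into a flat layout under a hypothetical output root.
bf.copy(['sub-{subject}/sub-{subject}_task-{task}_bold.nii.gz'],
        symbolic_link=True, root='/data/flat_copy')
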
2 changes: 1 addition & 1 deletion bids/layout/writing.py
@@ -142,7 +142,7 @@ def write_contents_to_file(path, contents=None, link_to=None,
One of 'fail', 'skip', 'overwrite', or 'append'
that defines the desired action when the output path already
exists. 'fail' raises an exception; 'skip' does nothing;
- 'overwrite' overwrites the existing file; 'append' adds a suffix
+ 'overwrite' overwrites the existing file; 'append' adds a suffix
to each file copy, starting with 1. Default is 'fail'.
"""

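The conflict-handling values documented above ('fail', 'skip', 'overwrite', 'append') apply when the output path already exists. A hedged sketch of calling the module-level helper; the keyword name conflicts is assumed, since the excerpt shows only the allowed values.

from bids.layout.writing import write_contents_to_file  # module shown in this diff

# Overwrite an existing sidecar rather than failing (the documented default is 'fail').
write_contents_to_file('/data/out/dataset_description.json',
                       contents='{"Name": "demo", "BIDSVersion": "1.2.0"}',
                       conflicts='overwrite')
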
2 changes: 1 addition & 1 deletion bids/reports/utils.py
@@ -52,7 +52,7 @@ def list_to_str(lst):
Returns
-------
str_ : :obj:`str`
- A string with commas and/or ands separating th elements from ``lst``.
+ A string with commas and/or ands separating the elements from ``lst``.
"""
if len(lst) == 1:
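For reference, the corrected docstring describes list_to_str as joining elements with commas and/or "and"; the output strings below are therefore expectations, not values taken from this excerpt.

from bids.reports.utils import list_to_str  # module shown in this diff

print(list_to_str(['T1w']))                  # 'T1w'
print(list_to_str(['T1w', 'T2w']))           # expected: something like 'T1w and T2w'
print(list_to_str(['T1w', 'T2w', 'FLAIR']))  # expected: 'T1w, T2w, and FLAIR'
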
9 changes: 5 additions & 4 deletions bids/variables/entities.py
@@ -71,6 +71,7 @@ def get_info(self):
# Stores key information for each Run.
RunInfo_ = namedtuple('RunInfo', ['entities', 'duration', 'tr', 'image'])

+
# Wrap with class to provide docstring
class RunInfo(RunInfo_):
""" A namedtuple storing run-related information.
@@ -103,7 +104,7 @@ def get_collections(self, unit, names=None, merge=False,
merge : bool
If True, variables are merged across all observations
of the current unit. E.g., if unit='subject' and return_type=
- 'collection', variablesfrom all subjects will be merged into a
+ 'collection', variables from all subjects will be merged into a
single collection. If False, each observation is handled
separately, and the result is returned as a list.
sampling_rate : int or str
@@ -217,9 +218,9 @@ def create_node(self, level, entities, *args, **kwargs):
level : str
The level of analysis of the new Node.
entities : dict
- Dictionary of entities belonging to Node
+ Dictionary of entities belonging to Node.
args, kwargs : dict
- Optional positional or named arguments to pass onto
+ Optional positional or named arguments to pass on to
class-specific initializers. These arguments are only used if
a Node that matches the passed entities doesn't already exist,
and a new one must be created.
@@ -252,7 +253,7 @@ def get_or_create_node(self, level, entities, *args, **kwargs):
Dictionary of entities to include in newly-created
Nodes or filter existing ones.
args, kwargs : dict
- Optional positional or named arguments to pass onto
+ Optional positional or named arguments to pass on to
class-specific initializers. These arguments are only used if
a Node that matches the passed entities doesn't already exist,
and a new one must be created.
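The RunInfo namedtuple gains a docstring wrapper in the first hunk above; its fields come straight from the namedtuple definition. A small sketch with illustrative values:

from bids.variables.entities import RunInfo  # wrapper class shown in this diff

info = RunInfo(entities={'subject': '01', 'task': 'rest', 'run': 1},
               duration=480.0,  # run length in seconds (illustrative)
               tr=2.0,          # repetition time in seconds (illustrative)
               image='/data/bids/sub-01/func/sub-01_task-rest_bold.nii.gz')
print(int(info.duration / info.tr), 'volumes implied by these values')
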
6 changes: 3 additions & 3 deletions bids/variables/io.py
@@ -141,7 +141,7 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
The scope of the space to search for variables. See
docstring for BIDSLayout for details and valid predefined values.
selectors : dict
- Optional keyword arguments passed onto the
+ Optional keyword arguments passed on to the
BIDSLayout instance's get() method; can be used to constrain
which data are loaded.
@@ -241,11 +241,11 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
if (_data['amplitude'].astype(int) == 1).all() and \
'trial_type' in _data.columns:
msg = ("Column 'amplitude' with constant value 1 "
- "is unnecessary in event files; ignoring it.")
+ "is unnecessary in event files; ignoring it.")
_data = _data.drop('amplitude', axis=1)
else:
msg = ("Column name 'amplitude' is reserved; "
- "renaming it to 'amplitude_'.")
+ "renaming it to 'amplitude_'.")
_data = _data.rename(
columns={'amplitude': 'amplitude_'})
warnings.warn(msg)
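The second hunk above shows how _load_time_variables treats a reserved 'amplitude' column in events files. A standalone pandas sketch of that branch logic (not the pybids API, just the rule reproduced for illustration):

import warnings
import pandas as pd

def handle_amplitude(events):
    """Reproduce the 'amplitude' handling shown in the diff above."""
    _data = events.copy()
    if 'amplitude' in _data.columns:
        if (_data['amplitude'].astype(int) == 1).all() and \
                'trial_type' in _data.columns:
            msg = ("Column 'amplitude' with constant value 1 "
                   "is unnecessary in event files; ignoring it.")
            _data = _data.drop('amplitude', axis=1)
        else:
            msg = ("Column name 'amplitude' is reserved; "
                   "renaming it to 'amplitude_'.")
            _data = _data.rename(columns={'amplitude': 'amplitude_'})
        warnings.warn(msg)
    return _data

events = pd.DataFrame({'onset': [0.0, 10.0], 'duration': [5.0, 5.0],
                       'trial_type': ['a', 'b'], 'amplitude': [1, 1]})
print(handle_amplitude(events).columns.tolist())  # 'amplitude' dropped
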
2 changes: 1 addition & 1 deletion bids/variables/kollekshuns.py
@@ -154,7 +154,7 @@ def from_df(cls, data, entities=None, source='contrast'):
Parameters
----------
- df : :obj:`pandas.DataFrame`
+ data : :obj:`pandas.DataFrame`
The DataFrame to convert to a Collection. Each
column will be converted to a SimpleVariable.
entities : :obj:`pandas.DataFrame`
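The from_df classmethod documented above converts each DataFrame column to a SimpleVariable. A heavily hedged sketch: the owning class is assumed to be BIDSVariableCollection (it is not named in the excerpt), and the .variables attribute used for inspection is likewise an assumption.

import pandas as pd
from bids.variables.kollekshuns import BIDSVariableCollection  # class name assumed

df = pd.DataFrame({'rt': [0.41, 0.52, 0.38],
                   'accuracy': [1, 1, 0]})

# Each column becomes a SimpleVariable, per the corrected docstring above.
coll = BIDSVariableCollection.from_df(df, source='behavior')
print(sorted(coll.variables))  # attribute name assumed; lists the variable names
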
2 changes: 1 addition & 1 deletion bids/variables/variables.py
@@ -609,7 +609,7 @@ def merge_variables(variables, name=None, **kwargs):
Notes
-----
- - Currently, this function only support homogenously-typed lists. In
+ - Currently, this function only support homogeneously-typed lists. In
future, it may be extended to support implicit conversion.
- Variables in the list must all share the same name (i.e., it is not
possible to merge two different variables into a single variable.)
