From b2d2697251eb2f4a7a75d758bfa3b90cb718d768 Mon Sep 17 00:00:00 2001
From: DinoBektesevic <dinob@uw.edu>
Date: Mon, 19 Aug 2024 13:03:59 -0700
Subject: [PATCH] fixup

---
 pyproject.toml                                |   3 +
 src/kbmod/mocking/__init__.py                 |   2 +-
 src/kbmod/mocking/callbacks.py                |  68 ++-
 src/kbmod/mocking/catalogs.py                 | 536 ++++++++++++++----
 src/kbmod/mocking/{fits_data.py => data.py}   | 476 +++++++++++-----
 src/kbmod/mocking/dump_headers.py             | 309 ++++++++++
 src/kbmod/mocking/fits.py                     | 173 +++++-
 src/kbmod/mocking/headers.py                  | 408 +++++++++++--
 .../{data => }/headers_archive.tar.bz2        | Bin
 src/kbmod/mocking/utils.py                    |  30 +-
 tests/dump_headers.py                         | 500 ----------------
 tests/test_end_to_end.py                      |  89 ++-
 tests/test_mocking.py                         |  20 +-
 13 files changed, 1785 insertions(+), 829 deletions(-)
 rename src/kbmod/mocking/{fits_data.py => data.py} (61%)
 create mode 100644 src/kbmod/mocking/dump_headers.py
 rename src/kbmod/mocking/{data => }/headers_archive.tar.bz2 (100%)
 delete mode 100644 tests/dump_headers.py

diff --git a/pyproject.toml b/pyproject.toml
index 97311fe5b..2eee17cf3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,6 +46,9 @@ dependencies = [
     "PyYAML>=6.0"
 ]
 
+[project.scripts]
+archiveheaders = "kbmod.mocking.dump_headers:main"
+
 [project.urls]
 Documentation = "https://epyc.astro.washington.edu/~kbmod/"
 Repository = "https://github.com/dirac-institute/kbmod"
diff --git a/src/kbmod/mocking/__init__.py b/src/kbmod/mocking/__init__.py
index 2d479f17b..1644f4b3a 100644
--- a/src/kbmod/mocking/__init__.py
+++ b/src/kbmod/mocking/__init__.py
@@ -1,6 +1,6 @@
 from . import utils
 from .catalogs import *
 from .headers import *
-from .fits_data import *
+from .data import *
 from .fits import *
 from .callbacks import *
diff --git a/src/kbmod/mocking/callbacks.py b/src/kbmod/mocking/callbacks.py
index 28c1d9397..01c26af34 100644
--- a/src/kbmod/mocking/callbacks.py
+++ b/src/kbmod/mocking/callbacks.py
@@ -3,13 +3,37 @@
 from astropy.time import Time
 import astropy.units as u
 
+
 __all__ = [
     "IncrementObstime",
-    "ObstimeIterator",
+    "ObstimeIterator"
 ]
 
 
 class IncrementObstime:
+    """Endlessly incrementing FITS-standard timestamp.
+
+    Parameters
+    ----------
+    start : `astropy.time.Time`
+        Starting timestamp, or a value from which AstroPy can instantiate one.
+    dt : `float` or `astropy.units.Quantity`
+        Size of time-step to take. Assumed to be in days by default.
+
+    Examples
+    --------
+    >>> from kbmod.mocking import IncrementObstime
+    >>> obst = IncrementObstime("2021-01-01T00:00:00.0000", 1)
+    >>> obst()
+    '2021-01-01T00:00:00.000'
+    >>> obst()
+    '2021-01-02T00:00:00.000'
+    >>> import astropy.units as u
+    >>> obst = IncrementObstime("2021-01-01T00:00:00.0000", 1*u.hour)
+    >>> obst(); obst()
+    '2021-01-01T00:00:00.000'
+    '2021-01-01T01:00:00.000'
+    """
     default_unit = "day"
     def __init__(self, start, dt):
         self.start = Time(start)
@@ -17,26 +41,44 @@ def __init__(self, start, dt):
             dt = dt * getattr(u, self.default_unit)
         self.dt = dt
 
-    def __call__(self, header_val):
+    def __call__(self, header=None):
         curr = self.start
         self.start += self.dt
         return curr.fits
 
 
 class ObstimeIterator:
-    def __init__(self, obstimes, **kwargs):
-        self.obstimes = Time(obstimes, **kwargs)
-        self.generator = (t for t in obstimes)
+    """Iterate through given timestamps.
 
-    def __call__(self, header_val):
-        return Time(next(self.generator)).fits
+    Parameters
+    ----------
+    obstimes : `astropy.time.Time`
+        Timestamps to iterate over, or values from which AstroPy can instantiate them.
 
+    Raises
+    ------
+    StopIteration
+        When all the obstimes are exhausted.
 
-class DitherValue:
-    def __init__(self, value, dither_range):
-        self.value = value
-        self.dither_range = dither_range
+    Examples
+    --------
+    >>> from astropy.time import Time
+    >>> times = Time(range(60310, 60313, 1), format="mjd")
+    >>> from kbmod.mocking import ObstimeIterator
+    >>> obst = ObstimeIterator(times)
+    >>> obst(); obst(); obst(); obst()
+    '2024-01-01T00:00:00.000'
+    '2024-01-02T00:00:00.000'
+    '2024-01-03T00:00:00.000'
+    Traceback (most recent call last):
+        ...
-    def __call__(self, header_val):
-        return self.value + random.uniform(self.dither_range)
+    StopIteration
+    """
+    def __init__(self, obstimes, **kwargs):
+        self.obstimes = Time(obstimes, **kwargs)
+        self.generator = (t for t in obstimes)
 
+    def __call__(self, header=None):
+        return Time(next(self.generator)).fits
diff --git a/src/kbmod/mocking/catalogs.py b/src/kbmod/mocking/catalogs.py
index 3063116d9..356dd97a7 100644
--- a/src/kbmod/mocking/catalogs.py
+++ b/src/kbmod/mocking/catalogs.py
@@ -9,7 +9,6 @@
 
 __all__ = [
     "gen_random_catalog",
-    "CatalogFactory",
     "SimpleCatalog",
     "SourceCatalogConfig",
     "SourceCatalog",
@@ -18,67 +17,299 @@
 ]
 
 
-def gen_random_catalog(n, param_ranges, seed=None):
-    cat = QTable()
-    rng = np.random.default_rng(seed)
 
-    for param_name, (lower, upper) in param_ranges.items():
-        cat[param_name] = rng.uniform(lower, upper, n)
+def expand_gaussian_cols(cat):
+    """Expands columns ``flux`` and ``stddev`` into ``amplitude``, ``x_stddev``
+    and ``y_stddev`` assuming the intended catalog model is a symmetric 2D
+    Gaussian.
 
-    if "x_stddev" not in param_ranges:
+    Amplitude is calculated as:
+        A = flux / (2 * pi * sigma_x * sigma_y)
+
+    Parameters
+    ----------
+    cat : `astropy.table.Table`
+        A catalog of simplified model parameters.
+
+    Returns
+    -------
+    expanded : `astropy.table.Table`
+        A catalog of AstroPy model parameters.
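+
+    Examples
+    --------
+    A minimal sketch of the ``stddev`` expansion (illustrative values only):
+
+    >>> from astropy.table import QTable
+    >>> from kbmod.mocking.catalogs import expand_gaussian_cols
+    >>> cat = QTable({"stddev": [1.5, 2.0]})
+    >>> expand_gaussian_cols(cat).colnames
+    ['stddev', 'x_stddev', 'y_stddev']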
+    """
+    if "x_stddev" not in cat.columns and "stddev" in cat.columns:
         cat["x_stddev"] = cat["stddev"]
-    if "y_stddev" not in param_ranges:
+    if "y_stddev" not in cat.columns and "stddev" in cat.columns:
         cat["y_stddev"] = cat["stddev"]
 
-    # conversion assumes a gaussian
-    if "flux" in param_ranges and "amplitude" not in param_ranges:
+    if "flux" in cat.columns and "amplitude" not in cat.columns:
         cat["amplitude"] = cat["flux"] / (2.0 * np.pi * xstd * ystd)
 
     return cat
 
 
-class CatalogFactory(abc.ABC):
-    @abc.abstractmethod
-    def mock(self, *args, **kwargs):
-        raise NotImplementedError()
+def gen_random_catalog(n, param_ranges, seed=None, assume_gaussian=True):
+    """Generates a random catalog of parameters of n sources based on
+    AstroPy's 2D models.
+
+    The object parameters are specified as a dict whose keys become columns and
+    whose values give the range from which each parameter is uniformly drawn.
+
+    If the parameter ranges contain ``flux``, an ``amplitude`` column is added
+    whose value is calculated assuming a 2D Gaussian model. If a ``stddev``
+    column is present, then ``x_stddev`` and ``y_stddev`` columns are added,
+    assuming the model intended to be used with the catalog is a symmetric
+    2D Gaussian.
+
+    Parameters
+    ----------
+    n : `int`
+        Number of objects to create
+    param_ranges : `dict`
+        Dictionary whose keys become columns of the catalog and whose values
+        define the ranges from which the catalog values are drawn.
+    seed : `int`
+        NumPy's random number generator seed.
+    assume_gaussian : `bool`
+        Assume the catalog is intended for use with a 2D Gaussian model and
+        expand ``flux`` and ``stddev`` columns appropriately. See
+        `expand_gaussian_cols`.
+
+    Returns
+    -------
+    catalog : `astropy.table.Table`
+        Catalog of randomly drawn model parameters.
+
+    Examples
+    --------
+    >>> import kbmod.mocking as kbmock
+    >>> kbmock.gen_random_catalog(3, {"amplitude": [0, 3], "x": [3, 6], "y": [6, 9]}, seed=100)
+    <QTable length=3>
+        amplitude              x                 y
+         float64            float64           float64
+    ------------------ ----------------- -----------------
+    2.504944891506027 3.128854712082634 8.370789493256163
+    1.7896620809036619 5.920963185318643  8.73101814388636
+    0.8665897250736108 4.789415112194066 8.064463342775236
+    """
+    cat = QTable()
+    rng = np.random.default_rng(seed)
+
+    for param_name, (lower, upper) in param_ranges.items():
+        cat[param_name] = rng.uniform(lower, upper, n)
+
+    if assume_gaussian:
+        cat = expand_gaussian_cols(cat)
+
+    return cat
 
 
 class SimpleCatalogConfig(Config):
-    mode = "static"  # folding
-    kind = "pixel"  # world
+    """A simple catalog configuration."""
+
+    mode = "static"
+    """Static, progressive or folding; static catalogs remain the same for every
+    realization, progressive catalogs modify values of realized catalog columns
+    and folding catalogs select rows from a larger catalog to return as a realization.
+    """
+
+    kind = "pixel"
+    """Either ``pixel`` or ``world``. The kind of position coordinates encoded
+    by the catalog. On-sky (world) coordinates require a list of WCSs to be given
+    to the mocking method."""
+
     return_copy = False
+    """For static catalogs, return a reference to the underlying catalog or
+    a copy that can be modified."""
+
     seed = None
-    n = 100
-    param_ranges = {}
+    """Random number generator seed. When `None` a different seed is used for
+    every catalog factory."""
 
+    n = 100
+    """Number of objects to generate."""
 
-class SimpleCatalog(CatalogFactory):
+    param_ranges = {}
+    """Default parameter ranges of the catalog values and columns that will be
+    generated."""
+
+    pix_pos_cols = ["x_mean", "y_mean"]
+    """Names of the columns that contain the pixel position coordinates."""
+
+    pix_vel_cols = ["vx", "vy"]
+    """Names of the columns that contain the pixel velocities."""
+
+    world_pos_cols = ["ra_mean", "dec_mean"]
+    """Names of the columns that contain the on-sky position coordinates."""
+
+    world_vel_cols = ["v_ra", "v_dec"]
+    """Names of the columns that contain the on-sky velocities."""
+
+    fold_col = "obstime"
+    """Name of the column on which folding catalogs select rows."""
+
+
+class SimpleCatalog:
+    """Default base class for mocked catalogs factory.
+
+    This base class will always generate empty catalogs and is intended to be
+    inherited from.
+
+    A catalog is an `astropy.table.Table` of model values (position, amplitude
+    etc.) of sources. A catalog factory mocks realizations of catalogs. There
+    are 2 ``kind``s of catalogs that can be mocked in 3 different ``mode``s.
+
+    In ``progressive`` mode the column values of an existing template catalog
+    are incremented, or otherwise updated, for each new realization.
+    Static catalogs always return the same realization of the catalog. Folding
+    catalogs depend on an underlying larger catalog, from which they select
+    the rows to return as a new realization. This is most appropriate for
+    catalogs with timestamps, where a different realization of the catalog is
+    returned per timestamp.
+    When the catalog mode is ``folding`` the mocking method expects the values
+    on which to fold. By default, these are timestamps `t`.
+
+    If the catalog coordinate kind is ``pixel``, then the positions are
+    interpreted as pixel coordinates. If the coordinate kind is ``world``,
+    then the positions are interpreted as on-sky coordinates in decimal
+    degrees and a list of WCSs is expected to be provided to the mocking
+    method.
+
+    Parameters
+    ----------
+    config : `SimpleCatalogConfig`
+        Factory configuration.
+    table : `astropy.table.Table`
+        The catalog template from which new realizations will be generated.
+
+    Attributes
+    ----------
+    config : `SimpleCatalogConfig`
+       The instance-bound configuration of the factory
+    table : `astropy.table.Table`
+       The template catalog used to create new realizations.
+    _realization : `astropy.table.Table`
+       The last realization of a catalog.
+    current : `int`
+       The current iterator counter.
+
+    Examples
+    --------
+    Directly instantiate a simple static catalog:
+
+    >>> import kbmod.mocking as kbmock
+    >>> table = kbmock.gen_random_catalog(3, {"A": [0, 1], "x": [1, 2], "y": [2, 3]}, seed=100)
+    >>> f = kbmock.SimpleCatalog(table)
+    >>> f.mock()
+    [<QTable length=3>
+            A                  x                  y
+         float64            float64            float64
+    ------------------ ------------------ ------------------
+    0.8349816305020089 1.0429515706942114 2.7902631644187212
+    0.5965540269678873 1.9736543951062142 2.9103393812954526
+    0.2888632416912036 1.5964717040646885  2.688154447591745]
+
+    Instantiating from factory methods will derive additional information
+    regarding the catalog contents:
+
+    >>> f2 = kbmock.SimpleCatalog.from_table(table)
+    >>> f2.mock()
+    [<QTable length=3>
+            A                  x                  y
+         float64            float64            float64
+    ------------------ ------------------ ------------------
+    0.8349816305020089 1.0429515706942114 2.7902631644187212
+    0.5965540269678873 1.9736543951062142 2.9103393812954526
+    0.2888632416912036 1.5964717040646885  2.688154447591745]
+
+    >>> f2.config["param_ranges"]
+    {'A': (0.2888632416912036, 0.8349816305020089), 'x': (1.0429515706942114, 1.9736543951062142), 'y': (2.688154447591745, 2.9103393812954526)}
+    >>> f.config["param_ranges"]
+    {}
+
+    Folding catalogs just return subsets of the template catalog:
+
+    >>> table["obstime"] = [1, 1, 2]
+    >>> f = kbmock.SimpleCatalog(table, mode="folding")
+    >>> f.mock(t=[1, 2])
+    [<QTable length=2>
+            A                  x                  y          obstime
+         float64            float64            float64        int64
+    ------------------ ------------------ ------------------ -------
+    0.8349816305020089 1.0429515706942114 2.7902631644187212       1
+    0.5965540269678873 1.9736543951062142 2.9103393812954526       1, <QTable length=1>
+            A                  x                  y         obstime
+         float64            float64            float64       int64
+    ------------------ ------------------ ----------------- -------
+    0.2888632416912036 1.5964717040646885 2.688154447591745       2]
+
+    And progressive catalogs increment selected column values (note the velocities
+    were assigned the default expected column names but positions weren't):
+
+    >>> table["vx"] = [1, 1, 1]
+    >>> table["vy"] = [10, 10, 10]
+    >>> f = kbmock.SimpleCatalog(table, mode="progressive", pix_pos_cols=["x", "y"])
+    >>> _ = f.mock(dt=1); f.mock(dt=1)
+    [<QTable length=3>
+            A                  x                  y             vx      vy
+         float64            float64            float64       float64 float64
+    ------------------ ------------------ ------------------ ------- -------
+    0.8349816305020089 2.0429515706942114 12.790263164418722     1.0    10.0
+    0.5965540269678873  2.973654395106214 12.910339381295453     1.0    10.0
+    0.2888632416912036 2.5964717040646885 12.688154447591746     1.0    10.0]
+    """
     default_config = SimpleCatalogConfig
 
-    def __init__(self, config, table, **kwargs):
+    def __init__(self, table, config=None, **kwargs):
         self.config = self.default_config(config=config, **kwargs)
         self.table = table
         self.current = 0
-
-    @classmethod
-    def from_config(cls, config, **kwargs):
-        config = cls.default_config(config=config, **kwargs)
-        table = gen_random_catalog(config["n"], config["param_ranges"], config["seed"])
-        return cls(config, table)
+        self._realization = self.table.copy()
+        self.mode = self.config["mode"]
+        self.kind = self.config["kind"]
 
     @classmethod
     def from_defaults(cls, param_ranges=None, **kwargs):
+        """Create a catalog factory using its default config.
+
+        Parameters
+        ----------
+        param_ranges : `dict`
+            Default parameter ranges of the catalog values and columns that will be
+            generated. See `gen_random_catalog`.
+        kwargs : `dict`
+            Any additional keyword arguments will be used to supplement or override
+            any matching default configuration parameters.
+
+        Returns
+        -------
+        factory : `SimpleCatalog`
+            Simple catalog factory.
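+
+        Examples
+        --------
+        A minimal sketch (illustrative values):
+
+        >>> import kbmod.mocking as kbmock
+        >>> f = kbmock.SimpleCatalog.from_defaults({"x": [0, 1]}, n=2, seed=100)
+        >>> len(f.table)
+        2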
+        """
         config = cls.default_config(**kwargs)
         if param_ranges is not None:
             config["param_ranges"].update(param_ranges)
-        return cls.from_config(config)
+        table = gen_random_catalog(config["n"], config["param_ranges"], config["seed"])
+        return cls(table=table, config=config)
 
     @classmethod
     def from_table(cls, table, **kwargs):
-        if "x_stddev" not in table.columns:
-            table["x_stddev"] = table["stddev"]
-        if "y_stddev" not in table.columns:
-            table["y_stddev"] = table["stddev"]
+        """Create a factory from a table template, deriving parameters, their
+        value ranges and number of objects from the table.
+
+        Optionally expands the given table columns assuming the intended source
+        model is a 2D Gaussian.
+
+        Parameters
+        ----------
+        table : `astropy.table.Table`
+            Catalog template.
+        kwargs : `dict`
+            Any additional keyword arguments will be used to supplement or override
+            any matching default configuration parameters.
+
+        Returns
+        -------
+        factory : `SimpleCatalog`
+            Simple catalog factory.
+        """
+        table = expand_gaussian_cols(table)
 
         config = cls.default_config(**kwargs)
         config["n"] = len(table)
@@ -86,65 +317,21 @@ def from_table(cls, table, **kwargs):
         for col in table.keys():
             params[col] = (table[col].min(), table[col].max())
         config["param_ranges"] = params
-        return cls(config, table)
-
-    def mock(self):
-        self.current += 1
-        if self.config["return_copy:"]:
-            return self.table.copy()
-        return self.table
-
-
-class SourceCatalogConfig(SimpleCatalogConfig):
-    param_ranges = {
-        "amplitude": [1.0, 10.0],
-        "x_mean": [0.0, 4096.0],
-        "y_mean": [0.0, 2048.0],
-        "x_stddev": [1.0, 3.0],
-        "y_stddev": [1.0, 3.0],
-        "theta": [0.0, np.pi],
-    }
-
-
-class SourceCatalog(SimpleCatalog):
-    default_config = SourceCatalogConfig
-
-
-class ObjectCatalogConfig(SimpleCatalogConfig):
-    mode = "progressive"  # folding
-    param_ranges = {
-        "amplitude": [0.1, 3.0],
-        "x_mean": [0.0, 4096.0],
-        "y_mean": [0.0, 2048.0],
-        "vx": [500.0, 1000.0],
-        "vy": [500.0, 1000.0],
-        "stddev": [0.25, 1.5],
-        "theta": [0.0, np.pi],
-    }
-
-
-class ObjectCatalog(SimpleCatalog):
-    default_config = ObjectCatalogConfig
-
-    def __init__(self, config, table, **kwargs):
-        # Obj cat always has to return a copy
-        kwargs["return_copy"] = True
-        super().__init__(config, table, **kwargs)
-        self._realization = self.table.copy()
-        self.mode = self.config["mode"]
+        return cls(table=table, config=config)
 
     @property
     def mode(self):
+        """Catalog mode, ``static``, ``folding`` or ``progressive``."""
         return self._mode
 
     @mode.setter
     def mode(self, val):
         if val == "folding":
             self._gen_realization = self.fold
-        elif val == "progressive" and self.config["kind"] == "pixel":
-            self._gen_realization = self.next_pixel
-        elif val == "progressive" and self.config["kind"] == "world":
-            self._gen_realization = self.next_world
+            self.config["return_copy"] = True
+        elif val == "progressive":
+            self._gen_realization = self.next
+            self.config["return_copy"] = True
         elif val == "static":
             self._gen_realization = self.static
         else:
@@ -154,32 +341,100 @@ def mode(self, val):
             )
         self._mode = val
 
+    @property
+    def kind(self):
+        """Catalog coordinate kind, ``pixel`` or ``world``"""
+        return self._kind
+
+    @mode.setter
+    def kind(self, val):
+        if val == "pixel":
+            self._cat_keys = self.config["pix_pos_cols"] + self.config["pix_vel_cols"]
+        elif val == "world":
+            self._cat_keys = self.config["world_pos_cols"] + self.config["world_vel_cols"]
+        else:
+            raise ValueError(
+                "Unrecognized coordinate kind. Expected 'world' or 'pixel, got"
+                f"{val} instead."
+            )
+        self._kind = val
+
     def reset(self):
+        """Reset the iteration counter reset the realization to the initial one."""
         self.current = 0
         self._realization = self.table.copy()
 
     def static(self, **kwargs):
-        return self.table.copy()
+        """Return the initial template as a catalog realization.
 
-    def _next(self, dt, keys):
-        a, va, b, vb = keys
+        Returns
+        -------
+        catalog : `astropy.table.Table`
+            Catalog realization.
+        """
+        self.current += 1
+        if self.config["return_copy"]:
+            return self.table.copy()
+        return self.table
+
+    def next(self, dt):
+        """Return the next catalog realization by incrementing the position
+        columns by the value of the velocity column and number of current `dt` steps.
+
+        Parameters
+        ----------
+        dt : `float`
+            Time increment of each step.
+
+        Returns
+        -------
+        catalog : `astropy.table.Table`
+            Catalog realization.
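+
+        Notes
+        -----
+        For example, with ``x = 1``, ``vx = 2`` and ``dt = 0.5``, the realization
+        generated when ``current == 2`` has ``x = 1 + 2 * 2 * 0.5 = 3``.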
+        """
+        a, b, va, vb = self._cat_keys
         self._realization[a] = self.table[a] + self.current * self.table[va] * dt
         self._realization[b] = self.table[b] + self.current * self.table[vb] * dt
         self.current += 1
         return self._realization.copy()
 
-    def next_world(self, dt):
-        return self._next(dt, ["ra_mean", "v_ra", "dec_mean", "v_dec"])
-
-    def next_pixel(self, dt):
-        return self._next(dt, ["x_mean", "vx", "y_mean", "vy"])
-
     def fold(self, t, **kwargs):
-        self._realization = self.table[self.table["obstime"] == t]
+        """Return the next catalog realization by selecting those rows that
+        match the given parameter ``t``. By default the folding column is
+        ``obstime``.
+
+        Parameters
+        ----------
+        t : `float`
+            Value on which to select rows from the template catalog.
+
+        Returns
+        -------
+        catalog : `astropy.table.Table`
+            Catalog realization.
+        """
+        self._realization = self.table[self.table[self.config["fold_col"]] == t]
         self.current += 1
         return self._realization.copy()
 
     def mock(self, n=1, dt=None, t=None, wcs=None):
+        """Return the next realization(s) of the catalogs.
+
+        Selects the appropriate mocking function and ignores keywords that do
+        not apply to the configured catalog mode and coordinate kind.
+
+        Parameters
+        ----------
+        n : `int`, optional
+           Number of catalogs to mock. Default 1.
+        dt : `float`, optional
+           Time step between realizations (arbitrary units).
+        t : `list[float]` or `list[astropy.time.Time]`, optional
+           Values on which to fold the template catalog.
+        wcs : `list[astropy.wcs.WCS]`, optional
+           WCS to use in conversion of on-sky coordinates to pixel coordinates,
+           for each realization.
+        """
         data = []
 
         if self.mode == "folding":
@@ -187,12 +442,101 @@ def mock(self, n=1, dt=None, t=None, wcs=None):
                 data.append(self.fold(t=ts))
         else:
             for i in range(n):
-                data.append(self._gen_realization(dt))
+                data.append(self._gen_realization(dt=dt))
 
-        if self.config["kind"] == "world":
+        if self.kind == "world":
+            racol, deccol = self.config["world_pos_cols"]
+            xpixcol, ypixcol = self.config["pix_pos_cols"]
             for cat, w in zip(data, wcs):
-                x, y = w.world_to_pixel(SkyCoord(ra=cat["ra_mean"], dec=cat["dec_mean"], unit="deg"))
-                cat["x_mean"] = x
-                cat["y_mean"] = y
+                x, y = w.world_to_pixel(SkyCoord(ra=cat[racol], dec=cat[deccol], unit="deg"))
+                cat[xpixcol] = x
+                cat[ypixcol] = y
 
         return data
+
+
+class SourceCatalogConfig(SimpleCatalogConfig):
+    """Source catalog config.
+
+    Assumes sources are static, asymmetric 2D Gaussians.
+
+    Parameter ranges
+    ----------------
+    amplitude : [1, 10]
+        Amplitude of the model.
+    x_mean : [0, 4096]
+        Real valued x coordinate of the object's centroid.
+    y_mean : [0, 2048]
+        Real valued y coordinate of the object's centroid.
+    x_stddev : [1, 3]
+        Real valued standard deviation of the model distribution, in x.
+    y_stddev : [1, 3]
+        Real valued standard deviation of the model distribution, in y.
+    theta : `[0, np.pi]`
+        Rotation of the model's covariance matrix, increases counterclockwise.
+        In radians.
+    """
+    param_ranges = {
+        "amplitude": [1.0, 10.0],
+        "x_mean": [0.0, 4096.0],
+        "y_mean": [0.0, 2048.0],
+        "x_stddev": [1.0, 3.0],
+        "y_stddev": [1.0, 3.0],
+        "theta": [0.0, np.pi],
+    }
+
+
+class SourceCatalog(SimpleCatalog):
+    """A static catalog representing stars and galaxies.
+
+    Coordinates defined in pixel space.
+    """
+    default_config = SourceCatalogConfig
+
+
+class ObjectCatalogConfig(SimpleCatalogConfig):
+    """Object catalog config.
+
+    Assumes objects are symmetric 2D Gaussians moving in a linear fashion.
+
+    Parameter ranges
+    ----------------
+    amplitude : [0.1, 3]
+        Amplitude of the model.
+    x_mean : [0, 4096]
+        Real valued x coordinate of the object's centroid.
+    y_mean : [0, 2048]
+        Real valued y coordinate of the object's centroid.
+    vx : [500, 1000]
+        Velocity of the object's centroid in x, in pixels per time step.
+    vy : [500, 1000]
+        Velocity of the object's centroid in y, in pixels per time step.
+    stddev : [0.25, 1.5]
+        Real valued standard deviation of the symmetric model distribution.
+    theta : `[0, np.pi]`
+        Rotation of the model's covariance matrix, increases counterclockwise.
+        In radians.
+    """
+    mode = "progressive"
+    param_ranges = {
+        "amplitude": [0.1, 3.0],
+        "x_mean": [0.0, 4096.0],
+        "y_mean": [0.0, 2048.0],
+        "vx": [500.0, 1000.0],
+        "vy": [500.0, 1000.0],
+        "stddev": [0.25, 1.5],
+        "theta": [0.0, np.pi],
+    }
+
+
+class ObjectCatalog(SimpleCatalog):
+    """A catalog of moving objects.
+
+    Assumed to be symmetric 2D Gaussians whose centroids are defined in pixel
+    space and moving in a linear fashion, with velocity also defined in pixel space.
+    The units are relative to the timestep.
+    """
+    default_config = ObjectCatalogConfig
+
+    def __init__(self, table, **kwargs):
+        # Obj cat always has to return a copy
+        kwargs["return_copy"] = True
+        super().__init__(table=table, **kwargs)
diff --git a/src/kbmod/mocking/fits_data.py b/src/kbmod/mocking/data.py
similarity index 61%
rename from src/kbmod/mocking/fits_data.py
rename to src/kbmod/mocking/data.py
index 669c26917..cc27201fd 100644
--- a/src/kbmod/mocking/fits_data.py
+++ b/src/kbmod/mocking/data.py
@@ -8,13 +8,10 @@
 
 __all__ = [
     "add_model_objects",
-    "DataFactoryConfig",
     "DataFactory",
-    "SimpleImageConfig",
     "SimpleImage",
-    "SimpleMaskConfig",
     "SimpleMask",
-    "SimulatedImageConfig",
+    "SimpleVariance",
     "SimulatedImage",
 ]
 
@@ -32,8 +29,7 @@ def add_model_objects(img, catalog, model):
     catalog : `astropy.table.QTable`
         Table of objects, a catalog
     model : `astropy.modelling.Model`
-        Astropy's model of the surface brightness of an source. Must contain
-        at least ``x_mean`` and ``y_mean``.
+        Astropy's model of the surface brightness of an source.
 
     Returns
     -------
@@ -59,11 +55,12 @@ def add_model_objects(img, catalog, model):
         for i, source in enumerate(catalog):
             for param in params_to_set:
                 setattr(model, param, source[param])
-
-            if all(
-                    [model.x_mean > 0, model.x_mean < img.shape[1], model.y_mean > 0, model.y_mean < img.shape[0]]
-            ):
-                model.render(img)
+            model.render(img)
+    except ValueError as e:
+        # ignore rendering models larger than the image
+        message = "The `bounding_box` is larger than the input out in one or more dimensions."
+        if message in str(e):
+            pass
+        else:
+            raise
     finally:
         for param, value in init_params.items():
             setattr(model, param, value)
@@ -77,19 +74,26 @@ class DataFactoryConfig(Config):
     """
 
     default_img_shape = (5, 5)
+    """Default image size, used if mocking ImageHDU or CompImageHDUs."""
 
     default_img_bit_width = 32
+    """Default image data type is float32; the value of BITPIX flag in headers.
+    See bitpix_type_map for other codes.
+    """
 
     default_tbl_length = 5
+    """Default table length, used if mocking BinTableHDU or TableHDU HDUs."""
 
     default_tbl_dtype = np.dtype([("a", int), ("b", int)])
+    """Default table dtype, used when mocking table-HDUs that do not contain
+    a description of table layout.
+    """
 
     writeable = False
     """Sets the base array ``writeable`` flag. Default `False`."""
 
     return_copy = False
-    """
-    When `True`, the `DataFactory.mock` returns a copy of the final object,
+    """When `True`, the `DataFactory.mock` returns a copy of the final object,
     otherwise the original (possibly mutable!) object is returned. Default `False`.
     """
 
@@ -110,35 +114,31 @@ class DataFactoryConfig(Config):
 
 
 class DataFactory:
-    """Generic data factory.
-
-    Data can consists of tabular or array-like data of a single or compound
-    types. Generally the process of creation of a mocked data is relatively
-    simple:
-    1) pre-generate whatever shared common base data is shared by all mocked
-       data
-    2) mock one or many instances of dynamic portions of the data and paste
-       them onto the base static data.
-
-    The base data can be writable or non-writable to prevent accidentally
-    mutating its value. Some data factories consist of only the simple base
-    data. In the case it is desired that the mocked data is further modified
-    it is possible to return a copy of that base data, so that the base data is
-    not mutated. By default it's assumed that all mocked data are allowed to
-    share a single, non-mutable, base data.
-
-    This saves memory and improves performance as, for example, there is no
-    need to mock and allocate N (dimx, dimy) arrays for a shared static masking
-    array shared by all associated images. Therefore the default implementation
-    of mocking is to just continuously return the given base data.
-
-    For dynamically created data, the data may depend on the type of the
-    Header Data Unit (HDU), or dynamically changing header data. Data generated
-    in these scenarios behaves as ``return_copy`` is always `True` regardless
-    of the configuration value.
+    """Generic data factory that can mock table and image HDUs from default
+    settings or given header definitions.
+
+    Given a template, this factory repeats it for each mock.
+    A reference to the base template is returned whenever possible for
+    performance reasons. To prevent accidental mutation of the shared
+    array, the default behavior is that the returned data is not writable.
+
+    A base template value of `None` is accepted as valid to satisfy FITS
+    factory use-case of generating HDUList stubs containing only headers.
+
+    The primary purpose of this factory is to derive the template data given a
+    table, an HDU or a Header object. When the base has no data, but only a
+    description of it, such as a Header, the default is to return "zeros"
+    for that data type: a zero-length string, a literal integer zero, a float
+    zero, etc.
 
     Attributes
     ----------
+    base : `np.array`, `np.recarray` or `None`
+        Base data template.
+    shape : `tuple`
+        Shape of base array when it exists.
+    dtype : `type`
+        Numpy type of the base array, when it exists.
     counter : `int`
         Data factory tracks an internal counter of generated objects that can
         be used as a ticker for generating new data.
@@ -147,11 +147,31 @@ class DataFactory:
     ----------
     base : `np.array`
         Static data shared by all mocked instances.
-    config : `DataFactoryConfig`
-        Configuration of the data factory.
-    **kwargs :
+    kwargs :
         Additional keyword arguments are applied as configuration
         overrides.
+
+    Examples
+    --------
+    >>> from astropy.io.fits import Header, CompImageHDU, BinTableHDU
+    >>> import kbmod.mocking as kbmock
+    >>> import numpy as np
+    >>> base = np.zeros((2, 2))
+    >>> hdu = CompImageHDU(base)
+    >>> kbmock.DataFactory.from_hdu(hdu).mock()
+    array([[[0., 0.],
+            [0., 0.]]])
+    >>> kbmock.DataFactory.from_header("image", hdu.header).mock()
+    array([[[0., 0.],
+            [0., 0.]]])
+    >>> base = np.array([("test1", 10), ("test2", 11)], dtype=[("col1", "U5"), ("col2", int)])
+    >>> hdu = BinTableHDU(base)
+    >>> kbmock.DataFactory.from_hdu(hdu).mock()
+    array([[(b'test1', 10), (b'test2', 11)]],
+          dtype=(numpy.record, [('col1', 'S5'), ('col2', '<i8')]))
+    >>> kbmock.DataFactory.from_header("table", hdu.header).mock()
+    array([[(b'', 0), (b'', 0)]],
+          dtype=(numpy.record, [('col1', 'S5'), ('col2', '>i8')]))
     """
 
     default_config = DataFactoryConfig
@@ -161,42 +181,79 @@ def __init__(self, base, **kwargs):
         self.config = self.default_config(**kwargs)
 
         self.base = base
-        if base is None:
-            self.shape = None
-            self.dtype = None
-        else:
+        if self.base is not None:
             self.shape = base.shape
             self.dtype = base.dtype
             self.base.flags.writeable = self.config["writeable"]
-            self.counter = 0
+        self.counter = 0
 
     @classmethod
-    def gen_image(cls, metadata=None, **kwargs):
+    def gen_image(cls, header=None, **kwargs):
+        """Generate an image from a complete or partial header and config.
+
+        If a header is given, it trumps the default config values. When the
+        header is not complete, config values are used. Config overrides are
+        applied before the data description is evaluated.
+
+        Parameters
+        ----------
+        header : `None`, `Header` or dict-like, optional
+            Header, or dict-like object, containing the image-data descriptors.
+        kwargs :
+            Any additional keyword arguments are applied as config overrides.
+
+        Returns
+        -------
+        image : `np.array`
+            Image
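+
+        Examples
+        --------
+        A minimal sketch; cards missing from the header fall back to the
+        config defaults:
+
+        >>> import kbmod.mocking as kbmock
+        >>> kbmock.DataFactory.gen_image({"NAXIS1": 3, "NAXIS2": 2}).shape
+        (3, 2)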
+        """
         conf = cls.default_config(**kwargs)
-        cols = metadata.get("NAXIS1", conf.default_img_shape[0])
-        rows = metadata.get("NAXIS2", conf.default_img_shape[1])
-        bitwidth = metadata.get("BITPIX", conf.default_img_bit_width)
+        metadata = {} if header is None else header
+        cols = metadata.get("NAXIS1", conf["default_img_shape"][0])
+        rows = metadata.get("NAXIS2", conf["default_img_shape"][1])
+        bitwidth = metadata.get("BITPIX", conf["default_img_bit_width"])
         dtype = conf.bitpix_type_map[bitwidth]
         shape = (cols, rows)
-
         return np.zeros(shape, dtype)
 
     @classmethod
-    def gen_table(cls, metadata=None, config=None, **kwargs):
-        conf = cls.default_config(config, **kwargs, method="subset")
-
-        # FITS format standards prescribe FORTRAN-77-like input format strings
-        # for different types, but the base set has been extended significantly
-        # since and Rubin uses completely non-standard keys with support for
-        # their own internal abstractions like 'Angle' objects:
-        # https://archive.stsci.edu/fits/fits_standard/node58.html
-        # https://docs.astropy.org/en/stable/io/fits/usage/table.html#column-creation
+    def gen_table(cls, metadata=None, **kwargs):
+        """Generate an table from a complete or partial header and config.
+
+        If a header is given, it trumps the default config values. When the
+        header is not complete, config values are used. Config overrides are
+        applied before the data description is evaluated.
+
+        Parameters
+        ----------
+        metadata : `None`, `Header` or dict-like, optional
+            Header, or dict-like object, containing the table-data descriptors.
+        kwargs :
+            Any additional keyword arguments are applied as config overrides.
+
+        Returns
+        -------
+        table : `np.array`
+            Table, a structured array.
+
+        Notes
+        -----
+        FITS format standards prescribe FORTRAN-77-like input format strings
+        for different data types, but the base set has been extended and/or
+        altered significantly by various pipelines to support objects internal
+        to those pipelines. Constructing objects, or values, described
+        by non-standard strings will result in a failure. For a list of supported
+        column-types see:
+        https://docs.astropy.org/en/stable/io/fits/usage/table.html#column-creation
+        """
+        conf = cls.default_config(**kwargs)
+
         # https://github.com/lsst/afw/blob/main/src/fits.cc#L207
         # So we really don't have much of a choice but to force a default
         # AstroPy HDU and then call the update. This might not preserve the
         # header or the data formats exactly and if metadata isn't given
         # could even assume a wrong class all together. The TableHDU is
-        # almost never used however - so hopefully this keeps on working.
+        # almost never used by us however - so hopefully this keeps on working.
         table_cls = BinTableHDU
         data = None
         if metadata is not None:
@@ -219,28 +276,82 @@ def gen_table(cls, metadata=None, config=None, **kwargs):
         return data
 
     @classmethod
-    def from_hdu(cls, hdu, config=None, **kwargs):
+    def from_hdu(cls, hdu, **kwargs):
+        """Create the factory from an HDU with or without data and with or
+        without a complete Header.
+
+        If the given HDU has data, it is preferred over creating a zero-array
+        based on the header. If the header is not complete, config defaults are
+        used. Config overrides are applied beforehand.
+
+        Parameters
+        ----------
+        hdu : `HDU`
+            One of AstroPy's Header Data Unit classes.
+        kwargs :
+            Config overrides.
+
+        Returns
+        -------
+        factory : `DataFactory`
+            Data factory whose base is an ndarray or a recarray, depending on the HDU.
+        """
         if isinstance(hdu, (PrimaryHDU, CompImageHDU, ImageHDU)):
-            return cls(base=cls.gen_image(hdu), config=config, **kwargs)
+            base = hdu.data if hdu.data is not None else cls.gen_image(hdu.header)
+            return cls(base=base, **kwargs)
         elif isinstance(hdu, (TableHDU, BinTableHDU)):
-            return cls(base=cls.gen_table(hdu), config=config, **kwargs)
+            base = hdu.data if hdu.data is not None else cls.gen_table(hdu.header)
+            return cls(base=base, **kwargs)
         else:
             raise TypeError(f"Expected an HDU, got {type(hdu)} instead.")
 
     @classmethod
-    def from_header(cls, kind, header, config=None, **kwargs):
+    def from_header(cls, header, kind=None, **kwargs):
+        """Create the factory from an complete or partial Header.
+
+        Provide the ``kind`` of data the header represents in situations where
+        the Header does not have a well-defined ``XTENSION`` card.
+
+        Parameters
+        ----------
+        header : `astropy.io.fits.Header`
+            Header
+        kind : `str` or `None`, optional
+            Kind of data the header is representing.
+        kwargs :
+            Config overrides.
+
+        Returns
+        -------
+        factory : `DataFactory`
+            Data factory whose base is an ndarray or a recarray, depending on the Header and kind.
+        """
+        hkind = header.get("XTENSION", False)
+        if hkind and "table" in hkind.lower():
+            kind = "table"
+        elif hkind and "image" in hkind.lower():
+            kind = "image"
+        elif kind is None:
+            raise ValueError("Must provide a header with XTENSION or ``kind``")
+        else:
+            # kind was defined as keyword arg, so all is right
+            pass
+
         if kind.lower() == "image":
-            return cls(base=cls.gen_image(header), config=config, **kwargs)
+            return cls(base=cls.gen_image(header), **kwargs)
         elif kind.lower() == "table":
-            return cls(base=cls.gen_table(header), config=config, **kwargs)
+            return cls(base=cls.gen_table(header), **kwargs)
         else:
             raise TypeError(f"Expected an 'image' or 'table', got {kind} instead.")
 
-    @classmethod
-    def zeros(cls, shape, dtype, config=None, **kwargs):
-        return cls(np.zeros(shape, dtype), config, **kwargs)
+    def mock(self, n=1):
+        """Mock one or multiple data arrays.
 
-    def mock(self, n=1, **kwargs):
+        Parameters
+        ----------
+        n : `int`
+            Number of data to mock.
+        """
         if self.base is None:
             raise ValueError(
                 "Expected a DataFactory that has a base, but none was set. "
@@ -284,19 +395,40 @@ class SimpleVariance(DataFactory):
     **kwargs :
         Additional keyword arguments are applied as config
         overrides.
+
+    Examples
+    --------
+    >>> import kbmod.mocking as kbmock
+    >>> si = kbmock.SimpleImage(shape=(3, 3), add_noise=True, seed=100)
+    >>> sv = kbmock.SimpleVariance(gain=10)
+    >>> imgs = si.mock()
+    >>> imgs
+    array([[[ 8.694266,  9.225379, 10.046582],
+            [ 8.768851, 10.201585,  8.870326],
+            [10.702058,  9.910087,  9.283925]]], dtype=float32)
+    >>> sv.mock(imgs)
+    array([[[0.8694266 , 0.9225379 , 1.0046582 ],
+            [0.8768851 , 1.0201585 , 0.8870326 ],
+            [1.0702058 , 0.99100864, 0.9283925 ]]], dtype=float32)
     """
 
     default_config = SimpleVarianceConfig
 
     def __init__(self, image=None, **kwargs):
-        # skip setting the base here since the real base is
-        # derived from given image we just set it manually
-        super().__init__(base=None, **kwargs)
-
+        super().__init__(base=image, **kwargs)
         if image is not None:
             self.base = image / self.config["gain"] + self.config["read_noise"]**2
 
     def mock(self, images=None):
+        """Mock one or multiple data arrays.
+
+        Parameters
+        ----------
+        images : `list[np.array]`, optional
+            List, or otherwise a collection, of images from which the variances
+            will be generated. When not provided, and a base template was
+            defined, returns the base template.
+        """
         if images is None:
             return self.base
         return images / self.config["gain"] + self.config["read_noise"]**2
@@ -306,13 +438,29 @@ class SimpleMaskConfig(DataFactoryConfig):
     """Simple mask configuration."""
 
     dtype = np.float32
+    """Data type"""
 
     threshold = 1e-05
+    """Default pixel value threshold above which every pixel in the template
+    will be masked.
+    """
 
     shape = (5, 5)
+    """Default image shape."""
+
     padding = 0
+    """Number of pixels near the edge that are masked."""
+
     bad_columns = []
+    """List of columns marked as bad."""
+
     patches = []
+    """Default patches to mask. This is a list of tuples. Each tuple consists of
+    a patch and a value. The patch can be any combination of array coordinates
+    such as ``(int, int)`` for individual pixels, ``(slice, int)`` or
+    ``(int, slice)`` for columns and rows respectively, or ``(slice, slice)``
+    for areas. See `SimpleMask.from_params` for an example.
+    """
 
 
 class SimpleMask(DataFactory):
@@ -326,44 +474,57 @@ class SimpleMask(DataFactory):
     ----------
     mask : `np.array`
         Bitmask array.
+    kwargs :
+        Config overrides.
+
+    Examples
+    --------
+    >>> import kbmod.mocking as kbmock
+    >>> si = kbmock.SimpleImage(shape=(3, 3), add_noise=True, seed=100)
+    >>> imgs = si.mock()
+    >>> imgs
+    array([[[ 8.694266,  9.225379, 10.046582],
+            [ 8.768851, 10.201585,  8.870326],
+            [10.702058,  9.910087,  9.283925]]], dtype=float32)
+    >>> sm = kbmock.SimpleMask.from_image(imgs, threshold=9)
+    >>> sm.base
+    array([[[0., 1., 1.],
+            [0., 1., 0.],
+            [1., 1., 1.]]], dtype=float32)
     """
 
     default_config = SimpleMaskConfig
+    """Default configuration."""
 
     def __init__(self, mask, **kwargs):
         super().__init__(base=mask, **kwargs)
 
     @classmethod
     def from_image(cls, image, **kwargs):
+        """Create a factory instance out of an image, masking all pixels above
+        a threshold.
+
+        Parameters
+        ----------
+        image : `np.array`
+            Template image from which a mask is created.
+        kwargs :
+            Config overrides.
+        """
         config = cls.default_config(**kwargs)
         mask = image.copy()
         mask[image > config["threshold"]] = 1
+        mask[image <= config["threshold"]] = 0
         return cls(mask)
 
     @classmethod
     def from_params(cls, **kwargs):
-        """Create a mask by adding a padding around the edges of the array with
-        the given dimensions and mask out bad columns.
+        """Create a factory instance from config parameters.
 
         Parameters
         ----------
-        shape : `tuple`
-            Tuple of (width, height)/(cols, rows) dimensions of the mask.
-        padding : `int`
-            Number of pixels near the edge that will be masked.
-        bad_columns : `list[int]`
-            Indices of bad columns to mask
-        patches : `list[tuple]`
-            Patches to mask. This is a list of tuples. Each tuple consists of
-            a patch and a value. The patch can be any combination of array
-            coordinates such as ``(int, int)`` for individual pixels,
-           ``(slice, int)`` or ``(int, slice)`` for columns and rows
-           respectively  or ``(slice, slice)`` for areas.
-
-        Returns
-        -------
-        mask_factory : `SimpleMask`
-            Mask factory.
+        kwargs :
+            Config overrides.
 
         Examples
         --------
@@ -433,9 +594,12 @@ class SimpleImageConfig(DataFactoryConfig):
     noise_std = 1.0
     """Standard deviation of the Gaussian distribution of the noise."""
 
-    model = models.Gaussian2D
+    model = models.Gaussian2D(x_stddev=1, y_stddev=1)
     """Source and object model used to render them on the image."""
 
+    dtype = np.float32
+    """Numpy data type."""
+
 
 class SimpleImage(DataFactory):
     """Simple image data factory.
@@ -445,31 +609,46 @@ class SimpleImage(DataFactory):
     image.
 
     Noise realization is drawn from a Gaussian distribution with the given
-    standard deviation and centered on the given mean.
+    standard deviation and mean.
 
     Parameters
     ----------
     image : `np.array`
         Science image that will be used as a base onto which to render details.
-    config : `SimpleImageConfig`
-        Configuration.
     src_cat : `CatalogFactory`
         Static source catalog.
     obj_cat : `CatalogFactory`
         Moving object catalog factory.
-    **kwargs :
-        Additional keyword arguments are applied as config
+    kwargs :
+        Additional keyword arguments are applied as config
         overrides.
-    """
 
+    Examples
+    --------
+    >>> import kbmod.mocking as kbmock
+    >>> si = kbmock.SimpleImage()
+    >>> si.mock()
+    array([[[0., 0., 0., ..., 0., 0., 0.],
+            [0., 0., 0., ..., 0., 0., 0.],
+            [0., 0., 0., ..., 0., 0., 0.],
+            ...,
+            [0., 0., 0., ..., 0., 0., 0.],
+            [0., 0., 0., ..., 0., 0., 0.],
+            [0., 0., 0., ..., 0., 0., 0.]]], dtype=float32)
+    >>> si = kbmock.SimpleImage(shape=(3, 3), add_noise=True, seed=100)
+    >>> si.mock()
+    array([[[ 8.694266,  9.225379, 10.046582],
+            [ 8.768851, 10.201585,  8.870326],
+            [10.702058,  9.910087,  9.283925]]], dtype=float32)
+    """
     default_config = SimpleImageConfig
+    """Default configuration."""
 
-    def __init__(self, image=None, src_cat=None, obj_cat=None,
-                 dtype=np.float32, **kwargs):
+    def __init__(self, image=None, src_cat=None, obj_cat=None, **kwargs):
         super().__init__(image, **kwargs)
 
         if image is None:
-            image = np.zeros(self.config["shape"], dtype=dtype)
+            image = np.zeros(self.config["shape"], dtype=self.config["dtype"])
         else:
             image = image
             self.config["shape"] = image.shape
@@ -480,7 +659,7 @@ def __init__(self, image=None, src_cat=None, obj_cat=None,
         self.src_cat = src_cat
         if self.src_cat is not None:
             image = image if image.flags.writeable else image.copy()
-            add_model_objects(image, src_cat.table, self.config["model"](x_stddev=1, y_stddev=1))
+            add_model_objects(image, src_cat.table, self.config["model"])
             image.flags.writeable = self.config["writeable"]
 
         self.base = image
@@ -496,6 +675,11 @@ def add_noise(cls, images, config):
            A ``(n_images, image_width, image_height)`` shaped array of images.
         config : `SimpleImageConfig`
            Configuration.
+
+        Returns
+        -------
+        images : `np.array`
+           A ``(n_images, image_width, image_height)`` shaped array of images.
         """
         rng = np.random.default_rng(seed=config["seed"])
         shape = images.shape
@@ -520,9 +704,14 @@ def mock(self, n=1, obj_cats=None, **kwargs):
         obj_cats : `list[Catalog]`
             A list of catalogs as long as the number of requested images of
             moving objects that will be inserted into the image.
+
+        Returns
+        -------
+        images : `np.array`
+           A ``(n_images, image_width, image_height)`` shaped array of images.
         """
         shape = (n, *self.config["shape"])
-        images = np.zeros(shape, dtype=np.float32)
+        images = np.zeros(shape, dtype=self.config["dtype"])
 
         if self.config["add_noise"]:
             images = self.add_noise(images=images, config=self.config)
@@ -539,7 +728,7 @@ def mock(self, n=1, obj_cats=None, **kwargs):
         if obj_cats is not None:
             pairs = [(images[0], obj_cats[0])] if n == 1 else zip(images, obj_cats)
             for i, (img, cat) in enumerate(pairs):
-                add_model_objects(img, cat, self.config["model"](x_stddev=1, y_stddev=1))
+                add_model_objects(img, cat, self.config["model"])
 
         return images
 
@@ -547,27 +736,32 @@ def mock(self, n=1, obj_cats=None, **kwargs):
 class SimulatedImageConfig(DataFactoryConfig):
     """Simulated image configuration.
 
-    Simulated image add several sources of noise into the image, bad columns,
-    hot pixels, including the possibility of adding static and moving sources.
-    On a higher level it is possible to modify this behavior by setting:
+    Simulated image attempts to add noise to the image in a statistically
+    meaningful sense, but it does not reproduce the noise qualities in the same
+    way an optical simulation would. Noise sources added are:
+    - bad columns
+    - hot pixels
+    - read noise
+    - dark current
+    - sky level
+
+    The quantities are expressed in physical units and the defaults were
+    selected to be roughly realistic.
+
+    Which sources of noise are included in the image is controlled by
+    setting the
     - add_noise
     - add_bad_cols
     - add_hot_pix
-    parameters to `False`. Optionally, for a more fine-grained control set the
-    distribution parameters, f.e. mean and standard deviation, such that they
-    do not produce measurable values in the image.
-
-    The quantities are expressed in physical units and the defaults were
-    selected to sort of make sense. Sky levels are not modeled very
-    realistically, being expressed as an additional level of noise, in counts,
-    additive to the detector noise.
+    flags to `False`. For more fine-grained control, set the distribution
+    parameters, e.g. mean and standard deviation, such that they do not produce
+    measurable values in the image.
 
-    Generally expect the mean value of pixel counts to be:
+    Expect the mean value of pixel counts to be:
 
         bias + mean(dark_current)*exposure + mean(sky_level)
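+
+    For example, with a hypothetical ``bias = 100``, mean ``dark_current = 0.1``,
+    ``exposure = 60`` and mean ``sky_level = 20``, the expected mean pixel count
+    is ``100 + 0.1 * 60 + 20 = 126``.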
 
-    The deviation of the pixel counts around the expected value can be
-    estimated by:
+    The standard deviation of the pixel counts should be approximately:
 
         sqrt( std(read_noise)^2 + sqrt(sky_level)^2 )
     """
@@ -657,19 +851,29 @@ class SimulatedImageConfig(DataFactoryConfig):
     """Sky level, in counts."""
 
     # Object and Source properties
-    model = models.Gaussian2D
+    model = models.Gaussian2D(x_stddev=1, y_stddev=1)
+    """Source and object model used to render them on the image."""
+
+    dtype = np.float32
+    """Numpy data type."""
 
 
 class SimulatedImage(SimpleImage):
-    """Simulated image includes multiple sources of noise, bad columns, hot
-    pixels, static sources and moving objects.
+    """Simulated image attempt to include a more realistic noise profile.
+
+    Noise sources added are:
+    - bad columns
+    - hot pixels
+    - read noise
+    - dark current
+    - sky level
+
+    Static or moving objects may be added to the simulated image.
 
     Parameters
     ----------
     image : `np.array`
-        Science image that will be used as a base onto which to render details.
-    config : `SimpleImageConfig`
-        Configuration.
+        Base template image on which details will be rendered.
     src_cat : `CatalogFactory`
         Static source catalog.
     obj_cat : `CatalogFactory`
@@ -680,6 +884,7 @@ class SimulatedImage(SimpleImage):
     """
 
     default_config = SimulatedImageConfig
+    """Default config."""
 
     @classmethod
     def add_bad_cols(cls, image, config):
@@ -794,7 +999,7 @@ def add_noise(cls, images, config):
         return images
 
     @classmethod
-    def gen_base_image(cls, config=None, src_cat=None, dtype=np.float32):
+    def gen_base_image(cls, config=None, src_cat=None):
         """Generate base image from configuration.
 
         Parameters
@@ -812,15 +1017,20 @@ def gen_base_image(cls, config=None, src_cat=None, dtype=np.float32):
         config = cls.default_config(config)
 
         # empty image
-        base = np.zeros(config["shape"], dtype=dtype)
+        base = np.zeros(config["shape"], dtype=config["dtype"])
         base += config["bias"]
         base = cls.add_hot_pixels(base, config)
         base = cls.add_bad_cols(base, config)
         if src_cat is not None:
-            add_model_objects(base, src_cat.table, config["model"](x_stddev=1, y_stddev=1))
+            add_model_objects(base, src_cat.table, config["model"])
 
         return base
 
-    def __init__(self, image=None, src_cat=None, obj_cat=None, dtype=np.float32,**kwargs):
+    def __init__(self, image=None, src_cat=None, obj_cat=None, **kwargs):
         conf = self.default_config(**kwargs)
-        super().__init__(image=self.gen_base_image(conf, dtype=dtype), src_cat=src_cat, obj_cat=obj_cat, **conf)
+        super().__init__(
+            image=self.gen_base_image(conf),
+            src_cat=src_cat,
+            obj_cat=obj_cat,
+            **conf
+        )
diff --git a/src/kbmod/mocking/dump_headers.py b/src/kbmod/mocking/dump_headers.py
new file mode 100644
index 000000000..f5a26ec07
--- /dev/null
+++ b/src/kbmod/mocking/dump_headers.py
@@ -0,0 +1,309 @@
+# Modified from the original Astropy code to add a card format to the
+# tabular output. All rights belong to the original authors.
+
+# Licensed under a 3-clause BSD style license - see LICENSE.rst
+"""
+A modified version of Astropy's ``fitsheader`` utility that adds a datatype
+column for each header card to the output. All rights belong to the original
+authors.
+
+``archiveheaders`` is a command line script based on astropy.io.fits for printing
+the header(s) of one or more FITS file(s) to the standard output in a human-
+readable format.
+
+The modifications to the script include:
+- supporting only tabular output
+- making "ascii.ecsv" the default output format
+- appending a datatype to each serialized header card, describing the type of
+  the card's value.
+
+Example uses of ``archiveheaders``:
+
+1. Print the header of all the HDUs of a .fits file::
+
+    $ archiveheaders filename.fits
+
+2. Dump the header keywords of all the files in the current directory into a
+   machine-readable ecsv file::
+
+    $ archiveheaders *.fits > keywords.ecsv
+
+3. Sorting the output along a specified keyword::
+
+    $ archiveheaders -f -s DATE-OBS *.fits
+
+4. Sort first by OBJECT, then DATE-OBS::
+
+    $ archiveheaders -f -s OBJECT -s DATE-OBS *.fits
+
+Note that compressed images (HDUs of type
+:class:`~astropy.io.fits.CompImageHDU`) really have two headers: a real
+BINTABLE header to describe the compressed data, and a fake IMAGE header
+representing the image that was compressed. Astropy returns the latter by
+default. You must supply the ``--compressed`` option if you require the real
+header that describes the compression.
+
+With Astropy installed, please run ``archiveheaders --help`` to see the full usage
+documentation.
+"""
+
+import argparse
+import sys
+
+import numpy as np
+
+from astropy import __version__, log
+from astropy.io import fits
+
+DESCRIPTION = """
+Print the header(s) of a FITS file. Optional arguments allow the desired
+extension(s), keyword(s), and output format to be specified.
+Note that in the case of a compressed image, the decompressed header is
+shown by default.
+
+This script is adapted from the Astropy package. See
+https://docs.astropy.org/en/latest/io/fits/usage/scripts.html#module-astropy.io.fits.scripts.fitsheader
+for further documentation.
+""".strip()
+
+
+class ExtensionNotFoundException(Exception):
+    """Raised if an HDU extension requested by the user does not exist."""
+
+
+class HeaderFormatter:
+    """Class to format the header(s) of a FITS file for display by the
+    `fitsheader` tool; essentially a wrapper around a `HDUList` object.
+
+    Example usage:
+    fmt = HeaderFormatter('/path/to/file.fits')
+    print(fmt.parse())
+
+    Parameters
+    ----------
+    filename : str
+        Path to a single FITS file.
+    verbose : bool
+        Verbose flag, to show more information about missing extensions,
+        keywords, etc.
+
+    Raises
+    ------
+    OSError
+        If `filename` does not exist or cannot be read.
+    """
+
+    def __init__(self, filename, verbose=True):
+        self.filename = filename
+        self.verbose = verbose
+        self._hdulist = fits.open(filename)
+
+    def parse(self, compressed=False):
+        """Returns the FITS file header(s) in a readable format.
+
+        Parameters
+        ----------
+        compressed : bool, optional
+            If True, shows the header describing the compression, rather than
+            the header obtained after decompression. (Affects FITS files
+            containing `CompImageHDU` extensions only.)
+
+        Returns
+        -------
+        formatted_header : str or astropy.table.Table
+            Traditional 80-char wide format in the case of `HeaderFormatter`;
+            an Astropy Table object in the case of `TableHeaderFormatter`.
+        """
+        hdukeys = range(len(self._hdulist))  # Display all by default
+        return self._parse_internal(hdukeys, compressed)
+
+    def _parse_internal(self, hdukeys, compressed):
+        """The meat of the formatting; in a separate method to allow overriding."""
+        result = []
+        for idx, hdu in enumerate(hdukeys):
+            try:
+                cards = self._get_cards(hdu, compressed)
+            except ExtensionNotFoundException:
+                continue
+
+            if idx > 0:  # Separate HDUs by a blank line
+                result.append("\n")
+            result.append(f"# HDU {hdu} in {self.filename}:\n")
+            for c in cards:
+                result.append(f"{c}\n")
+        return "".join(result)
+
+    def _get_cards(self, hdukey, compressed):
+        """Returns a list of `astropy.io.fits.card.Card` objects.
+
+        This function will return the desired header cards, taking into
+        account the user's preference to see the compressed or uncompressed
+        version.
+
+        Parameters
+        ----------
+        hdukey : int or str
+            Key of a single HDU in the HDUList.
+
+        compressed : bool, optional
+            If True, shows the header describing the compression.
+
+        Raises
+        ------
+        ExtensionNotFoundException
+            If the hdukey does not correspond to an extension.
+        """
+        # First we obtain the desired header
+        try:
+            if compressed:
+                # In the case of a compressed image, return the header before
+                # decompression (not the default behavior)
+                header = self._hdulist[hdukey]._header
+            else:
+                header = self._hdulist[hdukey].header
+        except (IndexError, KeyError):
+            message = f"{self.filename}: Extension {hdukey} not found."
+            if self.verbose:
+                log.warning(message)
+            raise ExtensionNotFoundException(message)
+
+        # return all cards
+        cards = header.cards
+        return cards
+
+    def close(self):
+        self._hdulist.close()
+
+
+class TableHeaderFormatter(HeaderFormatter):
+    """Class to convert the header(s) of a FITS file into a Table object.
+    The table returned by the `parse` method will contain five columns:
+    filename, hdu, keyword, value, and format (the type name of the value).
+
+    Subclassed from HeaderFormatter, which contains the meat of the formatting.
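+
+    Example usage (the path below is a hypothetical placeholder):
+    fmt = TableHeaderFormatter('/path/to/file.fits')
+    tbl = fmt.parse()
+    fmt.close()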
+    """
+
+    def _parse_internal(self, hdukeys, compressed):
+        """Method called by the parse method in the parent class."""
+        tablerows = []
+        for hdu in hdukeys:
+            try:
+                for card in self._get_cards(hdu, compressed):
+                    tablerows.append(
+                        {
+                            "filename": self.filename,
+                            "hdu": hdu,
+                            "keyword": card.keyword,
+                            "value": str(card.value),
+                            "format": type(card.value).__name__,
+                        }
+                    )
+            except ExtensionNotFoundException:
+                pass
+
+        if tablerows:
+            from astropy import table
+
+            return table.Table(tablerows)
+        return None
+
+
+def print_headers_as_table(args):
+    """Prints FITS header(s) in a machine-readable table format.
+
+    Parameters
+    ----------
+    args : argparse.Namespace
+        Arguments passed from the command-line as defined below.
+    """
+    tables = []
+    # Create a Table object for each file
+    for filename in args.filename:  # Support wildcards
+        formatter = None
+        try:
+            formatter = TableHeaderFormatter(filename)
+            tbl = formatter.parse(args.compressed)
+            if tbl:
+                tables.append(tbl)
+        except OSError as e:
+            log.error(str(e))  # file not found or unreadable
+        finally:
+            if formatter:
+                formatter.close()
+
+    # Concatenate the tables
+    if len(tables) == 0:
+        return False
+    elif len(tables) == 1:
+        resulting_table = tables[0]
+    else:
+        from astropy import table
+
+        resulting_table = table.vstack(tables)
+    # Print the string representation of the concatenated table
+    resulting_table.write(sys.stdout, format=args.table)
+
+
+def main(args=None):
+    """This is the main function called by the `fitsheader` script."""
+    parser = argparse.ArgumentParser(
+        description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+    mode_group = parser.add_mutually_exclusive_group()
+    mode_group.add_argument(
+        "-t",
+        "--table",
+        nargs="?",
+        default="ascii.ecsv",
+        metavar="FORMAT",
+        help=(
+            '"The default format is "ascii.ecsv" (can be "ascii.csv", "ascii.html", '
+            '"ascii.latex", "fits", etc)'
+        ),
+    )
+    mode_group.add_argument(
+        "-f",
+        "--fitsort",
+        action="store_true",
+        help=("print the headers as a table with each unique keyword in a given column (fitsort format) "),
+    )
+    parser.add_argument(
+        "-s",
+        "--sort",
+        metavar="SORT_KEYWORD",
+        action="append",
+        type=str,
+        help=(
+            "sort output by the specified header keywords, can be repeated to "
+            "sort by multiple keywords; Only supported with -f/--fitsort"
+        ),
+    )
+    parser.add_argument(
+        "-c",
+        "--compressed",
+        action="store_true",
+        help=(
+            "for compressed image data, show the true header which describes "
+            "the compression rather than the data"
+        ),
+    )
+    parser.add_argument(
+        "filename",
+        nargs="+",
+        help="path to one or more files; wildcards are supported",
+    )
+    args = parser.parse_args(args)
+
+    if args.sort:
+        args.sort = [key.replace(".", " ") for key in args.sort]
+        if not args.fitsort:
+            log.error("Sorting with -s/--sort is only supported in conjunction with" " -f/--fitsort")
+            # 2: Unix error convention for command line syntax
+            sys.exit(2)
+
+    # Now print the desired headers
+    try:
+        print_headers_as_table(args)
+    except OSError:
+        # A 'Broken pipe' OSError may occur when stdout is closed prematurely,
+        # eg. when calling `fitsheader file.fits | head`. We let this pass.
+        pass
diff --git a/src/kbmod/mocking/fits.py b/src/kbmod/mocking/fits.py
index 5db6650ad..35a3c1ee8 100644
--- a/src/kbmod/mocking/fits.py
+++ b/src/kbmod/mocking/fits.py
@@ -3,16 +3,12 @@
 
 from .callbacks import IncrementObstime
 from .headers import HeaderFactory, ArchivedHeader
-from .fits_data import (
+from .data import (
     DataFactoryConfig,
     DataFactory,
-    SimpleImageConfig,
     SimpleImage,
-    SimulatedImageConfig,
     SimulatedImage,
-    SimpleVarianceConfig,
     SimpleVariance,
-    SimpleMaskConfig,
     SimpleMask,
 )
 
@@ -25,7 +21,7 @@
 
 
 class NoneFactory:
-    "Kinda makes some code later prettier. Kinda"
+    """Factory that returns `None`."""
     def mock(self, n):
         return [
             None,
@@ -33,6 +29,54 @@ def mock(self, n):
 
 
 class EmptyFits:
+    """Mock FITS files containing zeroed arrays.
+
+    Mocks a FITS file containing 4 extensions:
+    - primary
+    - image
+    - variance
+    - mask
+    each of which contains only a zeroed array. By default the created data is
+    immutable, which is useful when data is added only after mocking a
+    collection of HDULists.
+
+    The primary header contains an incrementing timestamp.
+
+    Attributes
+    ----------
+    prim_hdr : `HeaderFactory`
+        Primary header factory
+    img_hdr : `HeaderFactory`
+        Image header factory, contains WCS.
+    var_hdr : `HeaderFactory`
+        Variance header factory, contains WCS.
+    mask_hdr: `HeaderFactory`
+        Mask header factory, contains WCS.
+    img_data : `DataFactory`
+        Image data factory, used to also create variances.
+    mask_data : `DataFactory`
+        Mask data factory.
+    current : `int`
+        Counter of current mocking iteration.
+
+    Parameters
+    ----------
+    header : `dict-like` or `None`, optional
+        Keyword-value pairs that will be used to create and update the primary header
+        metadata.
+    shape : `tuple`, optional
+        Size of the image arrays, 100x100 pixels by default.
+    start_t : `str` or `astropy.time.Time`, optional
+        A timestamp string interpretable by Astropy, or a time object.
+    step_t : `float` or `astropy.time.TimeDelta`, optional
+        Timestep between each mocked instance. In units of days by default.
+    editable_images : `bool`, optional
+        Make mocked images writable and independent. `False` by default.
+        Makes the variance images editable too.
+    editable_masks : `bool`, optional
+        Make masks writable and independent. `False` by default. Masks can
+        usually be shared, so leaving them immutable reduces the memory footprint
+        and time it takes to mock large images.
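+
+    Examples
+    --------
+    A minimal usage sketch, relying on the default header metadata and shape:
+
+    >>> from kbmod.mocking.fits import EmptyFits
+    >>> factory = EmptyFits()
+    >>> hduls = factory.mock(n=2)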
+    """
     def __init__(
         self,
         header=None,
@@ -53,15 +97,16 @@ def __init__(
         self.mask_hdr = HeaderFactory.from_ext_template({"EXTNAME": "MASK"}, shape=shape)
 
         self.img_data = DataFactory.from_header(
-            kind="image", header=self.img_hdr.header, writeable=editable_images, return_copy=editable_images
+            header=self.img_hdr.header, kind="image", writeable=editable_images, return_copy=editable_images
         )
         self.mask_data = DataFactory.from_header(
-            kind="image", header=self.mask_hdr.header, return_copy=editable_masks, writeable=editable_masks
+            header=self.mask_hdr.header, kind="image", return_copy=editable_masks, writeable=editable_masks
         )
 
         self.current = 0
 
     def mock(self, n=1):
+        """Mock n empty fits files."""
         prim_hdrs = self.prim_hdr.mock(n=n)
         img_hdrs = self.img_hdr.mock(n=n)
         var_hdrs = self.var_hdr.mock(n=n)
@@ -89,6 +134,58 @@ def mock(self, n=1):
 
 
 class SimpleFits:
+    """Mock FITS files containing data.
+
+    Mocks a FITS file containing 4 extensions:
+    - primary
+    - image
+    - variance
+    - mask
+    each of which contains mocked data. By default the created data is mutable.
+    The primary header contains an incrementing timestamp.
+
+    Attributes
+    ----------
+    prim_hdr : `HeaderFactory`
+        Primary header factory
+    img_hdr : `HeaderFactory`
+        Image header factory, contains WCS.
+    var_hdr : `HeaderFactory`
+        Variance header factory, contains WCS.
+    mask_hdr: `HeaderFactory`
+        Mask header factory, contains WCS.
+    img_data : `SimpleImage` or `SimulatedImage`
+        Image data factory.
+    var_data : `SimpleVariance`
+        Variance data factory.
+    mask_data : `SimpleMask`
+        Mask data factory.
+    current : `int`
+        Counter of current mocking iteration.
+
+    Parameters
+    ----------
+    shared_header_metadata : `dict-like` or `None`, optional
+        Keyword-value pairs that will be used to create and update all of the headers.
+    shape : `tuple`, optional
+        Size of the image arrays, 100x100 pixels by default.
+    start_t : `str` or `astropy.time.Time`, optional
+        A timestamp string interpretable by Astropy, or a time object.
+    step_t : `float` or `astropy.time.TimeDelta`, optional
+        Timestep between each mocked instance. In units of days by default.
+    with_noise : `bool`
+        Add noise into the images.
+    noise : `str`
+        Noise profile to use, "simplistic" is simple Gaussian noise and
+        "realistic" simulates several noise sources and adds them together.
+    src_cat : `SourceCatalog`
+        Source catalog of static objects to add into the images.
+    obj_cat : `ObjectCatalog`
+        Object catalog of moving objects to add into the images.
+    wcs_factory : `WCSFactory`
+        Factory used to create and update WCS data in headers of mocked FITS
+        files.
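+
+    Examples
+    --------
+    A minimal usage sketch, relying on the default configuration:
+
+    >>> from kbmod.mocking.fits import SimpleFits
+    >>> factory = SimpleFits()
+    >>> hduls = factory.mock(n=3)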
+    """
     def __init__(
             self,
             shared_header_metadata=None,
@@ -147,6 +244,7 @@ def __init__(
         self.current = 0
 
     def mock(self, n=1):
+        """Mock n simple FITS files."""
         prim_hdrs = self.prim_hdr.mock(n=n)
         img_hdrs = self.img_hdr.mock(n=n)
         var_hdrs = self.var_hdr.mock(n=n)
@@ -183,6 +281,62 @@ def mock(self, n=1):
 
 
 class DECamImdiff:
+    """Mock FITS files from archived headers of Rubin Science Pipelines
+    "differenceExp" dataset type headers (arXiv:2109.03296).
+
+    Each FITS file contains 16 HDUs, one PRIMARY, 3 images (image, mask and
+    variance) and supporting data such as PSF, ArchiveId etc. stored in
+    `BinTableHDU`s.
+
+    The exported data contains approximately 60 real header data of a Rubin
+    Science Pipelines ``differenceExp`` dataset type. The data was created from
+    DEEP B1a field, as described in arXiv:2310.03678.
+
+    By default only headers are mocked.
+
+    Attributes
+    ----------
+    hdr_factory : `ArchivedHeader`
+        Header factory for all 16 HDUs.
+    data_factories : `list[DataFactory]`
+        Data factories, one for each HDU being mocked.
+    hdu_layout : `list[HDU]`
+        List of HDU types (PrimaryHDU, CompImageHDU...) used to create an HDU
+        from a header and a data factory.
+    img_data : `SimpleImage` or `SimulatedImage`
+        Reference to the second element in `data_factories`, an image data factory.
+    var_data : `SimpleVariance`
+        Reference to the third element in `data_factories`, a variance data factory.
+        The variance uses a read_noise of 7.0 e- and a gain of 5.0 e-/count, as
+        described by Table 2.2 of the DECam Data Handbook, Version 2.05, March 2014.
+    mask_data : `SimpleMask`
+        Reference to the fourth element in `data_factories`, a mask data factory.
+    current : `int`
+        Counter of current mocking iteration.
+
+    Parameters
+    ----------
+    with_data : `bool`, optional
+        Mock the data of each HDU in addition to its header. `False` by
+        default, in which case only headers are mocked.
+    with_noise : `bool`, optional
+        Add noise into the images. Only relevant when ``with_data`` is `True`.
+    noise : `str`, optional
+        Noise profile to use, "simplistic" is simple Gaussian noise and
+        "realistic" simulates several noise sources and adds them together.
+    src_cat : `SourceCatalog`, optional
+        Source catalog of static objects to add into the images.
+    obj_cat : `ObjectCatalog`, optional
+        Object catalog of moving objects to add into the images.
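+
+    Examples
+    --------
+    A minimal usage sketch; with the defaults only the headers are mocked:
+
+    >>> from kbmod.mocking.fits import DECamImdiff
+    >>> factory = DECamImdiff()
+    >>> hduls = factory.mock(n=1)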
+    """
     def __init__(self, with_data=False, with_noise=False, noise="simplistic",
                  src_cat=None, obj_cat=None):
         if obj_cat is not None and obj_cat.mode == "progressive":
@@ -218,7 +372,7 @@ def __init__(self, with_data=False, with_noise=False, noise="simplistic",
                 self.data_factories[1] = self.img_data
                 self.data_factories[2] = self.mask_data
                 self.data_factories[3] = self.mask_data
-                self.data_factories[4:] = [DataFactory.from_header("table", h) for h in headers[4:]]
+                self.data_factories[4:] = [DataFactory.from_header(h, kind="table") for h in headers[4:]]
 
         self.with_data = with_data
         self.src_cat = src_cat
@@ -228,6 +382,7 @@ def __init__(self, with_data=False, with_noise=False, noise="simplistic",
         self.current = 0
 
     def mock(self, n=1):
+        """Mock n differenceExp dataset type-like FITS files."""
         headers = self.hdr_factory.mock(n=n)
 
         obj_cats = None
diff --git a/src/kbmod/mocking/headers.py b/src/kbmod/mocking/headers.py
index dd7977cb9..2a7f01f92 100644
--- a/src/kbmod/mocking/headers.py
+++ b/src/kbmod/mocking/headers.py
@@ -13,13 +13,72 @@
 
 
 __all__ = [
-#    "make_wcs",
     "WCSFactory",
     "HeaderFactory",
     "ArchivedHeader",
 ]
 
 class WCSFactory:
+    """WCS Factory.
+
+    Used to generate collections of ICRS TAN WCS from parameters, or as a way to
+    update the given header with a new WCS.
+
+    The new WCS is generated by inheriting the header and updating its properties,
+    or by completely overwriting the Header WCS and replacing it with a new WCS.
+
+    Attributes
+    ----------
+    current : `int`
+        Counter of the number of mocked WCSs.
+    template : `WCS`
+        Template WCS.
+
+    Parameters
+    ----------
+    pointing : `tuple`, optional
+        RA and Dec w.r.t. the ICRS coordinate system, in decimal degrees.
+    rotation : `float`, optional
+        Rotation, in degrees, from ICRS equator (ecliptic).
+    pixscale : `float`, optional
+        Pixel scale, in arcseconds per pixel.
+    dither_pos : `bool`, optional
+        Dither positions of mocked WCSs.
+    dither_rot : `bool`, optional
+        Dither rotations of mocked WCSs.
+    dither_amplitudes : `tuple`, optional
+        A set of 3 values: the dither amplitude in the RA direction, in the Dec
+        direction, and in rotation, all in decimal degrees.
+    cycle : `list[WCS]`, optional
+        A list of pre-created WCS objects through which to iterate.
+
+    Examples
+    --------
+    >>> from astropy.io.fits import Header
+    >>> import kbmod.mocking as kbmock
+    >>> wcsf = kbmock.WCSFactory(pointing=(10, 10), rotation=45)
+    >>> hdr = Header()
+    >>> wcsf.update_headers([hdr])
+    >>> hdr
+    WCSAXES =                    2 / Number of coordinate axes
+    CRPIX1  =                  0.0 / Pixel coordinate of reference point
+    CRPIX2  =                  0.0 / Pixel coordinate of reference point
+    PC1_1   =  -3.928369684292E-05 / Coordinate transformation matrix element
+    PC1_2   =  3.9283723288914E-05 / Coordinate transformation matrix element
+    PC2_1   =  3.9283723288914E-05 / Coordinate transformation matrix element
+    PC2_2   =   3.928369684292E-05 / Coordinate transformation matrix element
+    CDELT1  =                  1.0 / [deg] Coordinate increment at reference point
+    CDELT2  =                  1.0 / [deg] Coordinate increment at reference point
+    CUNIT1  = 'deg'                / Units of coordinate increment and value
+    CUNIT2  = 'deg'                / Units of coordinate increment and value
+    CTYPE1  = 'RA---TAN'           / Right ascension, gnomonic projection
+    CTYPE2  = 'DEC--TAN'           / Declination, gnomonic projection
+    CRVAL1  =                 10.0 / [deg] Coordinate value at reference point
+    CRVAL2  =                 10.0 / [deg] Coordinate value at reference point
+    LONPOLE =                180.0 / [deg] Native longitude of celestial pole
+    LATPOLE =                 10.0 / [deg] Native latitude of celestial pole
+    MJDREF  =                  0.0 / [d] MJD of fiducial time
+    RADESYS = 'ICRS'               / Equatorial coordinate system,
+    """
     def __init__(self, mode="static",
                  pointing=(351., -5), rotation=0, pixscale=0.2,
                  dither_pos=False, dither_rot=False, dither_amplitudes=(0.01, 0.01, 0.0),
@@ -41,7 +100,7 @@ def __init__(self, mode="static",
         self.current = 0
 
     @classmethod
-    def gen_wcs(cls, center_coords, rotation, pixscale, shape=None):
+    def gen_wcs(cls, pointing, rotation=0, pixscale=1, shape=None):
         """
         Create a simple celestial `~astropy.wcs.WCS` object in ICRS
         coordinate system.
@@ -61,14 +120,31 @@ def gen_wcs(cls, center_coords, rotation, pixscale, shape=None):
         Returns
         -------
         wcs : `astropy.wcs.WCS`
-        The world coordinate system.
+           The world coordinate system.
 
         Examples
         --------
-        >>> from kbmod.mocking import make_wcs
-        >>> shape = (100, 100)
-        >>> wcs = make_wcs(shape)
-        >>> wcs = make_wcs(shape, (115, 5), 45, 0.1)
+        >>> import kbmod.mocking as kbmock
+        >>> [kbmock.WCSFactory.gen_wcs((10, 10), rot, 0.2) for rot in (0, 90)]
+        [WCS Keywords
+
+        Number of WCS axes: 2
+        CTYPE : 'RA---TAN' 'DEC--TAN'
+        CRVAL : 10.0 10.0
+        CRPIX : 0.0 0.0
+        PC1_1 PC1_2  : -5.555555555555556e-05 0.0
+        PC2_1 PC2_2  : 0.0 5.555555555555556e-05
+        CDELT : 1.0 1.0
+        NAXIS : 0  0, WCS Keywords
+
+        Number of WCS axes: 2
+        CTYPE : 'RA---TAN' 'DEC--TAN'
+        CRVAL : 10.0 10.0
+        CRPIX : 0.0 0.0
+        PC1_1 PC1_2  : 3.7400283533421276e-11 5.555555555554297e-05
+        PC2_1 PC2_2  : 5.555555555554297e-05 -3.7400283533421276e-11
+        CDELT : 1.0 1.0
+        NAXIS : 0  0]
         """
         wcs = WCS(naxis=2)
         rho = rotation*0.0174533 # deg to rad
@@ -79,7 +155,7 @@ def gen_wcs(cls, center_coords, rotation, pixscale, shape=None):
             wcs.wcs.crpix = [shape[1] // 2, shape[0] // 2]
         else:
             wcs.wcs.crpix = [0, 0]
-        wcs.wcs.crval = center_coords
+        wcs.wcs.crval = pointing
         wcs.wcs.cunit = ['deg', 'deg']
         wcs.wcs.pc = np.array([
             [-scale * np.cos(rho), scale * np.sin(rho)],
@@ -90,37 +166,135 @@ def gen_wcs(cls, center_coords, rotation, pixscale, shape=None):
 
         return wcs
 
+    def next(self):
+        """Iteratively return WCS from the cycle."""
+        for wcs in self.cycle:
+            yield wcs
+
     def update_from_header(self, header):
+        """Update WCS template using a header, only updates the cards template
+        shares with the given header.
+
+        Updates the template WCS in-place.
+        """
         t = self.template.to_header()
         t.update(header)
         self.template = WCS(t)
 
-    def mock(self, header):
+    def update_headers(self, headers):
+        """Update the headers, in-place, with a new mocked WCS.
+
+        If the header contains a WCS, it is updated to match the template.
+
+        Parameters
+        ----------
+        headers : `list[astropy.io.fits.Header]`
+            Headers to update in-place. Nothing is returned.
+        """
         wcs = self.template
 
-        if self.cycle is not None:
-            wcs = self.cycle[self.current % len(self.cycle)]
-
-        if self.dither_pos:
-            dra = random.uniform(-self.dither_amplitudes[0], self.dither_amplitudes[0])
-            ddec = random.uniform(-self.dither_amplitudes[1], self.dither_amplitudes[1])
-            wcs.wcs.crval += [dra, ddec]
-        if self.dither_rot:
-            ddec = random.uniform(-self.dither_amplitudes[2], self.dither_amplitudes[2])
-            rho = self.dither_amplitudes[2]*0.0174533 # deg to rad
-            rot_matrix =  np.array([
-                [-np.cos(rho), np.sin(rho)],
-                [np.sin(rho), np.cos(rho)]
-            ])
-            new_pc = wcs.wcs.pc @ rot_matrix
-            wcs.wcs.pc = new_pc
-
-        self.current += 1
-        header.update(wcs.to_header())
-        return header
+        for header in headers:
+            if self.cycle is not None:
+                wcs = self.next()
+
+            if self.dither_pos:
+                dra = random.uniform(-self.dither_amplitudes[0], self.dither_amplitudes[0])
+                ddec = random.uniform(-self.dither_amplitudes[1], self.dither_amplitudes[1])
+                wcs.wcs.crval += [dra, ddec]
+
+            if self.dither_rot:
+                ddec = random.uniform(-self.dither_amplitudes[2], self.dither_amplitudes[2])
+                rho = self.dither_amplitudes[2]*0.0174533 # deg to rad
+                rot_matrix =  np.array([
+                    [-np.cos(rho), np.sin(rho)],
+                    [np.sin(rho), np.cos(rho)]
+                ])
+                new_pc = wcs.wcs.pc @ rot_matrix
+                wcs.wcs.pc = new_pc
+
+            self.current += 1
+            header.update(wcs.to_header())
 
 
 class HeaderFactory:
+    """Mocks a header from a given template.
+
+    Callback functions can be defined for individual header cards which are
+    executed and their respective values are updated for each new mocked header.
+
+    A WCS factory can be attached to this factory to update the related WCS
+    header keywords.
+
+    Provides two base templates from which to create Headers from, based on
+    CTIO observatory location and DECam instrument.
+
+    Can generate headers from dict-like template and card overrides.
+
+    Attributes
+    ----------
+    is_dynamic : `bool`
+        True when the factory has mutable Header cards or a WCS factory.
+
+    Parameters
+    ----------
+    metadata : `dict-like`
+        Header, dict, or a list of cards from which a header is created.
+    mutables : `list[str]` or `None`, optional
+        A list of strings, matching header card keys, designating them as a
+        card that has an associated callback.
+    callbacks : `list[func]`, `list[class]` or `None`
+        List of callbacks, functions or classes with a ``__call__`` method, that
+        will be executed in order to update the respective key in ``mutables``.
+        See the callbacks already provided in the `kbmod.mocking.callbacks` module.
+    has_wcs : `bool`
+        Attach a WCS to each produced header.
+    wcs_factory : `WCSFactory` or `None`
+        A WCS factory to use, if `None` and `has_wcs` is `True`, uses the default
+        WCS Factory. See `WCSFactory` for details.
+
+    Examples
+    --------
+    >>> import kbmod.mocking as kbmock
+    >>> hf = kbmock.HeaderFactory.from_primary_template()
+    >>> hf.mock()
+    [EXTNAME = 'PRIMARY '
+    NAXIS   =                    0
+    BITPIX  =                    8
+    DATE-OBS= '2021-03-19T00:27:21.140552'
+    NEXTEND =                    3
+    OBS-LAT =              -30.166
+    OBS-LONG=              -70.814
+    OBS-ELEV=                 2200
+    OBSERVAT= 'CTIO    '                                                            ]
+    >>> hf = kbmock.HeaderFactory.from_ext_template()
+    >>> hf.mock()
+    [NAXIS   =                    2
+    NAXIS1  =                 2048
+    NAXIS2  =                 4096
+    CRPIX1  =               1024.0 / Pixel coordinate of reference point
+    CRPIX2  =               2048.0 / Pixel coordinate of reference point
+    BITPIX  =                   32
+    WCSAXES =                    2 / Number of coordinate axes
+    PC1_1   = -5.5555555555556E-05 / Coordinate transformation matrix element
+    PC2_2   =  5.5555555555556E-05 / Coordinate transformation matrix element
+    CDELT1  =                  1.0 / [deg] Coordinate increment at reference point
+    CDELT2  =                  1.0 / [deg] Coordinate increment at reference point
+    CUNIT1  = 'deg'                / Units of coordinate increment and value
+    CUNIT2  = 'deg'                / Units of coordinate increment and value
+    CTYPE1  = 'RA---TAN'           / Right ascension, gnomonic projection
+    CTYPE2  = 'DEC--TAN'           / Declination, gnomonic projection
+    CRVAL1  =                351.0 / [deg] Coordinate value at reference point
+    CRVAL2  =                 -5.0 / [deg] Coordinate value at reference point
+    LONPOLE =                180.0 / [deg] Native longitude of celestial pole
+    LATPOLE =                 -5.0 / [deg] Native latitude of celestial pole
+    MJDREF  =                  0.0 / [d] MJD of fiducial time
+    RADESYS = 'ICRS'               / Equatorial coordinate system                   ]
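+
+    A sketch of a header with a mutable DATE-OBS card driven by a callback (see
+    `kbmod.mocking.callbacks` for the provided callbacks):
+
+    >>> from kbmod.mocking import IncrementObstime
+    >>> hf = kbmock.HeaderFactory.from_primary_template(
+    ...     mutables=["DATE-OBS"],
+    ...     callbacks=[IncrementObstime("2021-01-01T00:00:00.0000", 1)],
+    ... )
+    >>> hdrs = hf.mock(n=2)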
+    """
     primary_template = {
         "EXTNAME": "PRIMARY",
         "NAXIS": 0,
@@ -132,10 +306,15 @@ class HeaderFactory:
         "OBS-ELEV": 2200,
         "OBSERVAT": "CTIO",
     }
+    """Template for the Primary header content."""
 
     ext_template = {"NAXIS": 2, "NAXIS1": 2048, "NAXIS2": 4096, "CRPIX1": 1024, "CRPIX2": 2048, "BITPIX": 32}
+    """Template of an image-like extension header."""
 
     def __validate_mutables(self):
+        """Validate number of mutables is number of callbacks, and that designated
+        mutable cards exist in the given header template.
+        """
         # !xor
         if bool(self.mutables) != bool(self.callbacks):
             raise ValueError(
@@ -176,6 +355,18 @@ def __init__(self, metadata, mutables=None, callbacks=None, has_wcs=False, wcs_f
         self.counter = 0
 
     def mock(self, n=1):
+        """Mocks headers, executing callbacks and creating WCS as necessary.
+
+        Parameters
+        ----------
+        n : `int`
+            Number of headers to mock.
+
+        Returns
+        -------
+        headers : `list[Header]`
+            Mocked headers.
+        """
         headers = []
         # This can't be vectorized because callbacks may share global state
         for i in range(n):
@@ -187,15 +378,36 @@ def mock(self, n=1):
                     for i, mutable in enumerate(self.mutables):
                         header[mutable] = self.callbacks[i](header[mutable])
             if self.has_wcs:
-                header = self.wcs_factory.mock(header)
+                self.wcs_factory.update_headers([header])
             headers.append(header)
             self.counter += 1
         return headers
 
     @classmethod
-    def gen_header(cls, base, overrides, wcs=None):
+    def gen_header(cls, base, overrides=None, wcs=None):
+        """Generate a header from a base template and overrides.
+
+        If a WCS is given and the header contains only a partially defined
+        WCS, only the missing WCS cards and values are updated.
+
+        Parameters
+        ----------
+        base : `Header` or `dict-like`
+            Header or a dict-like base template for the header.
+        overrides : `Header`, `dict-like` or `None`, optional
+            Keys and values that will either be updated or extended to the base
+            template.
+        wcs : `astropy.wcs.WCS` or `None`, optional
+            WCS template to use to update the header values.
+
+        Returns
+        -------
+        header : `astropy.io.fits.Header`
+            A header.
+        """
         header = Header(base)
-        header.update(overrides)
+        if overrides is not None:
+            header.update(overrides)
 
         if wcs is not None:
             # Sync WCS with header + overwrites
@@ -208,12 +420,58 @@ def gen_header(cls, base, overrides, wcs=None):
 
     @classmethod
     def from_primary_template(cls, overrides=None, mutables=None, callbacks=None):
+        """Create a header assuming the default template of a PRIMARY header.
+
+        Override, or extend the default template with keys and values in override.
+        Attach callbacks to mutable cards.
+
+        Parameters
+        ----------
+        overrides : `dict-like` or `None`, optional
+            A header, or different dict-like, object used to override the base
+            template keys and values.
+        mutables : `list[str]` or `None`, optional
+            List of card keys designated as mutable.
+        callbacks : `list[callable]` or `None`, optional
+            List of callable functions or classes that match the mutables.
+
+        Returns
+        -------
+        factory : `HeaderFactory`
+            Header factory.
+        """
         hdr = cls.gen_header(base=cls.primary_template, overrides=overrides)
         return cls(hdr, mutables, callbacks)
 
     @classmethod
     def from_ext_template(cls, overrides=None, mutables=None, callbacks=None, shape=None,
                           wcs=None):
+        """Create an extension header assuming the default template of an image
+        like header.
+
+        Override, or extend the default template with keys and values in override.
+        Attach callbacks to mutable cards.
+
+        Parameters
+        ----------
+        overrides : `dict-like` or `None`, optional
+            A header, or different dict-like, object used to override the base
+            template keys and values.
+        mutables : `list[str]` or `None`, optional
+            List of card keys designated as mutable.
+        callbacks : `list[callable]` or `None`, optional
+            List of callable functions or classes that match the mutables.
+        shape : `tuple` or `None`, optional
+            Update the template description of data dimensions and the reference
+            pixel.
+        wcs : `astropy.wcs.WCS` or `None`, optional
+            WCS Factory to use.
+
+        Returns
+        -------
+        factory : `HeaderFactory`
+            Header factory.
+        """
         ext_template = cls.ext_template.copy()
 
         if shape is not None:
@@ -227,6 +485,41 @@ def from_ext_template(cls, overrides=None, mutables=None, callbacks=None, shape=
 
 
 class ArchivedHeader(HeaderFactory):
+    """Archived header factory.
+
+    Archived headers are those that were produced with the modified version of
+    AstroPy's ``fitsheader`` utility available in this module. See
+
+        archiveheaders -h
+
+    for more details. To produce an archive, with KBMOD installed, execute the
+    following:
+
+        archiveheaders *.fits > archive.ecsv
+        tar -cjf headers.tar.bz2 archive.ecsv
+
+    Attributes
+    ----------
+    lexical_type_map : `dict`
+        A map between the serialized names of built-in types and the built-in
+        types. Used to cast the serialized card values before creating a Header.
+    compression : `str`
+        By default it's assumed the TAR archive was compressed with the ``bz2``
+        compression algorithm.
+    format : `str`
+        The format in which the file of serialized header cards was written.
+        A valid AstroPy ASCII module format identifier; ``ascii.ecsv`` by default.
+
+    Parameters
+    ----------
+    archive_name : `str`
+        Name of the TAR archive containing serialized headers.
+    fname : `str`
+        Name of the file, within the archive, containing the headers.
+    external : `bool`
+        When `True`, the archive is searched for relative to the current working
+        directory. Otherwise, the archive is searched for in the directory of
+        this module, where the bundled header archive lives.
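+
+    Examples
+    --------
+    A sketch of reading headers from an archive; the archive and file names
+    below are hypothetical placeholders:
+
+    >>> from kbmod.mocking.headers import ArchivedHeader
+    >>> factory = ArchivedHeader("headers.tar.bz2", "headers.ecsv", external=True)  # doctest: +SKIP
+    >>> headers = factory.mock(n=1)  # doctest: +SKIP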
+    """
     # will almost never be anything else. Rather, it would be a miracle if it
     # were something else, since FITS standard shouldn't allow it. Further
     # casting by some packages will always be casting implemented in terms of
@@ -240,26 +533,43 @@ class ArchivedHeader(HeaderFactory):
     """Map between type names and types themselves."""
 
     compression = "bz2"
+    """Compression used to compress the archived headers."""
 
     format = "ascii.ecsv"
+    """Format of the archive, and AstroPy'S ASCII module valid identifier of a format."""
 
-    def __init__(self, archive_name, fname):
+    def __init__(self, archive_name, fname, external=False):
         super().__init__({})
-        self.table = header_archive_to_table(archive_name, fname, self.compression, self.format)
+        self.table = header_archive_to_table(archive_name, fname, self.compression, self.format, external=external)
 
         # Create HDU groups for easier iteration
         self.table = self.table.group_by("filename")
         self.n_hdus = len(self.table)
 
-    def lexical_cast(self, value, format):
+    def lexical_cast(self, value, vtype):
         """Cast str literal of a type to the type itself. Supports just
         the builtin Python types.
         """
-        if format in self.lexical_type_map:
-            return self.lexical_type_map[format](value)
+        if vtype in self.lexical_type_map:
+            return self.lexical_type_map[vtype](value)
         return value
 
     def get_item(self, group_idx, hdr_idx):
+        """Get an extension of an HDUList within the archive without
+        incrementing the mocking counter.
+
+        Parameters
+        ----------
+        group_idx : `int`
+            Index of the HDUList to fetch from the archive.
+        hdr_idx : `int`
+            Index of the extension within the HDUList.
+
+        Returns
+        -------
+        header : `Header`
+            Header of the given extension of the targeted HDUList.
+        """
         header = Header()
         # this is equivalent to one hdulist worth of headers
         group = self.table.groups[group_idx]
@@ -271,6 +581,18 @@ def get_item(self, group_idx, hdr_idx):
         return header
 
     def get(self, group_idx):
+        """Get an HDUList within the archive without incrementing the mocking counter.
+
+        Parameters
+        ----------
+        group_idx : `int`
+            Index of the HDUList to fetch from the archive.
+
+        Returns
+        -------
+        headers : `list[Header]`
+            All headers of the targeted HDUList.
+        """
         headers = []
         # this is a bit repetitive but it saves recreating
         # groups for one HDUL-equivalent many times
@@ -288,6 +610,18 @@ def get(self, group_idx):
         return headers
 
     def mock(self, n=1):
+        """Mock all headers within n HDULists.
+
+        Parameters
+        ----------
+        n : `int`
+            Number of HDULists whose headers to mock.
+
+        Returns
+        -------
+        headers : `list[list[Header]]`
+            For each mocked HDUList, the list of headers of its extensions.
+        """
         res = []
         for _ in range(n):
             res.append(self.get(self.counter))
diff --git a/src/kbmod/mocking/data/headers_archive.tar.bz2 b/src/kbmod/mocking/headers_archive.tar.bz2
similarity index 100%
rename from src/kbmod/mocking/data/headers_archive.tar.bz2
rename to src/kbmod/mocking/headers_archive.tar.bz2
diff --git a/src/kbmod/mocking/utils.py b/src/kbmod/mocking/utils.py
index d79a48bcb..40f0af93e 100644
--- a/src/kbmod/mocking/utils.py
+++ b/src/kbmod/mocking/utils.py
@@ -4,20 +4,22 @@
 from astropy.table import Table, MaskedColumn
 
 
-def get_absolute_data_path(file_or_directory):
-    test_dir = path.abspath(path.dirname(__file__))
-    data_dir = path.join(test_dir, "data")
-    return path.join(data_dir, file_or_directory)
-
-
-def get_absolute_demo_data_path(file_or_directory):
-    project_root_dir = path.abspath(path.dirname(path.dirname(__file__)))
-    data_dir = path.join(project_root_dir, "data")
-    return path.join(data_dir, file_or_directory)
-
-
-def header_archive_to_table(archive_path, fname, compression, format):
-    archive_path = get_absolute_data_path(archive_path)
+#def get_absolute_data_path(file_or_directory):
+#    test_dir = path.abspath(path.dirname(__file__))
+#    data_dir = path.join(test_dir, "archived_data")
+#    return path.join(data_dir, file_or_directory)
+#
+#
+#def get_absolute_demo_data_path(file_or_directory):
+#    project_root_dir = path.abspath(path.dirname(path.dirname(__file__)))
+#    data_dir = path.join(project_root_dir, "archived_data")
+#    return path.join(data_dir, file_or_directory)
+
+
+def header_archive_to_table(archive_path, fname, compression, format, external=True):
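+    """Read a table of serialized FITS header cards from a TAR archive.
+
+    The archive is opened with the given ``compression`` and the file ``fname``
+    within it is read as a table in the given ``format``. When ``external`` is
+    `False` the archive path is resolved relative to this module's directory,
+    where the bundled header archive lives; otherwise the path is used as given.
+    """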
+    if not external:
+        mocking_dir = path.abspath(path.dirname(__file__))
+        archive_path = path.join(mocking_dir, archive_path)
     with tarfile.open(archive_path, f"r:{compression}") as archive:
         tblfile = archive.extractfile(fname)
         table = Table.read(tblfile.read().decode(), format=format)
diff --git a/tests/dump_headers.py b/tests/dump_headers.py
deleted file mode 100644
index 74fbc60f4..000000000
--- a/tests/dump_headers.py
+++ /dev/null
@@ -1,500 +0,0 @@
-# Modified from the original Astropy code to add a card format to the
-# tabular output. All rights belong to the original authors.
-
-# Licensed under a 3-clause BSD style license - see LICENSE.rst
-"""
-``fitsheader`` is a command line script based on astropy.io.fits for printing
-the header(s) of one or more FITS file(s) to the standard output in a human-
-readable format.
-
-Example uses of fitsheader:
-
-1. Print the header of all the HDUs of a .fits file::
-
-    $ fitsheader filename.fits
-
-2. Print the header of the third and fifth HDU extension::
-
-    $ fitsheader --extension 3 --extension 5 filename.fits
-
-3. Print the header of a named extension, e.g. select the HDU containing
-   keywords EXTNAME='SCI' and EXTVER='2'::
-
-    $ fitsheader --extension "SCI,2" filename.fits
-
-4. Print only specific keywords::
-
-    $ fitsheader --keyword BITPIX --keyword NAXIS filename.fits
-
-5. Print keywords NAXIS, NAXIS1, NAXIS2, etc using a wildcard::
-
-    $ fitsheader --keyword NAXIS* filename.fits
-
-6. Dump the header keywords of all the files in the current directory into a
-   machine-readable csv file::
-
-    $ fitsheader --table ascii.csv *.fits > keywords.csv
-
-7. Specify hierarchical keywords with the dotted or spaced notation::
-
-    $ fitsheader --keyword ESO.INS.ID filename.fits
-    $ fitsheader --keyword "ESO INS ID" filename.fits
-
-8. Compare the headers of different fits files, following ESO's ``fitsort``
-   format::
-
-    $ fitsheader --fitsort --extension 0 --keyword ESO.INS.ID *.fits
-
-9. Same as above, sorting the output along a specified keyword::
-
-    $ fitsheader -f -s DATE-OBS -e 0 -k DATE-OBS -k ESO.INS.ID *.fits
-
-10. Sort first by OBJECT, then DATE-OBS::
-
-    $ fitsheader -f -s OBJECT -s DATE-OBS *.fits
-
-Note that compressed images (HDUs of type
-:class:`~astropy.io.fits.CompImageHDU`) really have two headers: a real
-BINTABLE header to describe the compressed data, and a fake IMAGE header
-representing the image that was compressed. Astropy returns the latter by
-default. You must supply the ``--compressed`` option if you require the real
-header that describes the compression.
-
-With Astropy installed, please run ``fitsheader --help`` to see the full usage
-documentation.
-"""
-
-import argparse
-import sys
-
-import numpy as np
-
-from astropy import __version__, log
-from astropy.io import fits
-
-DESCRIPTION = """
-Print the header(s) of a FITS file. Optional arguments allow the desired
-extension(s), keyword(s), and output format to be specified.
-Note that in the case of a compressed image, the decompressed header is
-shown by default.
-
-This script is part of the Astropy package. See
-https://docs.astropy.org/en/latest/io/fits/usage/scripts.html#module-astropy.io.fits.scripts.fitsheader
-for further documentation.
-""".strip()
-
-
-class ExtensionNotFoundException(Exception):
-    """Raised if an HDU extension requested by the user does not exist."""
-
-
-class HeaderFormatter:
-    """Class to format the header(s) of a FITS file for display by the
-    `fitsheader` tool; essentially a wrapper around a `HDUList` object.
-
-    Example usage:
-    fmt = HeaderFormatter('/path/to/file.fits')
-    print(fmt.parse(extensions=[0, 3], keywords=['NAXIS', 'BITPIX']))
-
-    Parameters
-    ----------
-    filename : str
-        Path to a single FITS file.
-    verbose : bool
-        Verbose flag, to show more information about missing extensions,
-        keywords, etc.
-
-    Raises
-    ------
-    OSError
-        If `filename` does not exist or cannot be read.
-    """
-
-    def __init__(self, filename, verbose=True):
-        self.filename = filename
-        self.verbose = verbose
-        self._hdulist = fits.open(filename)
-
-    def parse(self, extensions=None, keywords=None, compressed=False):
-        """Returns the FITS file header(s) in a readable format.
-
-        Parameters
-        ----------
-        extensions : list of int or str, optional
-            Format only specific HDU(s), identified by number or name.
-            The name can be composed of the "EXTNAME" or "EXTNAME,EXTVER"
-            keywords.
-
-        keywords : list of str, optional
-            Keywords for which the value(s) should be returned.
-            If not specified, then the entire header is returned.
-
-        compressed : bool, optional
-            If True, shows the header describing the compression, rather than
-            the header obtained after decompression. (Affects FITS files
-            containing `CompImageHDU` extensions only.)
-
-        Returns
-        -------
-        formatted_header : str or astropy.table.Table
-            Traditional 80-char wide format in the case of `HeaderFormatter`;
-            an Astropy Table object in the case of `TableHeaderFormatter`.
-        """
-        # `hdukeys` will hold the keys of the HDUList items to display
-        if extensions is None:
-            hdukeys = range(len(self._hdulist))  # Display all by default
-        else:
-            hdukeys = []
-            for ext in extensions:
-                try:
-                    # HDU may be specified by number
-                    hdukeys.append(int(ext))
-                except ValueError:
-                    # The user can specify "EXTNAME" or "EXTNAME,EXTVER"
-                    parts = ext.split(",")
-                    if len(parts) > 1:
-                        extname = ",".join(parts[0:-1])
-                        extver = int(parts[-1])
-                        hdukeys.append((extname, extver))
-                    else:
-                        hdukeys.append(ext)
-
-        # Having established which HDUs the user wants, we now format these:
-        return self._parse_internal(hdukeys, keywords, compressed)
-
-    def _parse_internal(self, hdukeys, keywords, compressed):
-        """The meat of the formatting; in a separate method to allow overriding."""
-        result = []
-        for idx, hdu in enumerate(hdukeys):
-            try:
-                cards = self._get_cards(hdu, keywords, compressed)
-            except ExtensionNotFoundException:
-                continue
-
-            if idx > 0:  # Separate HDUs by a blank line
-                result.append("\n")
-            result.append(f"# HDU {hdu} in {self.filename}:\n")
-            for c in cards:
-                result.append(f"{c}\n")
-        return "".join(result)
-
-    def _get_cards(self, hdukey, keywords, compressed):
-        """Returns a list of `astropy.io.fits.card.Card` objects.
-
-        This function will return the desired header cards, taking into
-        account the user's preference to see the compressed or uncompressed
-        version.
-
-        Parameters
-        ----------
-        hdukey : int or str
-            Key of a single HDU in the HDUList.
-
-        keywords : list of str, optional
-            Keywords for which the cards should be returned.
-
-        compressed : bool, optional
-            If True, shows the header describing the compression.
-
-        Raises
-        ------
-        ExtensionNotFoundException
-            If the hdukey does not correspond to an extension.
-        """
-        # First we obtain the desired header
-        try:
-            if compressed:
-                # In the case of a compressed image, return the header before
-                # decompression (not the default behavior)
-                header = self._hdulist[hdukey]._header
-            else:
-                header = self._hdulist[hdukey].header
-        except (IndexError, KeyError):
-            message = f"{self.filename}: Extension {hdukey} not found."
-            if self.verbose:
-                log.warning(message)
-            raise ExtensionNotFoundException(message)
-
-        if not keywords:  # return all cards
-            cards = header.cards
-        else:  # specific keywords are requested
-            cards = []
-            for kw in keywords:
-                try:
-                    crd = header.cards[kw]
-                    if isinstance(crd, fits.card.Card):  # Single card
-                        cards.append(crd)
-                    else:  # Allow for wildcard access
-                        cards.extend(crd)
-                except KeyError:  # Keyword does not exist
-                    if self.verbose:
-                        log.warning(f"{self.filename} (HDU {hdukey}): Keyword {kw} not found.")
-        return cards
-
-    def close(self):
-        self._hdulist.close()
-
-
-class TableHeaderFormatter(HeaderFormatter):
-    """Class to convert the header(s) of a FITS file into a Table object.
-    The table returned by the `parse` method will contain four columns:
-    filename, hdu, keyword, and value.
-
-    Subclassed from HeaderFormatter, which contains the meat of the formatting.
-    """
-
-    def _parse_internal(self, hdukeys, keywords, compressed):
-        """Method called by the parse method in the parent class."""
-        tablerows = []
-        for hdu in hdukeys:
-            try:
-                for card in self._get_cards(hdu, keywords, compressed):
-                    tablerows.append(
-                        {
-                            "filename": self.filename,
-                            "hdu": hdu,
-                            "keyword": card.keyword,
-                            "value": str(card.value),
-                            "format": type(card.value).__name__,
-                        }
-                    )
-            except ExtensionNotFoundException:
-                pass
-
-        if tablerows:
-            from astropy import table
-
-            return table.Table(tablerows)
-        return None
-
-
-def print_headers_traditional(args):
-    """Prints FITS header(s) using the traditional 80-char format.
-
-    Parameters
-    ----------
-    args : argparse.Namespace
-        Arguments passed from the command-line as defined below.
-    """
-    for idx, filename in enumerate(args.filename):  # support wildcards
-        if idx > 0 and not args.keyword:
-            print()  # print a newline between different files
-
-        formatter = None
-        try:
-            formatter = HeaderFormatter(filename)
-            print(formatter.parse(args.extensions, args.keyword, args.compressed), end="")
-        except OSError as e:
-            log.error(str(e))
-        finally:
-            if formatter:
-                formatter.close()
-
-
-def print_headers_as_table(args):
-    """Prints FITS header(s) in a machine-readable table format.
-
-    Parameters
-    ----------
-    args : argparse.Namespace
-        Arguments passed from the command-line as defined below.
-    """
-    tables = []
-    # Create a Table object for each file
-    for filename in args.filename:  # Support wildcards
-        formatter = None
-        try:
-            formatter = TableHeaderFormatter(filename)
-            tbl = formatter.parse(args.extensions, args.keyword, args.compressed)
-            if tbl:
-                tables.append(tbl)
-        except OSError as e:
-            log.error(str(e))  # file not found or unreadable
-        finally:
-            if formatter:
-                formatter.close()
-
-    # Concatenate the tables
-    if len(tables) == 0:
-        return False
-    elif len(tables) == 1:
-        resulting_table = tables[0]
-    else:
-        from astropy import table
-
-        resulting_table = table.vstack(tables)
-    # Print the string representation of the concatenated table
-    resulting_table.write(sys.stdout, format=args.table)
-
-
-def print_headers_as_comparison(args):
-    """Prints FITS header(s) with keywords as columns.
-
-    This follows the dfits+fitsort format.
-
-    Parameters
-    ----------
-    args : argparse.Namespace
-        Arguments passed from the command-line as defined below.
-    """
-    from astropy import table
-
-    tables = []
-    # Create a Table object for each file
-    for filename in args.filename:  # Support wildcards
-        formatter = None
-        try:
-            formatter = TableHeaderFormatter(filename, verbose=False)
-            tbl = formatter.parse(args.extensions, args.keyword, args.compressed)
-            if tbl:
-                # Remove empty keywords
-                tbl = tbl[np.where(tbl["keyword"] != "")]
-            else:
-                tbl = table.Table([[filename]], names=("filename",))
-            tables.append(tbl)
-        except OSError as e:
-            log.error(str(e))  # file not found or unreadable
-        finally:
-            if formatter:
-                formatter.close()
-
-    # Concatenate the tables
-    if len(tables) == 0:
-        return False
-    elif len(tables) == 1:
-        resulting_table = tables[0]
-    else:
-        resulting_table = table.vstack(tables)
-
-    # If we obtained more than one hdu, merge hdu and keywords columns
-    hdus = resulting_table["hdu"]
-    if np.ma.isMaskedArray(hdus):
-        hdus = hdus.compressed()
-    if len(np.unique(hdus)) > 1:
-        for tab in tables:
-            new_column = table.Column([f"{row['hdu']}:{row['keyword']}" for row in tab])
-            tab.add_column(new_column, name="hdu+keyword")
-        keyword_column_name = "hdu+keyword"
-    else:
-        keyword_column_name = "keyword"
-
-    # Check how many hdus we are processing
-    final_tables = []
-    for tab in tables:
-        final_table = [table.Column([tab["filename"][0]], name="filename")]
-        if "value" in tab.colnames:
-            for row in tab:
-                if row["keyword"] in ("COMMENT", "HISTORY"):
-                    continue
-                final_table.append(table.Column([row["value"]], name=row[keyword_column_name]))
-        final_tables.append(table.Table(final_table))
-    final_table = table.vstack(final_tables)
-    # Sort if requested
-    if args.sort:
-        final_table.sort(args.sort)
-    # Reorganise to keyword by columns
-    final_table.pprint(max_lines=-1, max_width=-1)
-
-
-if __name__ == "__main__":
-    """This is the main function called by the `fitsheader` script."""
-    parser = argparse.ArgumentParser(
-        description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter
-    )
-
-    parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
-
-    parser.add_argument(
-        "-e",
-        "--extension",
-        metavar="HDU",
-        action="append",
-        dest="extensions",
-        help=(
-            "specify the extension by name or number; this argument can "
-            "be repeated to select multiple extensions"
-        ),
-    )
-    parser.add_argument(
-        "-k",
-        "--keyword",
-        metavar="KEYWORD",
-        action="append",
-        type=str,
-        help=(
-            "specify a keyword; this argument can be repeated to select "
-            "multiple keywords; also supports wildcards"
-        ),
-    )
-    mode_group = parser.add_mutually_exclusive_group()
-    mode_group.add_argument(
-        "-t",
-        "--table",
-        nargs="?",
-        default=False,
-        metavar="FORMAT",
-        help=(
-            "print the header(s) in machine-readable table format; the "
-            'default format is "ascii.fixed_width" (can be "ascii.csv", '
-            '"ascii.html", "ascii.latex", "fits", etc)'
-        ),
-    )
-    mode_group.add_argument(
-        "-f",
-        "--fitsort",
-        action="store_true",
-        help=("print the headers as a table with each unique " "keyword in a given column (fitsort format) "),
-    )
-    parser.add_argument(
-        "-s",
-        "--sort",
-        metavar="SORT_KEYWORD",
-        action="append",
-        type=str,
-        help=(
-            "sort output by the specified header keywords, can be repeated to "
-            "sort by multiple keywords; Only supported with -f/--fitsort"
-        ),
-    )
-    parser.add_argument(
-        "-c",
-        "--compressed",
-        action="store_true",
-        help=(
-            "for compressed image data, show the true header which describes "
-            "the compression rather than the data"
-        ),
-    )
-    parser.add_argument(
-        "filename",
-        nargs="+",
-        help="path to one or more files; wildcards are supported",
-    )
-    args = parser.parse_args()
-    # If `--table` was used but no format specified,
-    # then use ascii.fixed_width by default
-    if args.table is None:
-        args.table = "ascii.fixed_width"
-
-    if args.sort:
-        args.sort = [key.replace(".", " ") for key in args.sort]
-        if not args.fitsort:
-            log.error("Sorting with -s/--sort is only supported in conjunction with" " -f/--fitsort")
-            # 2: Unix error convention for command line syntax
-            sys.exit(2)
-
-    if args.keyword:
-        args.keyword = [key.replace(".", " ") for key in args.keyword]
-
-    # Now print the desired headers
-    try:
-        if args.table:
-            print_headers_as_table(args)
-        elif args.fitsort:
-            print_headers_as_comparison(args)
-        else:
-            print_headers_traditional(args)
-    except OSError:
-        # A 'Broken pipe' OSError may occur when stdout is closed prematurely,
-        # eg. when calling `fitsheader file.fits | head`. We let this pass.
-        pass
diff --git a/tests/test_end_to_end.py b/tests/test_end_to_end.py
index 9f5ea7358..f6f645c5b 100644
--- a/tests/test_end_to_end.py
+++ b/tests/test_end_to_end.py
@@ -38,6 +38,7 @@ def setUp(self):
         self.factory = kbmock.EmptyFits()
 
     def test_empty(self):
+        """Test no detections are found on empty images."""
         hduls = self.factory.mock(n=10)
 
         # create the most permissive search configs you can come up with
@@ -61,6 +62,7 @@ def test_empty(self):
         self.assertTrue(len(results) == 0)
 
     def test_static_objects(self):
+        """Test no detections are found on images containing static objects."""
         src_cat = kbmock.SourceCatalog.from_defaults(seed=100)
         factory = kbmock.SimpleFits(src_cat=src_cat)
         hduls = factory.mock(10)
@@ -111,7 +113,28 @@ def setUp(self):
             }
         )
 
-    def xmatch_best(self, obj, results, match_cols={"x_mean": "x", "y_mean": "y", "vx": "vx", "vy": "vy"}):
+    def xmatch_best(self, obj, results,
+                    match_cols={"x_mean": "x", "y_mean": "y", "vx": "vx", "vy": "vy"}):
+        """Finds the result that minimizes the L2 distance to the target object.
+
+        Parameters
+        ----------
+        obj : `astropy.table.Row`
+            Row, or a table with single entry, containing the target object.
+        results : `astropy.table.Table`
+            Table of objects from which the closest matching one will be returned.
+        match_cols : `dict`, optional
+            Dictionary of column names on which to perform the matching. Keys
+            of the dictionary are columns from ``obj`` and values of the dict
+            are columns from ``results``.
+
+        Returns
+        -------
+        result : `astropy.table.Row`
+            Best matching result.
+        distances : `np.array`
+            Array of calculated L2 distances of ``obj`` to all given results.
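+
+        Examples
+        --------
+        A minimal, illustrative sketch of the matching criterion, using plain
+        `numpy` arrays in place of the actual catalog row and results table:
+
+        >>> import numpy as np
+        >>> obj = np.array([10.0, 10.0])  # target (x_mean, y_mean)
+        >>> res = np.array([[12.0, 9.0], [10.5, 10.0], [50.0, 50.0]])
+        >>> dists = np.linalg.norm(res - obj, axis=1)  # L2 distances
+        >>> int(np.argmin(dists))  # index of the best-matching result
+        1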
+        """
         objk, resk = [], []
         for k, v in match_cols.items():
             if k in obj.columns and v in results.table.columns:
@@ -126,6 +149,28 @@ def xmatch_best(self, obj, results, match_cols={"x_mean": "x", "y_mean": "y", "v
 
     def assertResultValuesWithinSpec(self, expected, result, spec,
                                      match_cols={"x_mean": "x", "y_mean": "y", "vx": "vx", "vy": "vy"}):
+        """Asserts expected object matches the given result object within
+        specification.
+
+        Parameters
+        ----------
+        expected : `astropy.table.Row`
+            Row, or table with single entry, containing the target object.
+        result : `astropy.table.Row`
+            Row, or table with single entry, containing the found object.
+        spec : `float`
+            Maximum allowed deviation of the found values from the expected
+            values. For example, a spec of 3 means results can be at most 3
+            pixels away from the expected position.
+        match_cols : `dict`, optional
+            Dictionary of column names on which to perform the matching. Keys
+            of the dictionary are columns from ``expected`` and values of the
+            dict are columns from ``result``.
+
+        Raises
+        ------
+        AssertionError
+            If the comparison fails.
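+
+        Examples
+        --------
+        The comparison is a per-column absolute difference checked against
+        the tolerance, for example:
+
+        >>> spec = 3
+        >>> abs(100.0 - 102.5) <= spec
+        True
+        >>> abs(100.0 - 104.0) <= spec
+        False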
+        """
         for ekey, rkey in match_cols.items():
             info = (
                 f"\n Expected: \n {expected[tuple(match_cols.keys())]} \n"
@@ -134,6 +179,20 @@ def assertResultValuesWithinSpec(self, expected, result, spec,
             self.assertLessEqual(abs(expected[ekey] - result[rkey]), spec, info)
 
     def run_single_search(self, data, expected, spec=5):
+        """Runs a KBMOD search on given data and tests the results lie within
+        specification from the expected.
+
+        Parameters
+        ----------
+        data : `list[str]` or `list[astropy.io.fits.HDUList]`
+            List of targets processable by the TestDataStandardizer.
+        expected : `kbmod.mocking.ObjectCatalog`
+            Object catalog expected to be retrieved from the run.
+        spec : `float`, optional
+            Maximum allowed deviation of the found values from the expected
+            values. For example, a spec of 3 means results can be at most 3
+            pixels away from the expected position. Default is 5.
+        """
         ic = ImageCollection.fromTargets(data, force="TestDataStd")
         wu = ic.toWorkUnit(search_config=self.config)
         results = SearchRunner().run_search_from_work_unit(wu)
@@ -145,6 +204,7 @@ def run_single_search(self, data, expected, spec=5):
             self.assertResultValuesWithinSpec(obj, res, spec)
 
     def test_exact_motion(self):
+        """Test exact searches are recovered in all 8 cardinal directions."""
         search_vs = list(itertools.product([-20, 0, 20], repeat=2))
         search_vs.remove((0, 0))
         for (vx, vy) in search_vs:
@@ -162,6 +222,7 @@ def test_exact_motion(self):
                 self.run_single_search(hduls, obj_cat, 1)
 
     def test_random_motion(self):
+        """Repeat searches for randomly inserted objects."""
         # Mock the data and repeat tests. The random catalog
         # creation guarantees a diverse set of changing test values
         for i in range(self.repeat_n_times):
@@ -171,7 +232,8 @@ def test_random_motion(self):
                 hduls = factory.mock(n=self.n_imgs)
                 self.run_single_search(hduls, obj_cat)
 
-    def test_reprojected_search(self):
+    def test_resampled_search(self):
+        """Search for objects in a set of resampled images; randomly dithered pointings and orientations."""
         # 0. Setup
         self.shape = (500, 500)
         self.start_pos = (10, 10)  # (ra, dec) in deg
@@ -204,7 +266,7 @@ def test_reprojected_search(self):
             pixscale=pixscale,
             dither_pos=True,
             dither_rot=True,
-            dither_amplitudes=(0.001, 0.001, 0.01)
+            dither_amplitudes=(0.001, 0.001, 10)
         )
 
         prim_hdr_factory = kbmock.HeaderFactory.from_primary_template(
@@ -283,27 +345,6 @@ def test_reprojected_search(self):
         self.assertGreaterEqual(len(results), 1)
         self.assertResultValuesWithinSpec(best_cat, best_res, 10)
 
-#    def test_diffim_mocks(self):
-#        src_cat = kbmock.SourceCatalog.from_defaults()
-#        obj_cat = kbmock.ObjectCatalog.from_defaults(self.param_ranges, n=1)
-#        factory = kbmock.DECamImdiff.from_defaults(with_data=True, src_cat=src_cat, obj_cat=obj_cat)
-#        hduls = factory.mock(n=self.n_imgs)
-#
-#
-#        ic = ImageCollection.fromTargets(hduls, force="TestDataStd")
-#        wu = ic.toWorkUnit(search_config=self.config)
-#        results = SearchRunner().run_search_from_work_unit(wu)
-#
-#        # Run tests
-#        self.assertGreaterEqual(len(results), 1)
-#        for res in results:
-#            diff = abs(obj_cat.table["y_mean"] - res["y"])
-#            obj = obj_cat.table[diff == diff.min()]
-#            self.assertLessEqual(abs(obj["x_mean"] - res["x"]), 5)
-#            self.assertLessEqual(abs(obj["y_mean"] - res["y"]), 5)
-#            self.assertLessEqual(abs(obj["vx"] - res["vx"]), 5)
-#            self.assertLessEqual(abs(obj["vy"] - res["vy"]), 5)
-
 
 ####
 
diff --git a/tests/test_mocking.py b/tests/test_mocking.py
index b03ce580e..b999c6fb3 100644
--- a/tests/test_mocking.py
+++ b/tests/test_mocking.py
@@ -92,6 +92,7 @@ def test(self):
         self.assertEqual(step_t.to("day").value, 1.0)
 
     def test_static_src_cat(self):
+        """Test that static source catalog works and is correctly rendered."""
         src_cat = kbmock.SourceCatalog.from_defaults()
         src_cat2 = kbmock.SourceCatalog.from_defaults()
         self.assertEqual(src_cat.config["mode"], "static")
@@ -121,6 +122,18 @@ def test_static_src_cat(self):
         self.assertGreaterEqual(hdul["IMAGE"].data[y, x].min(), 90)
 
     def validate_cat_render(self, hduls, cats, expected_gte=90):
+        """Validate that catalog objects appear in the given images.
+
+        Parameters
+        ----------
+        hduls : `list[astropy.io.fits.HDUList]`
+            List of FITS files to check.
+        cats : `list[astropy.table.Table]`
+            List of catalog realizations containing the coordinates of the
+            objects to check for.
+        expected_gte : `float`, optional
+            Minimum expected value of the pixel at each object's location.
+            Default is 90.
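+
+        Examples
+        --------
+        A minimal, illustrative sketch of the per-image check, with a small
+        `numpy` array standing in for the image data:
+
+        >>> import numpy as np
+        >>> img = np.zeros((5, 5))
+        >>> img[2, 3] = 100.0
+        >>> x = np.round(np.array([2.7])).astype(int)  # x_mean -> column index
+        >>> y = np.round(np.array([2.2])).astype(int)  # y_mean -> row index
+        >>> bool(img[y, x].min() >= 90)
+        True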
+        """
         for hdul, cat in zip(hduls, cats):
             x = np.round(cat["x_mean"].data).astype(int)
             y = np.round(cat["y_mean"].data).astype(int)
@@ -128,6 +141,7 @@ def validate_cat_render(self, hduls, cats, expected_gte=90):
             self.assertGreaterEqual(hdul["VARIANCE"].data[y, x].min(), expected_gte)
 
     def test_progressive_obj_cat(self):
+        """Test progressive catalog renders properly."""
         obj_cat = kbmock.ObjectCatalog.from_defaults()
         obj_cat2 = kbmock.ObjectCatalog.from_defaults()
         self.assertEqual(obj_cat.config["mode"], "progressive")
@@ -159,6 +173,7 @@ def test_progressive_obj_cat(self):
         self.validate_cat_render(hduls, cats)
 
     def test_folding_obj_cat(self):
+        """Test folding catalog renders properly."""
         # Set up shared values for the whole setup
         # like starting positions of object and timestamps
         start_x = np.ones((self.n_obj,)) * 10
@@ -189,13 +204,12 @@ def test_folding_obj_cat(self):
         factory.prim_hdr = prim_hdr_factory
         hduls = factory.mock(n=self.n_imgs)
 
-        # Run tests and ensure we have rendered the object in correct
-        # positions
         obj_cat.reset()
         cats = obj_cat.mock(n=self.n_imgs, t=self.timestamps)
         self.validate_cat_render(hduls, cats)
 
     def test_progressive_sky_cat(self):
+        """Test progressive catalog based on on-sky coordinates."""
         # a 10-50 in x by a 10-90 in y box using default WCS
         #self.shape = (500, 500)
         param_ranges = {
@@ -221,6 +235,7 @@ def test_progressive_sky_cat(self):
         self.validate_cat_render(hduls, cats)
 
     def test_folding_sky_cat(self):
+        """Test folding catalog based on on-sky coordinates."""
         # a 20x20 box in pixels using a default WCS
         start_ra = np.linspace(350.998, 351.002, self.n_obj)
         start_dec = np.linspace(-5.0077, -5.0039, self.n_obj)
@@ -254,6 +269,7 @@ def test_folding_sky_cat(self):
 
     # TODO: move to pytest and mark as xfail
     def test_noise_gen(self):
+        """Test noise renders with expected statistical properties."""
         factory = kbmock.SimpleFits(shape=(1000, 1000), with_noise=True)
         hdul = factory.mock()[0]
         self.assertAlmostEqual(hdul["IMAGE"].data.mean(), 10, 1)