Merge pull request #1471 from ncoop57/aliases
Update NbdevLookup to support import aliases and improve docstrings
jph00 authored Nov 21, 2024
2 parents 016a027 + e3e19e2 commit 05deba5
Showing 3 changed files with 163 additions and 52 deletions.
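In practice, the new `ns` argument lets a caller pass their namespace so that module aliases resolve during linkification. A minimal sketch based on the notebook example further down in this diff (assumes numpy and a numpy doc index are installed):

import numpy as np
from nbdev.doclinks import NbdevLookup

# Modules found in the passed namespace are recorded as aliases (here 'np' -> 'numpy'),
# so the backticked `np.array` is rewritten to `numpy.array` before the symbol lookup.
NbdevLookup(ns=globals()).linkify('this is an aliased import link `np.array`')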
1 change: 1 addition & 0 deletions nbdev/_modidx.py
@@ -58,6 +58,7 @@
'nbdev.doclinks.NbdevLookup.link_line': ('api/doclinks.html#nbdevlookup.link_line', 'nbdev/doclinks.py'),
'nbdev.doclinks.NbdevLookup.linkify': ('api/doclinks.html#nbdevlookup.linkify', 'nbdev/doclinks.py'),
'nbdev.doclinks._binop_leafs': ('api/doclinks.html#_binop_leafs', 'nbdev/doclinks.py'),
'nbdev.doclinks._build_lookup_table': ('api/doclinks.html#_build_lookup_table', 'nbdev/doclinks.py'),
'nbdev.doclinks._build_modidx': ('api/doclinks.html#_build_modidx', 'nbdev/doclinks.py'),
'nbdev.doclinks._find_mod': ('api/doclinks.html#_find_mod', 'nbdev/doclinks.py'),
'nbdev.doclinks._get_exps': ('api/doclinks.html#_get_exps', 'nbdev/doclinks.py'),
55 changes: 33 additions & 22 deletions nbdev/doclinks.py
@@ -22,6 +22,7 @@
from pprint import pformat
from urllib.parse import urljoin
from functools import lru_cache
from types import ModuleType

# %% ../nbs/api/05_doclinks.ipynb
def _sym_nm(klas, sym): return f'{unparse(klas).strip()}.{sym.name}'
@@ -166,6 +167,7 @@ def _find_mod(mod):

@lru_cache(None)
def _get_exps(mod):
"Get the line numbers for function and class definitions in module"
mf = _find_mod(mod)
if not mf: return {}
txt = mf.read_text(encoding='utf-8')
@@ -181,6 +183,7 @@ def _lineno(sym, fname): return _get_exps(fname).get(sym, None) if fname else None

# %% ../nbs/api/05_doclinks.ipynb
def _qual_sym(s, settings):
"Get qualified nb, py, and github paths for a symbol s"
if not isinstance(s,tuple): return s
nb,py = s
nbbase = urljoin(settings["doc_host"]+'/',settings["doc_baseurl"])
@@ -199,30 +202,38 @@ def _qual_syms(entries):

# %% ../nbs/api/05_doclinks.ipynb
@lru_cache(None)
def _build_lookup_table(strip_libs=None, incl_libs=None, skip_mods=None):
cfg = get_config()
if strip_libs is None:
try: strip_libs = cfg.get('strip_libs', cfg.get('lib_path', 'nbdev').name).split()
except FileNotFoundError: strip_libs = 'nbdev'
skip_mods = setify(skip_mods)
strip_libs = L(strip_libs)
if incl_libs is not None: incl_libs = (L(incl_libs)+strip_libs).unique()
entries = {o.name: _qual_syms(o.resolve()) for o in list(pkg_resources.iter_entry_points(group='nbdev'))
if incl_libs is None or o.dist.key in incl_libs}
py_syms = merge(*L(o['syms'].values() for o in entries.values()).concat())
for m in strip_libs:
if m in entries:
_d = entries[m]
stripped = {remove_prefix(k,f"{mod}."):v
for mod,dets in _d['syms'].items() if mod not in skip_mods
for k,v in dets.items()}
py_syms = merge(stripped, py_syms)
return entries,py_syms

# %% ../nbs/api/05_doclinks.ipynb
class NbdevLookup:
"Mapping from symbol names to docs and source URLs"
def __init__(self, strip_libs=None, incl_libs=None, skip_mods=None):
cfg = get_config()
if strip_libs is None:
try: strip_libs = cfg.get('strip_libs', cfg.get('lib_path', 'nbdev').name).split()
except FileNotFoundError: strip_libs = 'nbdev'
skip_mods = setify(skip_mods)
strip_libs = L(strip_libs)
if incl_libs is not None: incl_libs = (L(incl_libs)+strip_libs).unique()
# Dict from lib name to _nbdev module for incl_libs (defaults to all)
self.entries = {o.name: _qual_syms(o.resolve()) for o in list(pkg_resources.iter_entry_points(group='nbdev'))
if incl_libs is None or o.dist.key in incl_libs}
py_syms = merge(*L(o['syms'].values() for o in self.entries.values()).concat())
for m in strip_libs:
if m in self.entries:
_d = self.entries[m]
stripped = {remove_prefix(k,f"{mod}."):v
for mod,dets in _d['syms'].items() if mod not in skip_mods
for k,v in dets.items()}
py_syms = merge(stripped, py_syms)
self.syms = py_syms

def __getitem__(self, s): return self.syms.get(s, None)
def __init__(self, strip_libs=None, incl_libs=None, skip_mods=None, ns=None):
self.entries,self.syms = _build_lookup_table(strip_libs, incl_libs, skip_mods)
self.aliases = {n:o.__name__ for n,o in (ns or {}).items() if isinstance(o, ModuleType)}

def __getitem__(self, s):
if '.' in s:
pre,post = s.split('.', 1)
if pre in self.aliases: s = f"{self.aliases[pre]}.{post}"
return self.syms.get(s, None)

def doc(self, sym):
"Link to docs for `sym`"
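Since the entry-point scan now lives in `_build_lookup_table` behind `@lru_cache(None)`, repeated `NbdevLookup` construction with the same arguments reuses one table. A hedged sketch of the effect (mirrors the cache test added to the notebook below; assumes it runs where nbdev's config lookup succeeds):

from nbdev.doclinks import NbdevLookup, _build_lookup_table

_build_lookup_table.cache_clear()
a = NbdevLookup()          # first call builds the table (cache miss)
b = NbdevLookup()          # same default arguments, so this hits the cache
assert _build_lookup_table.cache_info().hits >= 1
assert a.syms is b.syms    # both instances share the cached symbol dict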
159 changes: 129 additions & 30 deletions nbs/api/05_doclinks.ipynb
@@ -40,7 +40,8 @@
"\n",
"from pprint import pformat\n",
"from urllib.parse import urljoin\n",
"from functools import lru_cache"
"from functools import lru_cache\n",
"from types import ModuleType"
]
},
{
@@ -415,6 +416,7 @@
"\n",
"@lru_cache(None)\n",
"def _get_exps(mod):\n",
" \"Get the line numbers for function and class definitions in module\"\n",
" mf = _find_mod(mod)\n",
" if not mf: return {}\n",
" txt = mf.read_text(encoding='utf-8')\n",
@@ -429,6 +431,28 @@
"def _lineno(sym, fname): return _get_exps(fname).get(sym, None) if fname else None"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#|hide\n",
"_get_exps.cache_clear()\n",
"\n",
"# Test _get_exps caching\n",
"initial = _get_exps.cache_info()\n",
"_ = _get_exps('nbdev/maker.py') # First call should miss\n",
"after_first = _get_exps.cache_info()\n",
"_ = _get_exps('nbdev/maker.py') # Second call should hit\n",
"after_second = _get_exps.cache_info()\n",
"\n",
"test_eq(after_first.misses, initial.misses + 1)\n",
"test_eq(after_first.hits, initial.hits)\n",
"test_eq(after_second.hits, after_first.hits + 1)\n",
"test_eq(after_second.misses, after_first.misses)"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -448,6 +472,7 @@
"source": [
"#|export\n",
"def _qual_sym(s, settings):\n",
" \"Get qualified nb, py, and github paths for a symbol s\"\n",
" if not isinstance(s,tuple): return s\n",
" nb,py = s\n",
" nbbase = urljoin(settings[\"doc_host\"]+'/',settings[\"doc_baseurl\"])\n",
@@ -499,32 +524,47 @@
"metadata": {},
"outputs": [],
"source": [
"#|export\n",
"#| export\n",
"@lru_cache(None)\n",
"def _build_lookup_table(strip_libs=None, incl_libs=None, skip_mods=None):\n",
" cfg = get_config()\n",
" if strip_libs is None:\n",
" try: strip_libs = cfg.get('strip_libs', cfg.get('lib_path', 'nbdev').name).split()\n",
" except FileNotFoundError: strip_libs = 'nbdev'\n",
" skip_mods = setify(skip_mods)\n",
" strip_libs = L(strip_libs)\n",
" if incl_libs is not None: incl_libs = (L(incl_libs)+strip_libs).unique()\n",
" entries = {o.name: _qual_syms(o.resolve()) for o in list(pkg_resources.iter_entry_points(group='nbdev'))\n",
" if incl_libs is None or o.dist.key in incl_libs}\n",
" py_syms = merge(*L(o['syms'].values() for o in entries.values()).concat())\n",
" for m in strip_libs:\n",
" if m in entries:\n",
" _d = entries[m]\n",
" stripped = {remove_prefix(k,f\"{mod}.\"):v\n",
" for mod,dets in _d['syms'].items() if mod not in skip_mods\n",
" for k,v in dets.items()}\n",
" py_syms = merge(stripped, py_syms)\n",
" return entries,py_syms"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#|export\n",
"class NbdevLookup:\n",
" \"Mapping from symbol names to docs and source URLs\"\n",
" def __init__(self, strip_libs=None, incl_libs=None, skip_mods=None):\n",
" cfg = get_config()\n",
" if strip_libs is None:\n",
" try: strip_libs = cfg.get('strip_libs', cfg.get('lib_path', 'nbdev').name).split()\n",
" except FileNotFoundError: strip_libs = 'nbdev'\n",
" skip_mods = setify(skip_mods)\n",
" strip_libs = L(strip_libs)\n",
" if incl_libs is not None: incl_libs = (L(incl_libs)+strip_libs).unique()\n",
" # Dict from lib name to _nbdev module for incl_libs (defaults to all)\n",
" self.entries = {o.name: _qual_syms(o.resolve()) for o in list(pkg_resources.iter_entry_points(group='nbdev'))\n",
" if incl_libs is None or o.dist.key in incl_libs}\n",
" py_syms = merge(*L(o['syms'].values() for o in self.entries.values()).concat())\n",
" for m in strip_libs:\n",
" if m in self.entries:\n",
" _d = self.entries[m]\n",
" stripped = {remove_prefix(k,f\"{mod}.\"):v\n",
" for mod,dets in _d['syms'].items() if mod not in skip_mods\n",
" for k,v in dets.items()}\n",
" py_syms = merge(stripped, py_syms)\n",
" self.syms = py_syms\n",
"\n",
" def __getitem__(self, s): return self.syms.get(s, None)\n",
" def __init__(self, strip_libs=None, incl_libs=None, skip_mods=None, ns=None):\n",
" self.entries,self.syms = _build_lookup_table(strip_libs, incl_libs, skip_mods)\n",
" self.aliases = {n:o.__name__ for n,o in (ns or {}).items() if isinstance(o, ModuleType)}\n",
" \n",
" def __getitem__(self, s): \n",
" if '.' in s:\n",
" pre,post = s.split('.', 1)\n",
" if pre in self.aliases: s = f\"{self.aliases[pre]}.{post}\"\n",
" return self.syms.get(s, None)\n",
"\n",
" def doc(self, sym):\n",
" \"Link to docs for `sym`\"\n",
@@ -559,6 +599,37 @@
" return '\\n'.join(lines)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/nathan/miniconda3/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
" from .autonotebook import tqdm as notebook_tqdm\n"
]
}
],
"source": [
"#|hide\n",
"_build_lookup_table.cache_clear()\n",
"\n",
"# Test _build_lookup_table caching\n",
"initial = _build_lookup_table.cache_info()\n",
"_ = _build_lookup_table() # First call should miss\n",
"after_first = _build_lookup_table.cache_info()\n",
"_ = _build_lookup_table() # Second call should hit\n",
"after_second = _build_lookup_table.cache_info()\n",
"\n",
"test_eq(after_first.misses, initial.misses + 1)\n",
"test_eq(after_first.hits, initial.hits)\n",
"test_eq(after_second.hits, after_first.hits + 1)\n",
"test_eq(after_second.misses, after_first.misses)"
]
},
{
"cell_type": "markdown",
"metadata": {},
@@ -599,7 +670,7 @@
"text/markdown": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L227){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L234){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.doc\n",
"\n",
@@ -610,7 +681,7 @@
"text/plain": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L227){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L234){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.doc\n",
"\n",
@@ -694,7 +765,7 @@
"text/markdown": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L232){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L239){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.code\n",
"\n",
@@ -705,7 +776,7 @@
"text/plain": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L232){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L239){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.code\n",
"\n",
@@ -753,7 +824,7 @@
"text/markdown": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L249){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L257){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.linkify\n",
"\n",
@@ -762,7 +833,7 @@
"text/plain": [
"---\n",
"\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L249){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"[source](https://github.com/fastai/nbdev/blob/master/nbdev/doclinks.py#L257){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n",
"\n",
"### NbdevLookup.linkify\n",
"\n",
@@ -833,6 +904,34 @@
"assert NbdevLookup().linkify(md) == md"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"You can also use NbdevLookup with import aliases like the following:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'this is an aliased import link [`np.array`](https://numpy.org/doc/stable/reference/generated/numpy.array.html#numpy.array)'"
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import numpy as np\n",
"NbdevLookup(ns=globals()).linkify('this is an aliased import link `np.array`')"
]
},
{
"cell_type": "markdown",
"metadata": {},
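The alias handling itself is a prefix rewrite in `__getitem__`: only the first dotted component of a symbol is checked against the recorded module aliases. A hedged sketch of the internals (assumes numpy is importable; whether the lookup returns doc links also depends on a numpy doc index being registered with nbdev):

import numpy as np
from nbdev.doclinks import NbdevLookup

lk = NbdevLookup(ns=globals())
# 'np' is bound to a module in the namespace, so it maps to the module's real name
assert lk.aliases.get('np') == 'numpy'
# 'np.array' is therefore looked up as 'numpy.array' in lk.syms
print(lk['np.array'])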
