#hide
#default_exp cli
from nbdev.showdoc import show_doc
Console commands added by the nbdev library
#export
from nbdev.imports import *
from nbdev.export import *
from nbdev.sync import *
from nbdev.merge import *
from nbdev.export2html import *
from nbdev.test import *
from fastcore.script import call_parse,Param,bool_arg
from subprocess import check_output,STDOUT
nbdev
comes with the following commands. To use any of them, you must be in one of the subfolders of your project: they will search for the settings.ini
recursively in the parent directory but need to access it to be able to work. Their names all begin with nbdev so you can easily get a list with tab completion.
- `nbdev_build_docs` builds the documentation from the notebooks
- `nbdev_build_lib` builds the library from the notebooks
- `nbdev_bump_version` increments version in `settings.py` by one
- `nbdev_clean_nbs` removes all superfluous metadata from the notebooks, to avoid merge conflicts
- `nbdev_detach` exports cell attachments to `dest` and updates references
- `nbdev_diff_nbs` gives you the diff between the notebooks and the exported library
- `nbdev_fix_merge` will fix merge conflicts in a notebook file
- `nbdev_install_git_hooks` installs the git hooks that use the last two commands automatically on each commit/merge
- `nbdev_nb2md` converts a notebook to a markdown file
- `nbdev_new` creates a new nbdev project
- `nbdev_read_nbs` reads all notebooks to make sure none are broken
- `nbdev_test_nbs` runs tests in notebooks
- `nbdev_trust_nbs` trusts all notebooks (so that the HTML content is shown)
- `nbdev_update_lib` propagates any change in the library back to the notebooks
#export
@call_parse
def nbdev_build_lib(fname:Param("A notebook name or glob to convert", str)=None):
    "Export notebooks matching `fname` to python modules"
    # Regenerate templated project files first — presumably the docs
    # templates; confirm in nbdev.export2html's `write_tmpls`.
    write_tmpls()
    # `fname=None` exports every notebook in the configured notebooks folder.
    notebook2script(fname=fname)
By default (`fname` left to `None`), the whole library is built from the notebooks in the `lib_folder` set in your `settings.ini`.
#export
@call_parse
def nbdev_update_lib(fname:Param("A notebook name or glob to convert", str)=None):
    "Propagates any change in the modules matching `fname` to the notebooks that created them"
    # Reverse direction of `nbdev_build_lib`: syncs edits made in the exported
    # .py modules back into their source notebooks.
    script2notebook(fname=fname)
By default (`fname` left to `None`), the whole library is treated. Note that this tool is only designed for small changes such as typos or small bug fixes. You can't add new cells in a notebook from the library.
#export
@call_parse
def nbdev_diff_nbs():
    "Prints the diff between an export of the library in notebooks and the actual modules"
    # Pure delegation: all diffing logic lives in nbdev.sync's `diff_nb_script`.
    diff_nb_script()
#export
def _test_one(fname, flags=None, verbose=True):
    """Run the tests in notebook `fname`, returning `(success, duration_in_secs)`.

    `flags` is passed through to `test_nb`; when `verbose`, failures are
    printed along the way."""
    print(f"testing {fname}")
    start = time.time()
    try:
        test_nb(fname, flags=flags)
        return True,time.time()-start
    except Exception as e:
        # ZMQ errors are transient kernel-connection failures: retry and return
        # the retry's result. (The original recursed on the undefined name
        # `item` — a NameError — and discarded the recursive call's result.)
        if "ZMQError" in str(e): return _test_one(fname, flags=flags, verbose=verbose)
        if verbose: print(f'Error in {fname}:\n{e}')
        return False,time.time()-start
#export
@call_parse
def nbdev_test_nbs(fname:Param("A notebook name or glob to convert", str)=None,
                   flags:Param("Space separated list of flags", str)=None,
                   n_workers:Param("Number of workers to use", int)=None,
                   verbose:Param("Print errors along the way", bool)=True,
                   timing:Param("Timing each notebook to see the ones are slow", bool)=False,
                   pause:Param("Pause time (in secs) between notebooks to avoid race conditions", float)=0.5):
    "Test in parallel the notebooks matching `fname`, passing along `flags`"
    if flags is not None: flags = flags.split(' ')
    # Default: every notebook in the configured nbs folder, skipping
    # underscore-prefixed (hidden/tutorial) ones.
    if fname is None:
        files = [f for f in Config().nbs_path.glob('*.ipynb') if not f.name.startswith('_')]
    else: files = glob.glob(fname)
    files = [Path(f).absolute() for f in sorted(files)]
    # A single notebook runs in-process (n_workers=0); otherwise cap at 8 workers.
    if n_workers is None: n_workers = 0 if len(files)==1 else min(num_cpus(), 8)
    # make sure we are inside the notebook folder of the project
    os.chdir(Config().nbs_path)
    results = parallel(_test_one, files, flags=flags, verbose=verbose, n_workers=n_workers, pause=pause)
    # Each result is the (success, duration) pair returned by `_test_one`.
    passed,times = [r[0] for r in results],[r[1] for r in results]
    if all(passed): print("All tests are passing!")
    else:
        msg = "The following notebooks failed:\n"
        raise Exception(msg + '\n'.join([f.name for p,f in zip(passed,files) if not p]))
    # Only reached when everything passed: report notebooks slowest-first.
    if timing:
        for i,t in sorted(enumerate(times), key=lambda o:o[1], reverse=True):
            print(f"Notebook {files[i].name} took {int(t)} seconds")
By default (`fname` left to `None`), the whole library is tested from the notebooks in the `lib_folder` set in your `settings.ini`.
The following functions complete the ones in export2html
to fully build the documentation of your library.
#export
_re_index = re.compile(r'^(?:\d*_|)index\.ipynb$')
#export
def make_readme():
    "Convert the index notebook to README.md"
    # Locate the index notebook (`index.ipynb`, optionally with a numeric
    # prefix); the last match in glob order wins.
    index_fn = None
    for f in Config().nbs_path.glob('*.ipynb'):
        if _re_index.match(f.name): index_fn = f
    assert index_fn is not None, "Could not locate index notebook"
    print(f"converting {index_fn} to README.md")
    # Convert to markdown next to settings.ini, without jekyll front matter.
    convert_md(index_fn, Config().config_file.parent, jekyll=False)
    n = Config().config_file.parent/index_fn.with_suffix('.md').name
    shutil.move(n, Config().config_file.parent/'README.md')
    # If a PRE_README.md exists, prepend its contents to the generated README.
    if Path(Config().config_file.parent/'PRE_README.md').is_file():
        with open(Config().config_file.parent/'README.md', 'r') as f: readme = f.read()
        with open(Config().config_file.parent/'PRE_README.md', 'r') as f: pre_readme = f.read()
        with open(Config().config_file.parent/'README.md', 'w') as f: f.write(f'{pre_readme}\n{readme}')
#export
@call_parse
def nbdev_build_docs(fname:Param("A notebook name or glob to convert", str)=None,
                     force_all:Param("Rebuild even notebooks that haven't changed", bool_arg)=False,
                     mk_readme:Param("Also convert the index notebook to README", bool_arg)=True,
                     n_workers:Param("Number of workers to use", int)=None,
                     pause:Param("Pause time (in secs) between notebooks to avoid race conditions", float)=0.5):
    "Build the documentation by converting notebooks matching `fname` to html"
    notebook2html(fname=fname, force_all=force_all, n_workers=n_workers, pause=pause)
    # The sidebar is only regenerated on a full build (no explicit `fname`).
    if fname is None: make_sidebar()
    if mk_readme: make_readme()
By default (`fname` left to `None`), the whole documentation is built from the notebooks in the `lib_folder` set in your `settings.ini`, only converting the ones that have been modified since their corresponding html was last touched unless you pass `force_all=True`. The index is also converted to make the README file, unless you pass along `mk_readme=False`.
#export
@call_parse
def nbdev_nb2md(fname:Param("A notebook file name to convert", str),
                dest:Param("The destination folder", str)='.',
                img_path:Param("Folder to export images to")="",
                jekyll:Param("To use jekyll metadata for your markdown file or not", bool_arg)=False,):
    "Convert the notebook in `fname` to a markdown file"
    # Extract cell attachments to `img_path` first so the markdown output can
    # reference them as plain image files.
    nb_detach_cells(fname, dest=img_path)
    convert_md(fname, dest, jekyll=jekyll, img_path=img_path)
#export
@call_parse
def nbdev_detach(path_nb:Param("Path to notebook"),
                 dest:Param("Destination folder", str)="",
                 use_img:Param("Convert markdown images to img tags", bool_arg)=False):
    "Export cell attachments to `dest` and update references"
    # Pure delegation to nbdev.export2html's `nb_detach_cells`.
    nb_detach_cells(path_nb, dest=dest, use_img=use_img)
#export
@call_parse
def nbdev_read_nbs(fname:Param("A notebook name or glob to convert", str)=None):
    "Check all notebooks matching `fname` can be opened"
    # With no explicit glob, recursively scan the project's notebooks folder.
    nb_files = Config().nbs_path.glob('**/*.ipynb') if fname is None else glob.glob(fname)
    for nb in nb_files:
        try:
            _ = read_nb(nb)
        except Exception as e:
            # Name the offending notebook before re-raising the parse error.
            print(f"{nb} is corrupted and can't be opened.")
            raise
By default (`fname` left to `None`), all the notebooks in `lib_folder` are checked.
#export
@call_parse
def nbdev_trust_nbs(fname:Param("A notebook name or glob to convert", str)=None,
                    force_all:Param("Trust even notebooks that haven't changed", bool)=False):
    "Trust notebooks matching `fname`"
    # `.last_checked` records when this command last ran; its mtime lets us
    # skip notebooks untouched since then.
    check_fname = Config().nbs_path/".last_checked"
    last_checked = os.path.getmtime(check_fname) if check_fname.exists() else None
    files = Config().nbs_path.glob('**/*.ipynb') if fname is None else glob.glob(fname)
    for fn in files:
        if last_checked and not force_all:
            last_changed = os.path.getmtime(fn)
            # Unchanged since the last run — nothing to re-sign.
            if last_changed < last_checked: continue
        nb = read_nb(fn)
        # Only sign notebooks whose signature is not already valid.
        if not NotebookNotary().check_signature(nb): NotebookNotary().sign(nb)
    # Refresh the timestamp marker for the next invocation.
    check_fname.touch(exist_ok=True)
By default (`fname` left to `None`), all the notebooks in `lib_folder` are trusted. To speed things up, only the ones touched since the last time this command was run are trusted unless you pass along `force_all=True`.
#export
@call_parse
def nbdev_fix_merge(fname:Param("A notebook filename to fix", str),
                    fast:Param("Fast fix: automatically fix the merge conflicts in outputs or metadata", bool)=True,
                    trust_us:Param("Use local outputs/metadata when fast mergning", bool)=True):
    "Fix merge conflicts in notebook `fname`"
    # `fast` auto-resolves conflicts in outputs/metadata; `trust_us` then picks
    # the local side (the remote one when False).
    fix_conflicts(fname, fast=fast, trust_us=trust_us)
When you have merge conflicts after a `git pull`, the notebook file will be broken and won't open in jupyter notebook anymore. This command fixes this by changing the notebook to a proper json file again and adds markdown cells to signal the conflict; you just have to open that notebook again and look for `>>>>>>>` to see those conflicts and manually fix them. The old broken file is copied with a `.ipynb.bak` extension, so it is still accessible in case the merge wasn't successful.
Moreover, if `fast=True`, conflicts in outputs and metadata will automatically be fixed by using the local version if `trust_us=True`, the remote one if `trust_us=False`. With this option, it's very likely you won't have anything to do, unless there is a real conflict.
#export
def bump_version(version, part=2):
    """Increment component `part` (0=major, 1=minor, 2=patch) of the dotted
    version string `version`, zeroing every later component.

    Generalized over the original, which assumed exactly three components;
    behavior for 3-part versions is unchanged."""
    parts = version.split('.')
    parts[part] = str(int(parts[part]) + 1)
    # Reset everything after the bumped component, e.g. 0.1.9 -> 0.2.0 for part=1.
    for i in range(part+1, len(parts)): parts[i] = '0'
    return '.'.join(parts)
# Inline sanity checks (fastcore's `test_eq`): patch bump by default, and
# minor bump zeroes the patch component.
test_eq(bump_version('0.1.1' ), '0.1.2')
test_eq(bump_version('0.1.1', 1), '0.2.0')
#export
@call_parse
def nbdev_bump_version(part:Param("Part of version to bump", int)=2):
    "Increment version in `settings.py` by one"
    cfg = Config()
    print(f'Old version: {cfg.version}')
    # Bump the requested component and persist it back to the config file.
    cfg.d['version'] = bump_version(Config().version, part)
    cfg.save()
    # Propagate the new version into the library — presumably rewrites
    # __version__ in the package; confirm in nbdev.export's `update_version`.
    update_version()
    print(f'New version: {cfg.version}')
#export
import subprocess
#export
@call_parse
def nbdev_install_git_hooks():
    "Install git hooks to clean/trust notebooks automatically"
    # Fall back to the cwd when run outside an nbdev project (no settings.ini).
    try: path = Config().config_file.parent
    except: path = Path.cwd()
    hook_path = path/'.git'/'hooks'
    fn = hook_path/'post-merge'
    hook_path.mkdir(parents=True, exist_ok=True)
    #Trust notebooks after merge
    with open(fn, 'w') as f:
        f.write("""#!/bin/bash
echo "Trusting notebooks"
nbdev_trust_nbs
"""
        )
    # Make the post-merge hook executable.
    os.chmod(fn, os.stat(fn).st_mode | stat.S_IEXEC)
    #Clean notebooks on commit/diff
    with open(path/'.gitconfig', 'w') as f:
        f.write("""# Generated by nbdev_install_git_hooks
#
# If you need to disable this instrumentation do:
#
# git config --local --unset include.path
#
# To restore the filter
#
# git config --local include.path .gitconfig
#
# If you see notebooks not stripped, checked the filters are applied in .gitattributes
#
[filter "clean-nbs"]
clean = nbdev_clean_nbs --read_input_stream True
smudge = cat
required = true
[diff "ipynb"]
textconv = nbdev_clean_nbs --disp True --fname
""")
    # Point the repo's local git config at the generated .gitconfig so the
    # clean/diff filters above take effect.
    cmd = "git config --local include.path ../.gitconfig"
    print(f"Executing: {cmd}")
    result = subprocess.run(cmd.split(), shell=False, check=False, stderr=subprocess.PIPE)
    if result.returncode == 0:
        print("Success: hooks are installed and repo's .gitconfig is now trusted")
    else:
        print("Failed to trust repo's .gitconfig")
        if result.stderr: print(f"Error: {result.stderr.decode('utf-8')}")
    # Attach the filters to notebook files via .gitattributes in the nbs folder.
    try: nb_path = Config().nbs_path
    except: nb_path = Path.cwd()
    with open(nb_path/'.gitattributes', 'w') as f:
        f.write("""**/*.ipynb filter=clean-nbs
**/*.ipynb diff=ipynb
"""
        )
This command installs git hooks to make sure notebooks are cleaned before you commit them to GitHub and automatically trusted at each merge. To be more specific, this creates:
- a post-merge hook that runs `nbdev_trust_nbs`
- a `.gitconfig` file that uses `nbdev_clean_nbs` as a filter/diff on all notebook files inside `nbs_folder`
- a `.gitattributes` file generated in this folder (copy this file in other folders where you might have notebooks you want cleaned as well)
#export
_template_git_repo = "https://github.com/fastai/nbdev_template.git"
#export
@call_parse
def nbdev_new(name: Param("A directory to create the project in", str),
              template_git_repo: Param("url to template repo", str)=_template_git_repo):
    """Create a new nbdev project with a given name.

    Clones `template_git_repo` into `./name`, replaces its git history with a
    fresh repo, and makes an initial commit. Aborts if the directory exists;
    on any failure, the partially-created directory is removed."""
    path = Path(f"./{name}").absolute()
    if path.is_dir():
        print(f"Directory {path} already exists. Aborting.")
        return
    print(f"Creating a new nbdev project {name}.")
    def rmtree_onerror(func, path, exc_info):
        "Use with `shutil.rmtree` when you need to delete files/folders that might be read-only."
        os.chmod(path, stat.S_IWRITE)
        func(path)
    try:
        subprocess.run(['git', 'clone', f'{template_git_repo}', f'{path}'], check=True, timeout=5000)
        # Note: on windows, .git is created with a read-only flag
        shutil.rmtree(path/".git", onerror=rmtree_onerror)
        subprocess.run("git init".split(), cwd=path, check=True)
        subprocess.run("git add .".split(), cwd=path, check=True)
        # Pass argv as a list: the original split '"Initial"' on whitespace,
        # which left literal quote characters in the commit message (argv lists
        # with shell=False must not be shell-quoted).
        subprocess.run(['git', 'commit', '-am', 'Initial'], cwd=path, check=True)
        print(f"Created a new repo for project {name}. Please edit settings.ini and run nbdev_build_lib to get started.")
    except Exception as e:
        print("An error occured while copying nbdev project template:")
        print(e)
        # Clean up the partially-created project directory.
        if os.path.isdir(path):
            try:
                shutil.rmtree(path, onerror=rmtree_onerror)
            except Exception as e2:
                print(f"An error occured while cleaning up. Failed to delete {path}:")
                print(e2)
nbdev_new
is a command line tool that creates a new nbdev project based on the nbdev_template repo. You can use a custom template by passing in template_git_repo
. It'll initialize a new git repository and commit the new project.
After you run nbdev_new
, please edit settings.ini
and run nbdev_build_lib
.
If you want to use collapsable cells in your HTML docs, you need to style the details tag in customstyles.css. _add_collapse_css
will do this for you, if the details tag is not already styled.
#exporti
_details_description_css = """\n
/*Added by nbdev add_collapse_css*/
details.description[open] summary::after {
content: attr(data-open);
}
details.description:not([open]) summary::after {
content: attr(data-close);
}
details.description summary {
text-align: right;
font-size: 15px;
color: #337ab7;
cursor: pointer;
}
details + div.output_wrapper {
/* show/hide code */
margin-top: 25px;
}
div.input + details {
/* show/hide output */
margin-top: -25px;
}
/*End of section added by nbdev add_collapse_css*/"""
def _add_collapse_css(doc_path=None):
"Update customstyles.css so that collapse components can be used in HTML pages"
fn = (Path(doc_path) if doc_path else Config().doc_path/'css')/'customstyles.css'
with open(fn) as f:
if 'details.description' in f.read():
print('details.description already styled in customstyles.css, no changes made')
else:
with open(fn, 'a') as f: f.write(_details_description_css)
print('details.description styles added to customstyles.css')
#hide
# Exercise `_add_collapse_css` against a throwaway stylesheet: the first call
# appends the styles; the second detects them and is a no-op even after
# unrelated content is appended.
with open('/tmp/customstyles.css', 'w') as f:
    f.write('/*test file*/')
_add_collapse_css('/tmp') # details.description styles added ...
with open('/tmp/customstyles.css') as f:
    test_eq(''.join(['/*test file*/', _details_description_css]), f.read())
with open('/tmp/customstyles.css', 'a') as f:
    f.write('\nmore things added after')
_add_collapse_css('/tmp') # details.description already styled ...
with open('/tmp/customstyles.css') as f:
    test_eq(''.join(['/*test file*/', _details_description_css, '\nmore things added after']), f.read())
details.description styles added to customstyles.css details.description already styled in customstyles.css, no changes made
#hide
# Standard nbdev trailer: export every #export cell to the library modules.
from nbdev.export import notebook2script
notebook2script()
Converted 00_export.ipynb. Converted 01_sync.ipynb. Converted 02_showdoc.ipynb. Converted 03_export2html.ipynb. Converted 04_test.ipynb. Converted 05_merge.ipynb. Converted 06_cli.ipynb. Converted 07_clean.ipynb. Converted 99_search.ipynb. Converted index.ipynb. Converted tutorial.ipynb.