#|default_exp test
#|export
import time,os,sys,traceback,contextlib,inspect
from fastcore.basics import *
from fastcore.imports import *
from fastcore.foundation import *
from fastcore.parallel import *
from fastcore.script import *
from nbdev.config import *
from nbdev.doclinks import *
from nbdev.process import NBProcessor, nb_lang
from nbdev.frontmatter import FrontmatterProc
from logging import warning
from execnb.nbio import *
from execnb.shell import *
Run unit tests on notebooks in parallel
#|export
def test_nb(fn, # file name of notebook to test
skip_flags=None, # list of flags marking cells to skip
force_flags=None, # list of flags marking cells to always run
do_print=False, # print completion?
            showerr=True, # warn about errors?
            basepath=None): # path to add to sys.path
    "Execute tests in notebook `fn`, except cells flagged with `skip_flags`"
if basepath: sys.path.insert(0, str(basepath))
if not IN_NOTEBOOK: os.environ["IN_TEST"] = '1'
flags=set(L(skip_flags)) - set(L(force_flags))
    nb = NBProcessor(fn, procs=FrontmatterProc, process=True).nb
fm = getattr(nb, 'frontmatter_', {})
if str2bool(fm.get('skip_exec', False)) or nb_lang(nb) != 'python': return True, 0
def _no_eval(cell):
if cell.cell_type != 'code': return True
if 'nbdev_export'+'(' in cell.source: return True
direc = getattr(cell, 'directives_', {}) or {}
if direc.get('eval:', [''])[0].lower() == 'false': return True
return flags & direc.keys()
start = time.time()
k = CaptureShell(fn)
if do_print: print(f'Starting {fn}')
try:
with working_directory(fn.parent):
k.run_all(nb, exc_stop=True, preproc=_no_eval)
res = True
except:
if showerr: warning(k.prettytb(fname=fn))
res=False
if do_print: print(f'- Completed {fn}')
return res,time.time()-start
`test_nb` can test a notebook, and skip over cells marked with certain flags:
_nb = Path('../tests/directives.ipynb')
success,duration = test_nb(_nb, skip_flags=['notest'])
assert success
duration
0.02804255485534668
In that notebook, the cell flagged `notest` raises an exception; the failure is returned as a bool rather than raised:
_nb = Path('../tests/directives.ipynb')
success,duration = test_nb(_nb, showerr=False)
assert not success
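Passing `force_flags` removes matching flags from `skip_flags`, so a cell that would otherwise be skipped runs again. As a quick check, forcing the failing `notest` cell makes the test fail even though the flag is also in `skip_flags`:
success,duration = test_nb(_nb, skip_flags=['notest'], force_flags=['notest'], showerr=False)
assert not success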
#|export
def _keep_file(p:Path, # notebook path to check for an `ignore_fname` sibling
               ignore_fname:str # filename that will result in siblings being ignored
              ) -> bool:
    "Returns False if `ignore_fname` is a sibling of `p`, else True"
    if p.exists(): return not bool(p.parent.ls().attrgot('name').filter(lambda x: x == ignore_fname))
    else: return True
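For example, a sibling file named `.notest` makes `_keep_file` return False for every notebook in that folder. A minimal sketch using a temporary directory (hypothetical file names):
import tempfile
with tempfile.TemporaryDirectory() as d:
    d = Path(d)
    (d/'nb.ipynb').touch()
    assert _keep_file(d/'nb.ipynb', '.notest')     # no `.notest` sibling: keep
    (d/'.notest').touch()
    assert not _keep_file(d/'nb.ipynb', '.notest') # `.notest` sibling present: ignore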
Sometimes you may wish to override one or more of the `skip_flags`, in which case you can use the `force_flags` argument, which removes the matching flag(s) from `skip_flags`. This is useful because `skip_flags` are meant to be set in the `tst_flags` field of `settings.ini`, whereas `force_flags` are usually passed in by the user.
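Concretely, the effective skip set is the set difference computed at the top of `test_nb`. For instance, assuming `settings.ini` had `tst_flags = slow gpu` (hypothetical values) and the user forced `gpu`, only `slow` would still be skipped:
skip_flags,force_flags = 'slow gpu'.split(),'gpu'.split()
assert set(L(skip_flags)) - set(L(force_flags)) == {'slow'}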
#|export
@call_parse
def nbdev_test(
fname:str=None, # A notebook name or glob to test
flags:str='', # Space separated list of test flags to run that are normally ignored
n_workers:int=None, # Number of workers
timing:bool=False, # Time each notebook to see which are slow
do_print:bool=False, # Print start and end of each notebook
pause:float=0.01, # Pause time (in seconds) between notebooks to avoid race conditions
symlinks:bool=False, # Follow symlinks?
recursive:bool=None, # Include subfolders?
file_re:str=None, # Only include files matching regex
folder_re:str=None, # Only enter folders matching regex
skip_file_glob:str=None, # Skip files matching glob
skip_file_re:str='^[_.]', # Skip files matching regex
ignore_fname:str='.notest' # Filename that will result in siblings being ignored
):
"Test in parallel notebooks matching `fname`, passing along `flags`"
skip_flags = get_config().tst_flags.split()
force_flags = flags.split()
files = nbglob(fname, recursive=recursive, file_re=file_re, folder_re=folder_re,
skip_file_glob=skip_file_glob, skip_file_re=skip_file_re, as_path=True, symlinks=symlinks)
files = [f.absolute() for f in sorted(files) if _keep_file(f, ignore_fname)]
if len(files)==0: return print('No files were eligible for testing')
if n_workers is None: n_workers = 0 if len(files)==1 else min(num_cpus(), 8)
os.chdir(get_config().path('nbs_path'))
if IN_NOTEBOOK: kwargs = {'method':'spawn'} if os.name=='nt' else {'method':'forkserver'}
else: kwargs = {}
results = parallel(test_nb, files, skip_flags=skip_flags, force_flags=force_flags, n_workers=n_workers,
basepath=get_config().config_path, pause=pause, do_print=do_print, **kwargs)
passed,times = zip(*results)
if all(passed): print("Success.")
else:
_fence = '='*50
failed = '\n\t'.join(f.name for p,f in zip(passed,files) if not p)
sys.stderr.write(f"\nnbdev Tests Failed On The Following Notebooks:\n{_fence}\n\t{failed}\n")
exit(1)
if timing:
for i,t in sorted(enumerate(times), key=lambda o:o[1], reverse=True): print(f"{files[i].name}: {int(t)} secs")
#|eval: false
nbdev_test(n_workers=0)
Success.
You can even run `nbdev_test` in non-nbdev projects. For example, you can test an individual notebook like so:
#|eval: false
!nbdev_test --fname ../tests/minimal.ipynb --do_print
No files were eligible for testing
Or you can test an entire directory of notebooks, filtered to only those matching a regular expression:
#|eval: false
!nbdev_test --fname ../tests --file_re '.*test.ipynb' --do_print
No files were eligible for testing
#|hide
import nbdev; nbdev.nbdev_export()