#!/usr/bin/env python
# coding: utf-8

# # Interacting with dask workers via IPython
#
# Experimentation/illustration for https://github.com/dask/distributed/pull/370

# In[1]:

from distributed import Executor

e = Executor('127.0.0.1:8786')
e


# In[2]:

futures = e.map(lambda x: 1 / (x - 5), range(10))
futures


# `Executor.start_ipython()` starts IPython on workers,
# returning the connection information needed to connect Jupyter clients to them.

# In[3]:

info = e.start_ipython()
info


# If you pass `qtconsole=True`, dask spawns a local Jupyter QtConsole
# connected to the remote worker(s).
#
# The worker object is injected into the worker namespace as `worker`.

# In[4]:

workers = list(e.ncores())
info = e.start_ipython(workers[0], qtconsole=True)


# In[8]:

from IPython.display import Image
Image('./qtc.png')


# If we pass `magic_names` as a list of strings of the same length as the list of workers,
# we get line and cell magics that redirect execution to the corresponding worker.

# In[5]:

info = e.start_ipython(workers[:2], magic_names=['w1', 'w2'])


# In[6]:

import os  # needed locally for the os.getpid() comparison below

get_ipython().run_line_magic('w1', 'os.getpid()')
get_ipython().run_line_magic('w2', 'os.getpid()')
os.getpid()


# Since the remote kernel is a full IPython instance,
# we can even do plotting with matplotlib and see the figures:

# In[7]:

get_ipython().run_cell_magic('w1', '', '%matplotlib inline\nimport matplotlib.pyplot as plt\nplt.plot(list(worker.data.values()))\n')
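

# One of the tasks above (x = 5) divides by zero, which is exactly the sort of
# failure that makes an interactive session on a worker useful. As a rough
# sketch (not part of the original notebook, and assuming `wait`,
# `Future.status`, and `Future.exception()` behave as in distributed),
# you could locate the failed future and pull its exception back locally first:

# In[ ]:

from distributed import wait

wait(futures)
errored = [f for f in futures if f.status == 'error']
errored[0].exception() if errored else None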
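

# The dictionaries returned by `start_ipython()` are standard Jupyter kernel
# connection info, so any Jupyter client can attach to a worker kernel. As an
# illustrative sketch (assuming `info` maps worker addresses to connection
# dicts, as suggested by the cells above), you can dump one entry to a JSON
# connection file and point `jupyter console --existing` at it:

# In[ ]:

import json

conn = info[workers[0]]  # assumed shape: {worker_address: connection_info}
with open('worker-kernel.json', 'w') as f:
    json.dump(conn, f)

# then, from a shell:
#   jupyter console --existing worker-kernel.json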