#!/usr/bin/env python
# coding: utf-8

# # A Hybrid of Imperative and Symbolic Programming

# In[1]:

import d2l
from mxnet import np, npx, sym
from mxnet.gluon import nn

npx.set_np()


def add(a, b):
    """Return ``a + b`` — the smallest unit of the imperative example."""
    return a + b


def fancy_func(a, b, c):
    """Compute ``a + b + c`` imperatively via two calls to ``add``."""
    e = add(a, b)
    return add(c, e)


fancy_func(1, 2, 3)


# Symbolic programming

# In[2]:

def add_str():
    """Return the source text of ``add`` as a string (symbolic style)."""
    return '''def add(a, b):
    return a + b

'''


def fancy_func_str():
    """Return the source text of ``fancy_func`` as a string."""
    return '''def fancy_func(a, b, c):
    e = add(a, b)
    return add(c, e)

'''


def evoke_str():
    """Assemble a complete program (definitions + driver) as one string."""
    return add_str() + fancy_func_str() + '''
print(fancy_func(1, 2, 3))
'''


prog = evoke_str()
# NOTE: compile/exec of generated source is the point of this demo; never do
# this with untrusted input.
y = compile(prog, '', 'exec')
exec(y)


# Construct with the ``HybridSequential`` class

# In[3]:

def get_net():
    """Build and initialize a small 3-layer MLP as a ``HybridSequential``.

    Returns a freshly initialized network; call ``net.hybridize()`` on the
    result to switch from imperative to compiled symbolic execution.
    """
    net = nn.HybridSequential()
    net.add(nn.Dense(256, activation='relu'),
            nn.Dense(128, activation='relu'),
            nn.Dense(2))
    net.initialize()
    return net


x = np.random.normal(size=(1, 512))
net = get_net()
net(x)


# Compile and optimize the workload

# In[4]:

net.hybridize()
net(x)


# Benchmark

# In[5]:

def benchmark(net, x):
    """Time 1000 forward passes of ``net`` on input ``x``.

    Returns the elapsed wall-clock time in seconds.
    """
    timer = d2l.Timer()
    for _ in range(1000):
        _ = net(x)
    # MXNet ops are asynchronous; wait for all pending work before stopping
    # the timer so the measurement is honest.
    npx.waitall()
    return timer.stop()


net = get_net()
print(f'before hybridizing: {benchmark(net, x):.4f} sec')
net.hybridize()
print(f'after hybridizing: {benchmark(net, x):.4f} sec')


# Export the program to other languages

# In[6]:

net.export('my_mlp')
get_ipython().system('ls my_mlp*')
get_ipython().system('head -n20 my_mlp-symbol.json')