This notebook is a counterpart to the notebook MeanHamilMinimizer_native_with_autograd.
The first problem is identical in both notebooks and is done entirely natively, but here the minimization is driven by scipy instead of autograd.
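For orientation, here is a minimal, hypothetical sketch of the kind of call that minlib='scipy' presumably delegates to (the cost function below is a stand-in, not Qubiter's). The point is that Powell is a derivative-free method, so no autograd gradients are needed.

import numpy as np
from scipy.optimize import minimize

# Stand-in cost; in this notebook the real cost is the mean value of a
# Hamiltonian, evaluated by simulating a parametric circuit.
def toy_cost(x):
    return (x[0] - 1.)**2 + (x[1] + 2.)**2

res = minimize(toy_cost, x0=np.array([.3, .8]), method='Powell')
print(res.x, res.fun)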
import os
import sys
print(os.getcwd())
# cd to the Qubiter root directory and put it on the module search path
os.chdir('../../')
print(os.getcwd())
sys.path.insert(0, os.getcwd())
/home/rrtucci/PycharmProjects/qubiter/qubiter/jupyter_notebooks
/home/rrtucci/PycharmProjects/qubiter
from qubiter.adv_applications.MeanHamil_native import *
from qubiter.adv_applications.MeanHamilMinimizer import *
loaded OneQubitGate, WITHOUT autograd.numpy
num_qbits = 2
file_prefix = 'mean_hamil_rigetti_test1'
emb = CktEmbedder(num_qbits, num_qbits)
wr = SEO_writer(file_prefix, emb)
# two parametric rotations on qubit 0; the rads arguments are
# placeholder strings, explained below
wr.write_Rx(0, rads='#1')
wr.write_Ry(0, rads='-#2*.5')
wr.close_files()
wr.print_eng_file(jup=True)
1  ROTX  #1  AT  0
2  ROTY  -#2*.5  AT  0
wr.print_pic_file(jup=True)
1  |   Rx
2  |   Ry
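The rads arguments above are Qubiter placeholder strings rather than numbers: '#1' stands for the value of variable number 1, and '-#2*.5' is a functional placeholder that evaluates to -0.5 times variable number 2 once numeric values are supplied. A sketch of the substitution (the dict mirrors init_var_num_to_rads below; this is an illustration, not Qubiter internals):

# Hypothetical illustration of how the placeholders resolve to angles.
var_num_to_rads = {1: .3, 2: .8}
rads_rx = var_num_to_rads[1]        # '#1'     -> 0.3
rads_ry = -var_num_to_rads[2]*.5    # '-#2*.5' -> -0.4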
fun_name_to_fun = None
hamil = QubitOperator('Z0', 1.)
print('hamil=\n', hamil)
hamil=
 1.0 [Z0]
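The Hamiltonian here is a single Pauli string, so the cost being minimized is the mean value <psi(#1,#2)| Z0 |psi(#1,#2)>. QubitOperator uses OpenFermion-style syntax, and more general Hamiltonians can be built as weighted sums of Pauli strings, e.g. (illustrative only, not used below):

# Illustrative only: a two-term Hamiltonian in the same syntax.
hamil2 = QubitOperator('Z0', 1.) + QubitOperator('X0 Y1', .4)
print(hamil2)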
init_var_num_to_rads = {1: .3, 2: .8}  # starting angles for variables #1, #2
all_var_nums = [1, 2]
num_samples = 0  # 0 => compute the mean value exactly from the state vector
print_hiatus = 4  # print the cost every 4th function evaluation
verbose = False
np.random.seed(1234)  # seed for reproducibility when sampling (num_samples > 0)
emp_mhamil = MeanHamil_native(file_prefix, num_qbits, hamil,
        all_var_nums, fun_name_to_fun, simulator_name='SEO_simulator',
        num_samples=num_samples)  # "empirical" mean Hamiltonian
targ_mhamil = MeanHamil_native(file_prefix, num_qbits, hamil,
        all_var_nums, fun_name_to_fun,
        simulator_name='SEO_simulator')  # "target" mean Hamiltonian, zero samples
mini = MeanHamilMinimizer(emp_mhamil, targ_mhamil,
all_var_nums, init_var_num_to_rads,
print_hiatus=print_hiatus, verbose=verbose)
mini.find_min(minlib='scipy', method='Powell')
x_val~ (#1, #2)
iter=0, cost=0.575017, x_val=0.300000, 0.800000
iter=4, cost=-0.597000, x_val=1.300000, 0.800000
iter=8, cost=-0.696599, x_val=1.579575, 0.800000
iter=12, cost=-0.683637, x_val=1.566905, -0.818034
iter=16, cost=-0.999970, x_val=1.566905, 0.000457
iter=20, cost=-0.999970, x_val=1.566905, 0.000457
iter=24, cost=-0.321153, x_val=0.948872, 0.000457
iter=28, cost=-1.000000, x_val=1.570796, 0.000457
iter=32, cost=-0.539918, x_val=1.570796, 1.000457
iter=36, cost=-0.927763, x_val=1.570796, 0.382423
iter=40, cost=-1.000000, x_val=1.570796, 0.000005
   direc: array([[1., 0.],
       [0., 1.]])
     fun: -0.9999999999999881
 message: 'Optimization terminated successfully.'
    nfev: 42
     nit: 2
  status: 0
 success: True
       x: array([ 1.57079640e+00, -8.49466221e-11])
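As a sanity check, the optimum agrees with a hand calculation. Assuming Qubiter's rotation convention ROT(rads) = exp(i*rads*sigma), the mean value of Z0 for this circuit works out to cos(2*#1)*cos(#2), which is minimized at #1 = pi/2, #2 = 0 with value -1, exactly what Powell reports. The spot checks below reproduce entries of the log above:

import numpy as np

# Analytic cost under the assumed rotation convention (see lead-in).
def analytic_cost(t1, t2):
    return np.cos(2*t1)*np.cos(t2)

print(analytic_cost(.3, .8))       # ~0.575017  (iter=0 in the log)
print(analytic_cost(1.3, .8))      # ~-0.597000 (iter=4 in the log)
print(analytic_cost(np.pi/2, 0.))  # -1.0       (the reported minimum)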