We are looking at the problem Max $k$XOR, where each variable is involved in $D+1$ clauses and has no overlapping constraints.
This is code comparing one-local algorithms: Hirvonen's threshold algorithm and Farhi's QAOA at depth 1.
from functools import lru_cache
from matplotlib.ticker import StrMethodFormatter
# Fix: mpl_toolkits.axes_grid is deprecated (it emitted a
# MatplotlibDeprecationWarning); axes_grid1 provides identical helpers.
from mpl_toolkits.axes_grid1.inset_locator import (inset_axes, InsetPosition, mark_inset)
from numpy import cos, sin, tan, arctan as atan
from scipy.optimize import minimize
from scipy.special import binom, erf
from sympy import exp, simplify, expand, expand_trig, trigsimp
import matplotlib.pyplot as plt
import numpy as np
import sympy
import time

# Draw axes/grid beneath plotted data in every figure.
plt.rc('axes', axisbelow=True)
/tmp/ipykernel_78203/3810261615.py:3: MatplotlibDeprecationWarning: The mpl_toolkits.axes_grid module was deprecated in Matplotlib 2.1 and will be removed two minor releases later. Use mpl_toolkits.axes_grid1 and mpl_toolkits.axisartist, which provide the same functionality instead. from mpl_toolkits.axes_grid.inset_locator import (inset_axes, InsetPosition, mark_inset)
print("Starting time:", time.ctime())
Starting time: Sat Sep 11 18:13:44 2021
def qaoa_perf_3xor(s, D):
    """Depth-1 QAOA advantage term for Max 3XOR at degree D.

    `s` plays the role of sin(gamma); the performance of the algorithm
    is 1/2 plus this quantity (scaled by sqrt(D) elsewhere).
    """
    denominator = (1 + 3 / (1 - s * s) ** D) ** 0.5
    return s / denominator
# Symbolic sanity check: display the closed-form expression in gamma and D.
gamma = sympy.Symbol('gamma')
D_symbol = sympy.Symbol('D')
qaoa_perf_3xor(sympy.sin(gamma), D_symbol)
# Plot the scaled QAOA constant C_{3,D} against gamma*sqrt(D) for a
# range of degrees; the curves converge as D grows.
Ds = [5, 10, 20, 50, 100, 500, 1000, 5000, 10000, 50000, 500000]
inps = np.linspace(0, 3, 100000)
for D in Ds:
    outs = [qaoa_perf_3xor(np.sin(i*D**-0.5), D)*D**0.5 for i in inps]
    plt.plot(inps, outs, label="$D$=" + str(D))
plt.grid()
plt.legend()
plt.title("QAOA performance for $k=3$")
# NOTE(review): "\g"/"\s" are invalid escape sequences — raw strings
# (r"...") would be cleaner, though CPython currently passes them through.
plt.xlabel("$\gamma \sqrt{D}$")
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
plt.savefig("images/QAOA_max3xor.png", dpi=300)
We can also look at the derivative of performance with respect to $\gamma$:
# Locate the optimal gamma*sqrt(D) for each degree by finding where the
# derivative of the performance (w.r.t. gamma) crosses zero.
qaoa_perf_derivative = sympy.lambdify((gamma, D_symbol), sympy.Derivative(qaoa_perf_3xor(sympy.sin(gamma), D_symbol), gamma, evaluate=True))
for D in Ds:
    outs = [qaoa_perf_derivative(i*D**-0.5, D) for i in inps]
    plt.plot(inps, outs, label="$D$=" + str(D))
    # index of the sample closest to a zero derivative
    pt = np.argmin(np.abs(outs))
    print(D, inps[pt], outs[pt])
plt.grid()
# [0]*inps relies on ndarray.__rmul__ to broadcast an all-zeros line.
plt.plot(inps, [0]*inps, color='black')
plt.legend()
plt.title("first derivative")
5 0.9889298892988929 -7.89382122379223e-06 10 1.019440194401944 -5.557421224877679e-07 20 1.035970359703597 9.036765468040109e-06 50 1.0463804638046381 -6.05624229221835e-06 100 1.049920499204992 -3.116431702476774e-06 500 1.052770527705277 7.448490694839105e-06 1000 1.053130531305313 7.55082733017165e-06 5000 1.053430534305343 -1.179938904916078e-07 10000 1.053460534605346 3.909266844126513e-06 50000 1.053490534905349 3.155058544646039e-06 500000 1.053490534905349 7.4604497740105025e-06
Text(0.5, 1.0, 'first derivative')
Let's check to make sure this works given $\gamma$ and $\beta$. Using the asymptotic values:
def qaoa_perf_2var_3xor(beta, gamma, D):
    """Depth-1 QAOA advantage for Max 3XOR as a function of both angles.

    Performance of the algorithm is 1/2 + (this value); it is scaled by
    sqrt(D) at the call sites.
    """
    cos2b = np.cos(2*beta)
    sin2b = np.sin(2*beta)
    cg = np.cos(gamma)
    sg = np.sin(gamma)
    bracket = 3*cos2b*cos2b*cg**D - sin2b*sin2b*cg**(3*D)
    return 0.5*sin2b*sg*bracket
# Cross-check: with the asymptotically optimal gamma = 1.0535/sqrt(D) and the
# matching beta, the two-variable formula reproduces C_{3,100} ~ 0.33.
# NOTE(review): this rebinds `gamma` (previously a sympy Symbol) to a float.
D = 100
gamma = 1.0535/D**0.5
beta = np.arcsin((1/(np.cos(gamma)**(2*D) + 3))**0.5)/2
print(beta, gamma)
print(qaoa_perf_2var_3xor(beta, gamma, D)*D**0.5)
0.29003649304891665 0.10535000000000001 0.3305431427004284
# C_{3,D} as a function of degree at fixed (asymptotic) angles.
Ds = range(3, 500, 1)
plt.plot(Ds, [qaoa_perf_2var_3xor(0.29, 1.0535/D**0.5, D)*D**0.5 for D in Ds])
plt.grid()
plt.xlabel("degree $D$")
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
plt.title("QAOA for Max 3XOR")
Text(0.5, 1.0, 'QAOA for Max 3XOR')
# Same plot at very large degree, to see the limiting value.
Ds = range(3000000, 1000000000, 100000)
plt.plot(Ds, [qaoa_perf_2var_3xor(0.29, 1.0535/D**0.5, D)*D**0.5 for D in Ds])
plt.grid()
plt.xlabel("degree $D$")
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
plt.title("QAOA for Max 3XOR")
Text(0.5, 1.0, 'QAOA for Max 3XOR')
What's the asymptotic value of QAOA on Max 3XOR?
def v(k):
    """Asymptotic (large-D) QAOA objective: k / sqrt(1 + 3*e^(k^2)).

    Uses sympy's `exp` (imported at the top of the file), so symbolic
    arguments work as well as numeric ones.
    """
    denominator = (1 + 3*exp(k*k))**0.5
    return k / denominator
# Find the stationary point of the asymptotic objective v(k) numerically
# by scanning its (lambdified) derivative for a zero crossing.
k = sympy.Symbol('k')
der = sympy.lambdify(k, sympy.Derivative(v(k), k, evaluate=True))
sympy.Derivative(v(k), k, evaluate=True)
inps = np.linspace(0, 2, 10000000)
plt.plot(inps, der(inps))
# [0]*inps broadcasts to an all-zeros baseline via ndarray.__rmul__.
plt.plot(inps, [0]*inps, color='black')
plt.grid()
pt = np.argmin(np.abs(der(inps)))
inps[pt], der(inps)[pt]
(1.0535025053502505, 3.2796303450766118e-09)
v(inps[pt])
What are the optimal values of QAOA for every degree?
# Optimize gamma per degree with scipy and check the optimum always beats
# the fixed-angle choice. `optimized_vals` is reused by later cells.
fmaker = lambda D: lambda i: -qaoa_perf_3xor(np.sin(i*D**-0.5), D)*D**0.5
Ds = range(2, 10000)
optimized_vals = [-minimize(fmaker(D), 1).fun for D in Ds]
comparison = [qaoa_perf_2var_3xor(0.29, 1.0535/D**0.5, D)*D**0.5 for D in Ds]
assert np.all(np.array([o-c for o,c in zip(optimized_vals, comparison)]) > 0)
plt.plot(Ds, optimized_vals)
plt.grid()
@lru_cache(maxsize=int(1e7))
def g(D, t):
    """CDF of Binomial(D, 1/2) at t: P[X <= t]."""
    return sum(binom(D, i) for i in range(t + 1)) / 2**D


@lru_cache(maxsize=int(1e7))
def Delta(D, t):
    """PMF of Binomial(D, 1/2) at t: P[X == t]."""
    return binom(D, t) / 2**D


@lru_cache(maxsize=int(1e7))
def general_improvement(D, t, k):
    """Improvement of the threshold algorithm on Max kXOR at degree D
    using threshold t; performance is 1/2 + this value."""
    pmf = Delta(D, t)
    cdf = g(D, t)
    return 0.25 * ((1 - 2*cdf + 2*pmf)**k - (1 - 2*cdf)**k)
What's the performance at $k=3$?
# Threshold-algorithm performance for k=3 across thresholds, scaled so
# curves at different degrees are comparable (a = (t - D/2)*sqrt(D)... axis).
for D in [5, 10, 20, 30, 40, 80, 100, 300, 1000]:
    inps = np.array(list(range(-1, D+2)))
    plt.plot((inps/D - 1/2)*D**0.5, [general_improvement(D, t, 3)*D**0.5 for t in inps], label="$D$=" + str(D))
plt.legend()
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
plt.xlabel("a: threshold is $D/2 + a/\sqrt{D}$")
plt.xlim(-2, 2)
plt.title("Performance of threshold algorithm for Max 3XOR")
plt.grid()
Finding the optimal threshold:
def get_argmax(D, k, negative_only=False):
    """Threshold t maximizing general_improvement(D, t, k).

    Candidates run over [-1, D+1]; with negative_only=True only
    thresholds strictly below D/2 are considered. Ties go to the
    smallest t (np.argmax returns the first maximum).
    """
    if negative_only:
        candidates = range(-1, int(D / 2))
    else:
        candidates = range(-1, D + 2)
    scores = [general_improvement(D, t, k) for t in candidates]
    return candidates[np.argmax(scores)]
# Optimal threshold per degree, expressed as the scaled offset a.
inps = range(1, 500)
plt.scatter(inps, [(get_argmax(D, 3) - D/2)/D**0.5 for D in inps], s=3)
plt.xlabel("degree $D$")
plt.ylabel("a: threshold is $D/2 + a/\sqrt{D}$")
plt.title("Optimal threshold for Max 3XOR")
plt.grid()
If we only look at thresholds below $D/2$, we get a clearer pattern:
# Same plot restricted to thresholds below D/2 (negative_only=True).
inps = range(1, 500)
plt.scatter(inps, [(get_argmax(D, 3, True) - D/2)/D**0.5 for D in inps], s=3)
plt.xlabel("degree $D$")
plt.ylabel("a: threshold is $D/2 + a/\sqrt{D}$")
plt.title("Optimal (negative) threshold for Max 3XOR")
plt.grid()
def get_max(D, k):
    """Best achievable improvement of the threshold algorithm on Max kXOR
    at degree D, maximized over every threshold t in [-1, D+1]."""
    return max(general_improvement(D, t, k) for t in range(-1, D + 2))
# Compare threshold algorithm with the per-degree optimized QAOA values.
inps = range(1, 500)
plt.plot(inps, [get_max(D, 3)*D**0.5 for D in inps], label='threshold')
plt.plot(inps, optimized_vals[:len(inps)], label='qaoa')
plt.legend()
plt.grid()
plt.title("Performance on Max 3XOR")
plt.xlabel("degree $D$")
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
Text(0, 0.5, '$C_{3,D}$: performance is $1/2 + C_{3,D}/\\sqrt{D}$')
Zooming in at small degree:
# Same comparison zoomed into small degrees.
inps = range(1, 100)
plt.plot(inps, [get_max(D, 3)*D**0.5 for D in inps], label='threshold')
plt.plot(inps, optimized_vals[:len(inps)], label='qaoa')
plt.legend()
plt.grid()
plt.title("Performance on Max 3XOR")
plt.xlabel("degree $D$")
plt.ylabel("$C_{3,D}$: performance is $1/2 + C_{3,D}/\sqrt{D}$")
Text(0, 0.5, '$C_{3,D}$: performance is $1/2 + C_{3,D}/\\sqrt{D}$')
When does QAOA win?
np.array(inps)[[get_max(D, 3)*D**0.5 - o < 0 for D, o in zip(inps, optimized_vals[:len(inps)])]]
array([ 1, 3, 4, 6, 8, 11, 13, 18, 20, 27])
The performance increases with $k$.
# Threshold-algorithm performance for k=2,3 across degrees.
inps = range(1, 200)
for k in range(2, 4):
    plt.plot(inps, [get_max(D, k)*D**0.5 for D in inps],label="threshold $k$=" + str(k))
plt.grid()
plt.legend()
plt.title("Performance on Max $k$XOR")
plt.xlabel("degree $D$")
plt.ylabel("$C_{k,D}$: performance is $1/2 + C_{k,D}/\sqrt{D}$")
Text(0, 0.5, '$C_{k,D}$: performance is $1/2 + C_{k,D}/\\sqrt{D}$')
# Threshold-algorithm performance for k=2..9.
inps = range(20, 300)
for k in range(2, 10):
    plt.plot(inps, [get_max(D, k)*D**0.5 for D in inps],label="threshold $k$=" + str(k))
plt.grid()
plt.legend()
plt.title("Performance on Max $k$XOR")
plt.xlabel("degree $D$")
plt.ylabel("$C_{k,D}$: performance is $1/2 + C_{k,D}/\sqrt{D}$")
Text(0, 0.5, '$C_{k,D}$: performance is $1/2 + C_{k,D}/\\sqrt{D}$')
# Threshold-algorithm performance for very large k.
inps = range(50, 200)
for k in range(100, 1000, 100):
    plt.plot(inps, [get_max(D, k)*D**0.5 for D in inps],label="threshold $k$=" + str(k))
plt.grid()
plt.legend()
plt.title("Performance on Max $k$XOR")
plt.xlabel("degree $D$")
plt.ylabel("$C_{k,D}$: performance is $1/2 + C_{k,D}/\sqrt{D}$")
Text(0, 0.5, '$C_{k,D}$: performance is $1/2 + C_{k,D}/\\sqrt{D}$')
What about the optimal threshold?
# Scaled optimal-threshold offset |a| per degree, for k=2..9.
inps = range(50, 500)
for k in range(2,10):
    plt.plot(inps, [abs(get_argmax(D, k) - D/2)/D**0.5 for D in inps], label='$k$=' + str(k))
plt.grid()
plt.legend()
plt.title("Optimal threshold for Max $k$XOR")
plt.xlabel("degree $D$")
plt.ylabel("a: threshold is $D/2 + a/\sqrt{D}$")
Text(0, 0.5, 'a: threshold is $D/2 + a/\\sqrt{D}$')
def f(alpha):
    """-erf(sqrt(2)*alpha): large-degree limit of the 1 - 2g(D, t) term
    when the threshold is D/2 + alpha*sqrt(D)."""
    root2 = 2 ** 0.5
    return -erf(root2 * alpha)
def grtD(alpha):
    """sqrt(2/pi) * e^(-2*alpha^2): large-degree limit of
    Delta(D, t)*sqrt(D) at threshold offset alpha."""
    prefactor = (2 / np.pi) ** 0.5
    return prefactor * np.e ** (-2 * alpha * alpha)
def large_degree_improvement(alpha, k):
    """Asymptotic (large-D) improvement of the threshold algorithm on
    Max kXOR at threshold offset alpha; relies on module-level f()."""
    cdf_term = f(alpha)
    return cdf_term ** k * (k / 2) * (-2 * alpha) / (k - 1)
# Solve grtD(a)/f(a) = -2a/(k-1) graphically: the optimal alpha is the
# first grid point where the sign of the difference flips.
inps = np.linspace(-1, -0.1, 100000)
plt.plot(inps, [grtD(i)/f(i) for i in inps])
for k in range(2, 7):
    plt.plot(inps, -2*inps/(k-1), label='$k$=' + str(k))
    # boolean-mask indexing: first alpha where the condition turns true
    alpha = inps[[(grtD(i)/f(i)+2*i/(k-1) > 0) for i in inps]][0]
    print('alpha (k=' + str(k) + '):', alpha)
    print('improvement*rtD:', large_degree_improvement(alpha, k))
plt.legend()
plt.grid()
alpha (k=2): -0.4384483844838448 improvement*rtD: 0.33648925802613555 alpha (k=3): -0.5661056610566105 improvement*rtD: 0.34753464968074316 alpha (k=4): -0.6461164611646116 improvement*rtD: 0.35948123367974816 alpha (k=5): -0.7040770407704077 improvement*rtD: 0.37007481521513996 alpha (k=6): -0.7493114931149312 improvement*rtD: 0.37934716987803424
We can use SciPy to find the optimal threshold and value.
# Objective for scipy: |grtD(a)/f(a) + 2a/(k-1)|, zero at the optimum.
fmaker = lambda k: lambda i: abs(grtD(i)/f(i)+2*i/(k-1))
for k in [2,3,4]:
    plt.plot(inps, fmaker(k)(inps), label='$k$=' + str(k))
plt.xlabel("alpha")
plt.ylabel("difference (optimal at zero)")
plt.grid()
plt.legend()
<matplotlib.legend.Legend at 0x7fda24d32280>
# Optimal alpha and asymptotic threshold performance for each k.
# NOTE(review): Nelder-Mead's tolerance options are 'fatol'/'xatol';
# 'ftol' is likely ignored with an OptimizeWarning — confirm.
ks = range(2, 200)
best_alphas = [minimize(fmaker(k), -1, method='Nelder-Mead', options={"ftol":1e-10}).x[0] for k in ks]
best_threshold_perf = [large_degree_improvement(a, k) for a,k in zip(best_alphas, ks)]
for i in range(18):
    print("k:", ks[i], "alpha:", best_alphas[i], "perf:", best_threshold_perf[i])
k: 2 alpha: -0.4384504927322263 perf: 0.33649336434608373 k: 3 alpha: -0.566110652685165 perf: 0.34754360662427763 k: 4 alpha: -0.646119719278067 perf: 0.35948708249696404 k: 5 alpha: -0.7040850546211003 perf: 0.37008946845499674 k: 6 alpha: -0.7493118736892935 perf: 0.37934788180340984 k: 7 alpha: -0.7862569719552992 perf: 0.3874867747324553 k: 8 alpha: -0.817399865761399 perf: 0.39471621046350674 k: 9 alpha: -0.8442601535469293 perf: 0.40120335548614106 k: 10 alpha: -0.8678350865840913 perf: 0.4070774997450675 k: 11 alpha: -0.8888135105371475 perf: 0.4124388540093239 k: 12 alpha: -0.9076905906200408 perf: 0.41736591797938943 k: 13 alpha: -0.924833858758211 perf: 0.4219210415038646 k: 14 alpha: -0.9405233614146706 perf: 0.4261543840769254 k: 15 alpha: -0.9549772322177886 perf: 0.43010682728185895 k: 16 alpha: -0.9683685787022114 perf: 0.43381207871486654 k: 17 alpha: -0.9808370001614093 perf: 0.4372982310985033 k: 18 alpha: -0.9924966532737016 perf: 0.4405889197979946 k: 19 alpha: -1.0034420400857929 perf: 0.44370420750299183
# Asymptotic threshold-algorithm constant C_k versus k.
plt.scatter(ks, best_threshold_perf)
plt.grid()
plt.xlabel("$k$")
plt.ylabel("$C_{k}$: performance is $1/2 + C_{k}/\sqrt{D}$")
plt.title("Best asymptotic performance of threshold algorithm for Max $k$XOR")
Text(0.5, 1.0, 'Best asymptotic performance of threshold algorithm for Max $k$XOR')
Comparing the expression with its large-degree asymptotic form:
@lru_cache(maxsize=int(1e7))
def long_range_improvement(D, t, k):
    """Large-degree (Gaussian) approximation of general_improvement:
    the binomial PMF/CDF are replaced by their normal-limit forms at
    threshold offset alpha = (t - D/2)/sqrt(D)."""
    alpha = (t - D/2) * D**-0.5
    density = (2/np.pi/D)**0.5 * np.e**(-2*alpha*alpha)
    cdf = 0.5 + 0.5*erf(2**0.5 * alpha) + 0.5 * density
    return 0.25 * ((1 - 2*cdf + 2*density)**k - (1 - 2*cdf)**k)
# Overlay the Gaussian approximation on the exact binomial expression.
D = 1000
k = 8
inps = np.array(list(range(-1, D+2)))
plt.plot((inps/D - 1/2)*D**0.5, [long_range_improvement(D, t, k)*D**0.5 for t in inps], label='asymptotic')
plt.plot((inps/D - 1/2)*D**0.5, [general_improvement(D, t, k)*D**0.5 for t in inps], label='generic')
plt.xlim(-2, 2)
plt.grid()
plt.xlabel("$a$: threshold is $D/2 + a/\sqrt{D}$")
plt.ylabel("$C_{k}$: performance is $1/2 + C_{k}/\sqrt{D}$")
plt.legend()
plt.title("asymptotic (large-degree) vs generic performance of threshold algorithm; $D$=" +str(D))
Text(0.5, 1.0, 'asymptotic (large-degree) vs generic performance of threshold algorithm; $D$=1000')
We have a relationship from $\gamma$ to $\beta$. So let's plot performance as a single-variable function:
# @lru_cache(maxsize=int(1e7))
def gamma_from_beta(b, k, D):
    """Optimal gamma for a given beta on Max kXOR at degree D."""
    ratio = (tan(2*b)**(-2) + 1) / k / D
    return atan(ratio**0.5)


def f(b, k, D):
    """Depth-1 QAOA objective at beta=b with gamma set by gamma_from_beta.

    Returns a complex scalar; callers extract .real (the imaginary part
    is mathematically zero for real inputs).
    """
    p, q = cos(2*b), sin(2*b)
    gma = gamma_from_beta(b, k, D)
    attenuated = q * cos(gma)**D
    plus = (p + 1j*attenuated)**k
    minus = (p - 1j*attenuated)**k
    return -0.25 * sin(gma) * 1j * (plus - minus)
# One figure per k: scan beta over (0, pi/4] for several degrees and
# report the best angle pair and performance found on the grid.
for k in [3, 4, 5, 7, 10, 100]:
    plt.figure()
    Ds = [2, 5, 10, 20, 100, 500, 1000, 10000]
    for D in Ds:
        # start slightly above 0 to avoid division by tan(0) in gamma_from_beta
        inps = np.linspace(1e-15, np.pi/4, 10000)
        outs = [complex(f(i, k, D)).real*D**0.5 for i in inps]
        plt.plot(inps/np.pi, outs, label="$D$=" + str(D))
        print('k:', k, "D:", D, 'beta/pi:', inps[np.argmax(outs)]/np.pi, 'gamma:', gamma_from_beta(inps[np.argmax(outs)], k, D), 'perf:', np.max(outs))
    plt.title("QAOA for Max $k$XOR, $k$=" + str(k))
    plt.grid()
    plt.ylabel("$C_{" + str(k) + ",D}$: performance is $1/2 + C_{" + str(k) + ",D}/\sqrt{D}$")
    plt.xlabel("$ beta / \pi$")
    plt.legend()
k: 3 D: 2 beta/pi: 0.09113411341134134 gamma: 0.6457133550969802 perf: 0.29410412861806556 k: 3 D: 5 beta/pi: 0.09178417841784199 gamma: 0.4422524799948612 perf: 0.3146569554910147 k: 3 D: 10 beta/pi: 0.09203420342034223 gamma: 0.3223806563933743 perf: 0.32268419004512405 k: 3 D: 20 beta/pi: 0.09215921592159236 gamma: 0.23167755729751552 perf: 0.32697175492210245 k: 3 D: 100 beta/pi: 0.09228422842284248 gamma: 0.10498972786802262 perf: 0.33054742740112547 k: 3 D: 500 beta/pi: 0.09230923092309251 gamma: 0.04707999959190555 perf: 0.33127896219847697 k: 3 D: 1000 beta/pi: 0.09230923092309251 gamma: 0.033302887938561315 perf: 0.33137080088961723 k: 3 D: 10000 beta/pi: 0.09230923092309251 gamma: 0.010534803199471842 perf: 0.3314535299965894 k: 4 D: 2 beta/pi: 0.07350735073507372 gamma: 0.6707077469306936 perf: 0.31273335595182417 k: 4 D: 5 beta/pi: 0.0744824482448247 gamma: 0.460203752247109 perf: 0.33641563218166026 k: 4 D: 10 beta/pi: 0.07485748574857508 gamma: 0.33568475060518954 perf: 0.34572486409324354 k: 4 D: 20 beta/pi: 0.07505750575057528 gamma: 0.2412987329831272 perf: 0.35071041090831745 k: 4 D: 100 beta/pi: 0.07523252325232545 gamma: 0.10938136825980022 perf: 0.3548750224898438 k: 4 D: 500 beta/pi: 0.07525752575257547 gamma: 0.04905838095594313 perf: 0.35572784016109366 k: 4 D: 1000 beta/pi: 0.07525752575257547 gamma: 0.034703431880728046 perf: 0.35583491989138766 k: 4 D: 10000 beta/pi: 0.07525752575257547 gamma: 0.010978155318363525 perf: 0.3559313779605138 k: 5 D: 2 beta/pi: 0.06238123812381262 gamma: 0.6914843004927927 perf: 0.3281876209826412 k: 5 D: 5 beta/pi: 0.06355635563556379 gamma: 0.4750964091162255 perf: 0.3546963040752536 k: 5 D: 10 beta/pi: 0.06398139813981421 gamma: 0.3468346065582889 perf: 0.36517453657251486 k: 5 D: 20 beta/pi: 0.06423142314231446 gamma: 0.2493408953847929 perf: 0.370799005813208 k: 5 D: 100 beta/pi: 0.06443144314431466 gamma: 0.11306015754691891 perf: 0.3755040521630587 k: 5 D: 500 beta/pi: 0.06445644564456468 gamma: 
0.050716447902231246 perf: 0.37646827287224693 k: 5 D: 1000 beta/pi: 0.06445644564456468 gamma: 0.0358773219470221 perf: 0.37658933555084917 k: 5 D: 10000 beta/pi: 0.06448144814481471 gamma: 0.011345631207865314 perf: 0.3766984221260389 k: 7 D: 2 beta/pi: 0.04885488548854911 gamma: 0.7241763672015993 perf: 0.3524320255598121 k: 7 D: 5 beta/pi: 0.05020502050205046 gamma: 0.49886966247200565 perf: 0.38384779513791106 k: 7 D: 10 beta/pi: 0.050705070507050955 gamma: 0.364531438704665 perf: 0.39638119437081776 k: 7 D: 20 beta/pi: 0.05098009800980123 gamma: 0.2622344618087329 perf: 0.4031346410999332 k: 7 D: 100 beta/pi: 0.05120512051205146 gamma: 0.11896535473529471 perf: 0.40879767143215534 k: 7 D: 500 beta/pi: 0.0512551255125515 gamma: 0.05335431842602078 perf: 0.4099596976860411 k: 7 D: 1000 beta/pi: 0.0512551255125515 gamma: 0.0377451050228159 perf: 0.4101056478639212 k: 7 D: 10000 beta/pi: 0.0512551255125515 gamma: 0.011941154273739346 perf: 0.4102371058046652 k: 10 D: 2 beta/pi: 0.037803780378038075 gamma: 0.7599211884000758 perf: 0.3786286364946791 k: 10 D: 5 beta/pi: 0.03922892289228949 gamma: 0.5252912290279241 perf: 0.4160734614929132 k: 10 D: 10 beta/pi: 0.039778977897790044 gamma: 0.38420289214024267 perf: 0.4311762570138485 k: 10 D: 20 beta/pi: 0.040079007900790345 gamma: 0.2765142408952303 perf: 0.4393511679241211 k: 10 D: 100 beta/pi: 0.04030403040304057 gamma: 0.125554693219044 perf: 0.44622581933117295 k: 10 D: 500 beta/pi: 0.040354035403540614 gamma: 0.0563186231943233 perf: 0.4476386226442099 k: 10 D: 1000 beta/pi: 0.040379037903790646 gamma: 0.03982022411775004 perf: 0.447816050716899 k: 10 D: 10000 beta/pi: 0.040379037903790646 gamma: 0.01259825382237099 perf: 0.44797610429101614 k: 100 D: 2 beta/pi: 0.007475747574757784 gamma: 0.9846191191253346 perf: 0.528330274340077 k: 100 D: 5 beta/pi: 0.008450845084508758 gamma: 0.7002022674161685 perf: 0.621413963206732 k: 100 D: 10 beta/pi: 0.008825882588259133 gamma: 0.5184745277920191 perf: 
0.6625250178308172 k: 100 D: 20 beta/pi: 0.009050905090509357 gamma: 0.3748139084978714 perf: 0.6856606454079309 k: 100 D: 100 beta/pi: 0.009225922592259532 gamma: 0.17092097273305687 perf: 0.7055912953067912 k: 100 D: 500 beta/pi: 0.009250925092509557 gamma: 0.07683141025610506 perf: 0.7097254589486673 k: 100 D: 1000 beta/pi: 0.009275927592759583 gamma: 0.05423536485283009 perf: 0.710256965197822 k: 100 D: 10000 beta/pi: 0.009275927592759583 gamma: 0.017165877947379102 perf: 0.7107338122175934
Find the maximum performance of QAOA for any k and D:
fmaker = lambda k, D: lambda i: -complex(f(i, k, D)).real*D**0.5

def get_qaoa_max(k, D):
    """Best depth-1 QAOA constant C_{k,D}: a coarse grid scan over beta
    in (0, pi/4] picks a starting point, then scipy's minimize refines
    it. Relies on the module-level f(b, k, D)."""
    grid = np.linspace(1e-15, np.pi/4, 100)
    scores = [complex(f(b, k, D)).real*D**0.5 for b in grid]
    start = grid[np.argmax(scores)]
    return -minimize(fmaker(k, D), start).fun
%%time
# IPython cell magic above: times this cell in a notebook.
# Best QAOA constant per degree for k=2..9.
for k in range(2, 10):
    Ds = range(2, 300)
    qaoa_kxor = [get_qaoa_max(k,D) for D in Ds]
    plt.plot(Ds, qaoa_kxor, label="$k$=" + str(k))
plt.grid()
plt.xlabel('degree $D$')
plt.legend()
plt.ylabel("$C_{k,D}$: performance is $1/2 + C_{k,D}/\sqrt{D}$")
plt.title("Best QAOA performance for Max $k$XOR")
plt.savefig('images/qaoa_best_maxkxor.png', dpi=300)
CPU times: user 7.26 s, sys: 15.8 ms, total: 7.28 s Wall time: 7.28 s
def qaoa_asymptotic_kxor(k, t):
    """Large-degree limit of the depth-1 QAOA objective for Max kXOR,
    parameterized by t where gamma = t/sqrt(D).

    Returns 0 when t*t*k < 1 (the square root below would be imaginary);
    otherwise a complex scalar whose real part is the objective.
    """
    if t*t*k < 1:
        return 0
    root = (t*t*k - 1)**0.5
    damp = np.e**(-t*t/2)
    pair_diff = (root + 1j*damp)**k - (root - 1j*damp)**k
    return -0.25*1j * t**(1-k) * k**(-k/2) * pair_diff
# Grid-search t for each k in the large-degree limit; collect the maxima
# (qaoa_asymptotic_maxes is reused by later cells).
qaoa_asymptotic_maxes = []
for k in range(2, 200):
    inps = np.linspace(0, 2, 100000)
    outs = [qaoa_asymptotic_kxor(k,i).real for i in inps]
    plt.plot(inps, outs, label="$k$=" + str(k))
    if k < 20 or k % 5 == 0:
        t = inps[np.nanargmax(outs)]
        print("k=", str(k), "Max:", np.nanmax(outs), "at t=", t, 'beta=', np.arcsin(1/t/k**0.5)/2)
    qaoa_asymptotic_maxes.append(np.nanmax(outs))
plt.grid()
plt.xlabel("$t$: gamma = $t/\sqrt{D}$")
plt.ylabel("$C_{k}$: performance is $1/2 + C_{k}/\sqrt{D}$")
# plt.legend()
k= 2 Max: 0.30326532976533727 at t= 1.000010000100001 beta= 0.3926940817237238 k= 3 Max: 0.33146272666069077 at t= 1.053510535105351 beta= 0.2900007252938392 k= 4 Max: 0.3559421057897224 at t= 1.097790977909779 beta= 0.2364444875259528 k= 5 Max: 0.3767105761875506 at t= 1.134771347713477 beta= 0.20254429534400661 k= 6 Max: 0.3945945274726129 at t= 1.1663716637166373 beta= 0.1787938843543141 k= 7 Max: 0.41025178270098717 at t= 1.1939319393193932 beta= 0.16105629649069836 k= 8 Max: 0.42415677803753304 at t= 1.2183321833218332 beta= 0.14721507819709934 k= 9 Max: 0.436652898412887 at t= 1.2402124021240213 beta= 0.1360585132329121 k= 10 Max: 0.4479939244835788 at t= 1.2600526005260053 beta= 0.12683796844261103 k= 11 Max: 0.45837176828801823 at t= 1.2781727817278172 beta= 0.1190684308018922 k= 12 Max: 0.46793469585178904 at t= 1.2948529485294853 beta= 0.1124149146297139 k= 13 Max: 0.4767994633283053 at t= 1.3103131031310313 beta= 0.10664016752303325 k= 14 Max: 0.48505957497866997 at t= 1.3246932469324693 beta= 0.10157385452874085 k= 15 Max: 0.4927910317504684 at t= 1.3381533815338154 beta= 0.09708471535533157 k= 16 Max: 0.500056421040816 at t= 1.3508135081350814 beta= 0.09307340836387287 k= 17 Max: 0.5069078845008667 at t= 1.3627336273362733 beta= 0.0894652652894109 k= 18 Max: 0.5133893126807297 at t= 1.3739937399373994 beta= 0.08619903774779461 k= 19 Max: 0.5195379937755544 at t= 1.3846938469384693 beta= 0.08322362503779358 k= 20 Max: 0.5253858710444761 at t= 1.3948539485394853 beta= 0.08050154038452571 k= 25 Max: 0.5509636811068565 at t= 1.4392943929439295 beta= 0.06970404853736062 k= 30 Max: 0.5719838963056846 at t= 1.4758147581475813 beta= 0.06201425968054553 k= 35 Max: 0.5898068021689621 at t= 1.5067750677506775 beta= 0.05620858907729072 k= 40 Max: 0.6052637696458998 at t= 1.5336353363533635 beta= 0.05164048078066199 k= 45 Max: 0.6188999875431959 at t= 1.5573355733557335 beta= 0.0479343674408014 k= 50 Max: 0.6310922401449064 at t= 1.5785357853578537 beta= 
0.04485524761360049 k= 55 Max: 0.6421117416978646 at t= 1.5976959769597696 beta= 0.04224851351195602 k= 60 Max: 0.6521601660819848 at t= 1.6151761517615175 beta= 0.04000718536146906 k= 65 Max: 0.6613915200872363 at t= 1.6312563125631256 beta= 0.03805489178122506 k= 70 Max: 0.669926043626932 at t= 1.6461164611646115 beta= 0.03633647307704454 k= 75 Max: 0.6778593942828799 at t= 1.6599565995659957 beta= 0.03480915754390776 k= 80 Max: 0.6852689052875331 at t= 1.6728767287672877 beta= 0.03344143529528098 k= 85 Max: 0.6922179770576518 at t= 1.6849968499684997 beta= 0.03220785721108846 k= 90 Max: 0.6987592263249378 at t= 1.6964169641696416 beta= 0.031088230478568527 k= 95 Max: 0.704936794282571 at t= 1.7072170721707216 beta= 0.03006638535265316 k= 100 Max: 0.710788070882332 at t= 1.7174371743717436 beta= 0.029129622001118922 k= 105 Max: 0.716344998965084 at t= 1.7271372713727138 beta= 0.02826700946054821 k= 110 Max: 0.7216350829403267 at t= 1.7363973639736396 beta= 0.0274690113931174 k= 115 Max: 0.7266821696568858 at t= 1.7452374523745238 beta= 0.026728428316747342 k= 120 Max: 0.7315070703718496 at t= 1.7536775367753676 beta= 0.026039100103206533 k= 125 Max: 0.7361280503094398 at t= 1.7617776177761777 beta= 0.025395142091060568 k= 130 Max: 0.7405612298183664 at t= 1.7695376953769537 beta= 0.024792281002654782 k= 135 Max: 0.7448209028950489 at t= 1.7769977699776998 beta= 0.02422624911952744 k= 140 Max: 0.7489198045223782 at t= 1.7841978419784197 beta= 0.02369329833341943 k= 145 Max: 0.752869329028446 at t= 1.7911179111791118 beta= 0.023190897250129763 k= 150 Max: 0.7566797084358322 at t= 1.7977979779797797 beta= 0.02271605441273773 k= 155 Max: 0.7603601678121659 at t= 1.8042580425804258 beta= 0.022266351514169155 k= 160 Max: 0.7639190512095958 at t= 1.8105181051810517 beta= 0.02183962849299841 k= 165 Max: 0.7673639301990055 at t= 1.8165581655816558 beta= 0.02143442076825826 k= 170 Max: 0.7707016962498999 at t= 1.822438224382244 beta= 0.0210484930497432 k= 175 Max: 
0.7739386396084962 at t= 1.8281182811828118 beta= 0.02068095205508028 k= 180 Max: 0.7770805158690821 at t= 1.8336383363833637 beta= 0.02033011075323807 k= 185 Max: 0.7801326069422092 at t= 1.839018390183902 beta= 0.01999465081359408 k= 190 Max: 0.7830997688198661 at t= 1.8442384423844238 beta= 0.019673797821672293 k= 195 Max: 0.7859864778822064 at t= 1.8493184931849318 beta= 0.019366430518416054
Text(0, 0.5, '$C_{k}$: performance is $1/2 + C_{k}/\\sqrt{D}$')
%%time
# IPython cell magic above: times this cell in a notebook.
# 3x2 grid of per-k comparisons between threshold algorithm and QAOA.
fig, axs = plt.subplots(3, 2, sharey=True, figsize=(10, 14))
axs = axs.reshape(-1,)
ks = [3, 4, 5, 7, 10, 20]
for i in range(len(ks)):
    ax = axs[i]
    k = ks[i]
    Ds = range(2,300)
    ax.plot(Ds, [get_max(D, k)*D**0.5 for D in Ds], label='threshold')
    ax.plot(Ds, [get_qaoa_max(k,D) for D in Ds], label='qaoa')
    ax.grid()
    ax.set_xlabel('degree $D$')
    ax.legend(loc='lower right')
    ax.set_ylabel("$C_{" + str(k) + ",D}$: performance is $1/2 + C_{" + str(k) + ",D}/\sqrt{D}$")
    ax.set_title("Best performance for Max $" + str(k) + "$XOR")
fig.tight_layout()
plt.savefig('images/comparison.png', dpi=300)
CPU times: user 7.64 s, sys: 208 ms, total: 7.85 s Wall time: 7.54 s
So for $k > 4$, the QAOA does better than the threshold algorithm.
This is true even in the large-degree limit.
max([q for q in qaoa_asymptotic_maxes if q != np.inf])
0.7882407077057314
# Asymptotic comparison with an inset zoom on small k.
fig, ax1 = plt.subplots()
ks = range(2, 200)
ax1.scatter(ks[:len(best_threshold_perf)], best_threshold_perf, label='threshold', s=3)
ax1.scatter(ks[:len(qaoa_asymptotic_maxes)], qaoa_asymptotic_maxes, label='qaoa', s=3)
ax1.grid()
ax1.set_xlabel("$k$")
ax1.set_ylabel("$C_{k}$: performance is $1/2 + C_{k}/\sqrt{D}$")
ax1.legend(loc=0)
ax1.set_title("Asymptotic performance of algorithms for Max $k$XOR")
# Following https://scipython.com/blog/inset-plots-in-matplotlib/
ax2 = plt.axes([0,0,1,1])
ip = InsetPosition(ax1, [0.69,0.095,0.25,0.3])
ax2.set_axes_locator(ip)
max_k_vals_inset = 8
ax2.grid()
ax2.scatter(ks[:max_k_vals_inset], best_threshold_perf[:max_k_vals_inset], s=30, label='threshold')
ax2.scatter(ks[:max_k_vals_inset], qaoa_asymptotic_maxes[:max_k_vals_inset], s=30, label='qaoa')
ax2.set_xticks(np.arange(2, max_k_vals_inset+2, 2))
# NOTE(review): set_*ticklabels with get_*ticks triggers a
# FixedFormatter/FixedLocator warning (seen in the cell output).
ax2.set_xticklabels(ax2.get_xticks(), backgroundcolor='w', fontsize=9)
ax2.set_yticklabels(ax2.get_yticks(), backgroundcolor='w', fontsize=9)
ax2.yaxis.set_major_formatter(StrMethodFormatter('{x:,.2f}')) # 2 decimal places
plt.savefig('images/asymptotic_comparison.png', dpi=300)
plt.show()
/tmp/ipykernel_78203/1739955069.py:21: UserWarning: FixedFormatter should only be used together with FixedLocator ax2.set_yticklabels(ax2.get_yticks(), backgroundcolor='w', fontsize=9)
def logfit(x, y, deg=1):
    """Least-squares polynomial fit of log(y) against log(x).

    Returns the polyfit coefficient array (highest degree first); with
    deg=1, coeffs[0] is the power-law exponent and coeffs[1] its log-scale
    intercept.
    """
    return np.polyfit(np.log(x), np.log(y), deg=deg)
# poly = np.poly1d(coeffs)
# yfit = lambda x: np.exp(poly(np.log(x)))
# plt.loglog(inps, yfit(inps))
# plt.loglog(inps, dataset)
# Fit power laws C_k ~ e^b * k^a to both algorithms' asymptotic constants.
dataset = qaoa_asymptotic_maxes
inps = ks[:len(dataset)]
qaoa_coeffs = logfit(inps, dataset, deg=1)
print(qaoa_coeffs)
dataset = best_threshold_perf
inps = ks[:len(dataset)]
threshold_coeffs = logfit(inps, dataset, deg=1)
print(threshold_coeffs)
[ 0.1917382 -1.23055679] [ 0.11544326 -1.15496742]
# Log-log view of the data against the fitted power laws.
plt.loglog(ks, best_threshold_perf, label='threshold')
plt.loglog(ks[:len(qaoa_asymptotic_maxes)], qaoa_asymptotic_maxes, label='qaoa')
plt.loglog(ks, [(np.e**threshold_coeffs[1])*k**threshold_coeffs[0] for k in ks], label='threshold fit')
plt.loglog(ks, [(np.e**qaoa_coeffs[1])*k**qaoa_coeffs[0] for k in ks], label='qaoa fit')
plt.legend()
plt.grid()
This suggests that QAOA's performance scales as $k^{0.2}$ and the threshold algorithm's performance scales as $k^{0.1}$.
# %%time
# %run parisi.ipynb
# Hard-coded results (presumably produced by parisi.ipynb — see the
# commented %run above); used as an upper bound to compare against.
ps = range(2, 35)
outs_scaled = np.array([1.0799048 , 1.15044002, 1.16740424, 1.17328777, 1.17560844,
                        1.17659504, 1.1770332 , 1.17723326, 1.17732629, 1.17737008,
                        1.17739087, 1.17740081, 1.17740557, 1.17740787, 1.17740898,
                        1.17740952, 1.17740978, 1.1774099 , 1.17740996, 1.17740999,
                        1.17741001, 1.17741002, 1.17741002, 1.17741002, 1.17741002,
                        1.17741002, 1.17741002, 1.17741002, 1.17741002, 1.17741002,
                        1.17741002, 1.17741002, 1.17741002])
parisi_clean_data = np.array([[p, x * p**0.5 / 2] for p,x in zip(ps, outs_scaled)])
# High-k limit of the Parisi value: sqrt(k * ln(2) / 2).
parisi_estimate = lambda x: (x*np.log(2)/2)**0.5
size = len(parisi_clean_data)
ks = np.array(list(range(2, 200)))
assert np.allclose(ks[:size], parisi_clean_data[:, 0])
plt.plot(ks[:len(best_threshold_perf)], best_threshold_perf, label='threshold', linewidth=3)
plt.plot(ks[:len(qaoa_asymptotic_maxes)], qaoa_asymptotic_maxes, label='qaoa', linewidth=3)
plt.plot(parisi_clean_data[:, 0], parisi_clean_data[:,1], label='optimal (parisi value)', color='green', linewidth=3)
plt.plot(ks, parisi_estimate(ks), label='parisi limit at high $k$', color='gray', alpha=0.3, linewidth=6)
plt.grid()
plt.xlabel("$k$")
plt.ylabel("$C_{k}$: performance is $1/2 + C_{k}/\sqrt{D}$")
plt.legend()
plt.title("Upper and lower bounds for Max $k$XOR")
plt.savefig('images/parisi_comparison.png', dpi=300)
print("Ending time:", time.ctime())
Ending time: Sat Sep 11 18:18:08 2021