# Author: Cheuk Ting Li
from psitip import *
PsiOpts.setting(
    solver = "ortools.GLOP",     # Set linear programming solver
    repr_latex = True,           # Jupyter Notebook LaTeX display
    venn_latex = True,           # LaTeX in diagrams
    proof_note_color = "blue",   # Reasons in proofs are blue
    solve_display_reg = True,    # Display claims in solve commands
    random_seed = 4321           # Random seed for example searching
)
X, Y, U = rv("X, Y, U")
M0, M1, M2 = rv_array("M", 3)
R0, R1, R2 = real_array("R", 3)
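# (Added note: in psitip, X + Y denotes the joint random variable (X, Y), so
# H(X + Y) is the joint entropy H(X,Y); H(X | Y) is the conditional entropy
# and I(X & Y) the mutual information.)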
model = CodingModel()  # Define the Gray-Wyner system
model.set_rate(M0, R0) # The rates of M0, M1, M2 are R0, R1, R2 resp.
model.set_rate(M1, R1)
model.set_rate(M2, R2)
model.add_edge(X, Y)   # X, Y are correlated sources
model.add_node(X+Y, M0+M1+M2,
               label = "Enc")   # Encoder maps X,Y to M0,M1,M2
model.add_node(M0+M1, X,
               label = "Dec 1") # Decoder 1 maps M0,M1 to X
model.add_node(M0+M2, Y,
               label = "Dec 2") # Decoder 2 maps M0,M2 to Y
model.graph() # Draw diagram
r = model.get_inner() # Automatic inner bound
r
# Although the above region does not look like the Gray-Wyner region [Gray-Wyner 1974],
# the two regions are actually equivalent.
# Write the Gray-Wyner region
r_gw = ((R0 >= I(X+Y&U)) & (R1 >= H(X|U)) & (R2 >= H(Y|U))).exists(U)
r_gw
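# As a quick sanity check (added here; not part of the original demo), the
# Gray-Wyner region implies the cut-set-style bounds R0 + R1 >= H(X),
# R0 + R2 >= H(Y) and R0 + R1 + R2 >= H(X,Y):
print(bool(r_gw >> (R0 + R1 >= H(X))))           # expected True
print(bool(r_gw >> (R0 + R2 >= H(Y))))           # expected True
print(bool(r_gw >> (R0 + R1 + R2 >= H(X + Y))))  # expected True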
# Prove r is the same region as r_gw
print(bool(r >> r_gw)) # r implies r_gw
print(bool(r_gw >> r)) # r_gw implies r
True
True
# Automatic outer bound with 1 auxiliary, coincides with inner bound
model.get_outer(1)
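# The statement that this outer bound coincides with the inner bound can be
# verified explicitly, following the pattern used for r and r_gw above
# (added check; r_out is just a local name for the outer bound):
r_out = model.get_outer(1)
print(bool(r >> r_out))  # expected True: inner bound lies within outer bound
print(bool(r_out >> r))  # expected True: outer bound lies within inner bound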
(model.get_outer() >> r).check_getaux_array() # Converse proof, print auxiliary
# Output the converse proof (is_proof = True gives a shorter proof)
# Lower the search level to avoid modifying the regions
with PsiOpts(auxsearch_level = 1):
    (model.get_outer(is_proof = True) >> r_gw).proof().display()
# Minimum sum rate is H(X, Y)
sumrate = r.minimum(R0 + R1 + R2, [R0, R1, R2])
sumrate
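# The stated value of the minimum sum rate can be confirmed directly, using
# the same bool(... == ...) pattern as the corner points below (added check):
print(bool(sumrate == H(X + Y)))  # expected True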
# Minimum weighted sum rate when R0 is counted twice is H(X)+H(Y)
wsumrate = r.minimum(R0 * 2 + R1 + R2, [R0, R1, R2])
wsumrate
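# Again, the stated weighted sum rate can be confirmed directly (added check):
print(bool(wsumrate == H(X) + H(Y)))  # expected True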
# Minimum symmetric rate
symrate = r.minimum(emax(R0, R1, R2), [R0, R1, R2])
symrate
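# A simple lower bound on the symmetric rate (added check): since
# 3 * max(R0, R1, R2) >= R0 + R1 + R2 >= H(X,Y), we expect
print(bool(symrate * 3 >= H(X + Y)))  # expected True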
# The corner point max R0 s.t. R0 + R1 = H(X), R0 + R2 = H(Y)
corner1 = (r & (R0 + R1 == H(X)) & (R0 + R2 == H(Y))).maximum(R0, [R0, R1, R2])
corner1
# This is the Gacs-Korner common information [Gacs-Korner 1973]
bool(corner1 == gacs_korner(X & Y))
True
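# A well-known fact (added check): the Gacs-Korner common information never
# exceeds the mutual information, K(X;Y) <= I(X;Y):
print(bool(gacs_korner(X & Y) <= I(X & Y)))  # expected True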
# The corner point min R0 s.t. R0 + R1 + R2 = H(X,Y)
corner2 = (r & (R0 + R1 + R2 == H(X+Y))).minimum(R0, [R0, R1, R2])
corner2
# This is Wyner's common information [Wyner 1975]
bool(corner2 == wyner_ci(X & Y))
True
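# Dually (added check), Wyner's common information is at least the mutual
# information, giving the ordering K(X;Y) <= I(X;Y) <= C(X;Y):
print(bool(wyner_ci(X & Y) >= I(X & Y)))  # expected True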
# The corner point min R0 s.t. R0 + R2 = H(Y), R1 = H(X|Y)
corner3 = (r & (R0 + R2 == H(Y)) & (R1 == H(X|Y))).minimum(R0, [R0, R1, R2])
corner3
# This is the necessary conditional entropy [Cuff-Permuter-Cover 2010] plus I(X;Y)
# We need the double Markov property [Csiszar-Korner 2011] to prove this
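# (Added note: the double Markov property states that if X - Y - Z and
# Y - X - Z are both Markov chains, then there is a common part W of X and Y,
# i.e. H(W|X) = H(W|Y) = 0, with (X,Y) - W - Z; psitip treats it as an
# extra assumption, supplied via dblmarkov().assumed() below.)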
with dblmarkov().assumed():
    print(bool(corner3 <= H_nec(Y | X) + I(X & Y)))
    print(bool(corner3 >= H_nec(Y | X) + I(X & Y)))
True
True