# Author: Cheuk Ting Li
from psitip import *
# Configure psitip's global solver and display options.
PsiOpts.setting(
solver = "ortools.GLOP", # Set linear programming solver
repr_latex = True, # Jupyter Notebook LaTeX display
venn_latex = True, # LaTeX in diagrams
proof_note_color = "blue", # Reasons in proofs are blue
random_seed = 4321 # Random seed for example searching
)
# Declare the random variables and real-valued rate variables of the problem.
X, Y, U = rv("X, Y, U") # X, Y: sources; U: auxiliary used later in the WAK region
M1, M2 = rv_array("M", 1, 3) # Messages M1, M2 (indices 1..2)
R1, R2 = real_array("R", 1, 3) # Rates R1, R2 (indices 1..2)
# Build the Wyner-Ahlswede-Korner (WAK) lossless source coding network:
# two encoders observe correlated sources X and Y; the decoder must recover Y.
model = CodingModel() # Define WAK network [Wyner 1975], [Ahlswede-Körner 1975]
model.set_rate(M1, R1) # The rates of M1, M2 are R1, R2 resp.
model.set_rate(M2, R2)
model.add_edge(X, Y) # X, Y are correlated sources
model.add_node(X, M1,
label = "Enc 1") # Encoder 1 maps X to M1
model.add_node(Y, M2,
label = "Enc 2") # Encoder 2 maps Y to M2
model.add_node(M1+M2, Y,
label = "Dec") # Decoder maps M1,M2 to Y
model.graph() # Draw diagram
r = model.get_inner() # Automatic inner bound
r # Display the region (rendered as LaTeX in Jupyter)
# Although the above region does not look like the WAK region
# [Wyner 1975], [Ahlswede-Körner 1975], they are actually equivalent.
# Write the WAK region explicitly in its classical form.
r_wak = alland([
R1 >= I(U & X), # Encoder 1's rate covers I(U;X)
R2 >= H(Y | U), # Encoder 2's rate covers H(Y|U)
markov(U, X, Y) # U -> X -> Y forms a Markov chain
]).exists(U) # U is an auxiliary random variable (existentially quantified)
r_wak # Display the region (rendered as LaTeX in Jupyter)
# Prove r is the same region as r_wak by showing implication in both
# directions. Requires a higher level of searching to prove.
with PsiOpts(auxsearch_level = 10):
    # NOTE: these statements were unindented in the original and the cell
    # output "True True" had been pasted in as code (a syntax error);
    # both are fixed here.
    print(bool(r >> r_wak))   # inner bound implies WAK region
    print(bool(r_wak >> r))   # WAK region implies inner bound
# Output: True True

# Automatic outer bound with 1 auxiliary, coincides with inner bound
model.get_outer(1)
bool(model.get_outer() >> r_wak) # Converse proof
# Output: True
# Output a human-readable converse proof (is_proof = True for shorter proof).
(model.get_outer(is_proof = True) >> r_wak).proof()