Author: Cheuk Ting Li
from psitip import *
PsiOpts.setting(solver = "pyomo.glpk") # Set linear programming solver
PsiOpts.setting(repr_latex = True) # Jupyter Notebook LaTeX display
PsiOpts.setting(venn_latex = True) # LaTeX in diagrams
PsiOpts.setting(proof_note_color = "blue") # Reasons in proofs are blue
X1, X2 = rv_array("X", 1, 3)    # Sources X1, X2
Y1, Y2 = rv_array("Y", 1, 3)    # Reconstructions Y1, Y2
M1, M2 = rv_array("M", 1, 3)    # Messages M1, M2
R1, R2 = real_array("R", 1, 3)  # Rates R1, R2
U1, U2 = rv_array("U", 1, 3)    # Auxiliary random variables U1, U2
Q = rv("Q")                     # Time-sharing random variable
model = CodingModel() # Define distributed lossy compression
model.set_rate(M1, R1) # The rates of M1, M2 are R1, R2 resp.
model.set_rate(M2, R2)
model.add_edge(X1, X2) # X1, X2 are correlated sources
model.add_node(X1, M1,
label = "Enc 1") # Encoder 1 maps X1 to M1
model.add_node(X2, M2,
label = "Enc 2") # Encoder 2 maps X2 to M2
model.add_node(M1+M2, Y1+Y2,
label = "Dec") # Decoder maps M1,M2 to the reconstructions Y1,Y2
model.graph() # Draw diagram
# Automatic inner bound, gives the Berger-Tung inner bound [Berger 1978], [Tung 1978]
r = model.get_inner(convexify = True)
r
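# For reference, a sketch of the Berger-Tung inner bound written out explicitly,
# using auxiliaries U1, U2 and a time-sharing variable Q (this explicit form is
# given here for illustration; the automatically computed region above may use
# different auxiliary names and an equivalent but not identical set of constraints)
r_bt_in = alland([
    R1 >= I(X1 & U1 | U2+Q),
    R2 >= I(X2 & U2 | U1+Q),
    R1+R2 >= I(X1+X2 & U1+U2 | Q),
    H(Y1+Y2 | U1+U2+Q) == 0,   # Reconstructions are functions of (U1, U2, Q)
    indep(Q, X1+X2),           # Time-sharing variable independent of the sources
    markov(U1, X1+Q, X2+U2),   # U1 - (X1, Q) - (X2, U2)
    markov(U2, X2+Q, X1+U1)    # U2 - (X2, Q) - (X1, U1)
]).exists(U1+U2+Q)
r_bt_in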
# Write the Berger-Tung outer bound [Berger 1978], [Tung 1978]
r_bt_out = alland([
R1 >= I(X1+X2 & U1 | U2),
R2 >= I(X1+X2 & U2 | U1),
R1+R2 >= I(X1+X2 & U1+U2),
markov(Y1+Y2, U1+U2, X1+X2),
markov(U1, X1, X2),
markov(U2, X2, X1)
]).exists(U1+U2)
r_bt_out
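# Sanity check: the inner bound must imply the Berger-Tung outer bound. psitip can
# attempt to verify this by searching for suitable auxiliary substitutions (the
# automatic search is not guaranteed to succeed and may take some time)
(r >> r_bt_out).check_getaux_array()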
r_out = model.get_outer(future = False) # Automatic outer bound
# Prove that the Berger-Tung outer bound is indeed an outer bound, i.e., r_out implies r_bt_out
(r_out >> r_bt_out).check_getaux_array()
# Output the proof of the Berger-Tung outer bound
(r_out >> r_bt_out).proof()