using PyPlot
using NtToolBox
using LinearAlgebra  # for norm

# Random binary source: x[i] = 1 with probability p, x[i] = 2 with probability 1-p.
p = 0.1
n = 512
x = (rand(n) .> p) .+ 1

# Empirical histogram of the two symbols.
h = [sum(x .== 1); sum(x .== 2)]
h = h/sum(h)
print("Empirical p = $(h[1])")

# Empirical entropy of the source (the max avoids taking log2 of 0).
e = -sum(h .* log2.(max.(h, 1e-20)))
print("Entropy = $e")

# Huffman tree construction for a distribution h on m = 5 symbols.
h = [.1, .15, .4, .15, .2]
m = length(h)
T = Vector{Any}(undef, m)  # initial list of trees: one leaf per symbol
# We use the symbols i = 1,...,m (as strings) with the associated probabilities h[i].
for i in 1:m
    T[i] = (h[i], string(i))
end
# Iteratively merge the two least probable trees until a single tree remains.
while length(T) > 1
    sort!(T; by = first)  # sort according to the first entry of the tuples (the probabilities)
    t = tuple(T[1:2]...)
    q = T[1][1] + T[2][1]
    T = [T[3:end]; [(q, t)]]
end

# Strip the probabilities from the tree, keeping only the nested symbols.
function trim(T)
    T0 = T[2]
    if typeof(T0) == String
        return T0
    else
        return (trim(T0[1]), trim(T0[2]))
    end
end
T = trim(T[1])
plot_hufftree(T);

# Recursively assign a binary code word to each leaf:
# append "0" along the left branch, "1" along the right branch.
codes = Dict()
function huffman_gencode(T, codes, c)
    if typeof(T) == String  # test if T is a leaf: store its code word
        codes[T] = c
    else
        huffman_gencode(T[1], codes, string(c, "0"))
        huffman_gencode(T[2], codes, string(c, "1"))
    end
end
huffman_gencode(T, codes, "")
for e in keys(codes)
    println(string("Code of token ", e, ": ", codes[e]))
end

# Draw a vector x of n symbols distributed according to h.
n = 1024
x = rand_discr(h, m=n)

include("NtSolutions/coding_2_entropic/exo1.jl")
## Insert your code here.

# Compare the length of the code y with the entropy bound n*e.
e = -sum(h .* log2.(max.(h, 1e-20)))
println("Entropy bound = $(n*e)")
println("Huffman code length = $(length(y))")

# Decode y by walking down the tree from the root; each time a leaf is
# reached, emit its symbol and restart from the root.
x1 = []
T0 = T
for e in y
    if e == '0'
        T0 = T0[1]
    else
        T0 = T0[2]
    end
    if typeof(T0) == String
        push!(x1, T0)
        T0 = T
    end
end
err = norm(x - map(s -> parse(Int, s), x1))
print("Error (should be zero): $err")

# Coding blocks of symbols: a binary source with P(x = 1) = t.
t = .12
h = [t, 1-t]
n = 4096*2
x = (rand(n) .> t) .+ 1

# Pad x with 1s so that its length is a multiple of the block size q,
# then shift the symbols to {0, 1}.
q = 3
m = 2
n1 = Int(ceil(n/q)*q)
x1 = zeros(n1)
x1[1:length(x)] = x
x1[length(x)+1:end] .= 1
x1 = x1 .- 1

# Map each block of q binary symbols to a single symbol in {1,...,m^q}.
x2 = []
mult = [m^j for j in 0:q-1]
for i in 1:q:n1-1
    push!(x2, sum(x1[i:i+q-1] .* mult) + 1)
end

# Probability distribution of the blocks (the product of the marginals).
H = h
for i in 1:q-1
    Hold = H
    H = []
    for j in 1:length(h)
        append!(H, [e*h[j] for e in Hold])
    end
end
# The same distribution, computed more compactly with a Kronecker product.
H = h
for i in 1:q-1
    H = kron(H, h)
end

include("NtSolutions/coding_2_entropic/exo2.jl");
## Insert your code here.
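
# For reference, a minimal sketch of a possible solution to Exercise 1, to be
# run at the exo1 step above (the actual solution is loaded from
# NtSolutions/coding_2_entropic/exo1.jl): code x by concatenating the code
# word of each symbol into one bit string y. This assumes rand_discr returns
# numeric symbols in 1:m, hence the Int(...) cast to match the string keys of
# the codes dictionary.
y = join([codes[string(Int(s))] for s in x])
println("Huffman code length = $(length(y))")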
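
# Likewise, a minimal sketch of a possible solution to Exercise 2 (the actual
# solution is loaded from NtSolutions/coding_2_entropic/exo2.jl): build a
# Huffman tree for the block distribution H, code the block stream x2, and
# compare the resulting number of bits per original symbol with the entropy
# of the source. This reuses trim and huffman_gencode defined above and
# repeats the merge loop of the tree construction; the names TH, codesH and
# yH are placeholders chosen here to avoid clobbering earlier variables.
TH = Any[(H[i], string(i)) for i in 1:length(H)]
while length(TH) > 1
    sort!(TH; by = first)  # merge the two least probable trees
    TH = [TH[3:end]; [(TH[1][1] + TH[2][1], tuple(TH[1:2]...))]]
end
codesH = Dict()
huffman_gencode(trim(TH[1]), codesH, "")
yH = join([codesH[string(Int(s))] for s in x2])
println("Entropy = $(-sum(h .* log2.(max.(h, 1e-20))))")
println("Huffman bits per symbol = $(length(yH)/n)")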