Chapter 15: Introduction to Information Theory

Page no. 687, Problem no. 15.1

In [1]:
from math import log
# Six messages are given with probabilities
#   p(1)=0.3, p(2)=0.25, p(3)=0.15, p(4)=0.12, p(5)=0.1, p(6)=0.08
# For a 4-ary (r=4) Huffman code the number of messages must equal r + k*(r-1) for some
# integer k, i.e. 4, 7, 10, ...  With six messages we therefore add one dummy message of
# probability zero, p(7)=0, giving seven messages in all.

#The length L of this code is calculated as
  
p=[.3, .25, .15, .12, .1, .08, 0]## probabilities in descending order, dummy message last
n=len(p)## number of messages, including the dummy
l=[1, 1, 1, 2, 2, 2, 2]## 4-ary code length of each message, in the same order
L=0#
for i in range(0,n):
    L=L+(p[i]*l[i])

print "Length = %0.2f "%L,'4-ary digits'

# Entropy of source is calculated as
H=0#
for i in range(0,n-1):# skip the dummy message: its probability is 0 and 0*log(1/0) is taken as 0, while log(1/0) alone would blow up
    H=H+(p[i]*log(1.0/p[i]))# entropy in nats

H1=H/log(4)## convert from nats to 4-ary units
print "Entropy of source is, H = %0.2f "%H1,'4-ary units'

# Efficiency of code is given as 
N=H1/L#
print "Efficiency of code, N = %0.2f "%N
Length = 1.30  4-ary digits
Entropy of source is, H = 1.21  4-ary units
Efficiency of code, N = 0.93 
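As a cross-check on the hand-constructed code above, the cell below sketches a general r-ary Huffman construction using Python's heapq and recomputes the code lengths and average length for the seven messages (dummy included). The helper huffman_lengths is an assumed name for this sketch and is not part of the original notebook.

In [ ]:
import heapq

def huffman_lengths(probs, r=4):
    # r-ary Huffman code length of each message in probs; assumes the dummy
    # zero-probability messages needed to reach r + k*(r-1) symbols are already included
    lengths = [0]*len(probs)
    heap = [(p, i, [i]) for i, p in enumerate(probs)]   # (probability, tie-breaker, message indices)
    heapq.heapify(heap)
    while len(heap) > 1:
        merged_p, merged_ids = 0.0, []
        for _ in range(min(r, len(heap))):              # combine the r least probable nodes
            q, _, ids = heapq.heappop(heap)
            merged_p += q
            merged_ids += ids
        for i in merged_ids:                            # each merge adds one digit to these codewords
            lengths[i] += 1
        heapq.heappush(heap, (merged_p, len(probs)+len(heap), merged_ids))
    return lengths

p = [.3, .25, .15, .12, .1, .08, 0]
l = huffman_lengths(p, r=4)
print(l)                                                # [1, 1, 1, 2, 2, 2, 2], as used above
print("L = %0.2f 4-ary digits" % sum(pi*li for pi, li in zip(p, l)))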

Page no. 688, Example no. 15.2

In [2]:
from __future__ import division
from math import log
#For N=1
#Here we have two messages m1 and m2 with probabilities 0.8 and 0.2. The Huffman code for this source is simply 0 and 1.

#The length L of this code is calculated as
N=1#
p=[.8, .2]##enter probabilities in descending order
n=len(p)
l=[1, 1]##code length of individual message according to order
L=0#
for i in range(0,n):
    L=L+(p[i]*l[i])#

print "Length = %0.2f "%L

# Entropy of source is calculated as
H=0#
for i in range(0,n):
    H=H+(p[i]*log(1/p[i],2))

print "Entropy of source is, H = %0.2f bit"%H

# Efficiency of code is given as 
N1=H/L#
print "Efficiency of code, N = %0.2f "%N1

#For N=2
#There are four (2**N) two-message combinations; their probabilities are obtained by multiplying the individual message probabilities.
#The length L of this code is calculated as
N=2#
p=[0.64, 0.16, 0.16, 0.04]##enter probabilities in descending order
n=len(p)#
l=[1 ,2 ,3 ,3]##code length of individual message according to order
L1=0#
for i in range(0,n):
    L1=L1+(p[i]*l[i])#

L=L1/N## word length per message
print "Length = %0.2f "%L

# Efficiency of code is given as 
N2=H/L#
print "Efficiency of code, N = %0.2f "%N2


#For N=3
#There are eight (2**N) three-message combinations; their probabilities are obtained by multiplying the individual message probabilities.
#The length L of this code is calculated as
N=3#
p=[.512, .128, .128, .128, .032, .032, .032, .008]##enter probabilities in descending order
n=len(p)#
l=[1, 3 ,3 ,3, 5, 5 ,5 ,5]##code length of individual message according to order
L1=0
for i in range(0,n):
    L1=L1+(p[i]*l[i])#

L=L1/N## word length per message
print "Length = %0.2f "%L

# Efficiency of code is given as 
N3=H/L#
print "Efficiency of code, N = %0.2f "%N3
Length = 1.00 
Entropy of source is, H = 0.72 bit
Efficiency of code, N = 0.72 
Length = 0.78 
Efficiency of code, N = 0.93 
Length = 0.73 
Efficiency of code, N = 0.99 
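The three cases above can also be generated automatically. The sketch below builds the N-th extension of the source {0.8, 0.2} with itertools.product, Huffman-codes it, and prints the per-message word length and efficiency, which approaches 1 as N grows. The helper binary_huffman_lengths and the loop over N are assumptions of this sketch, not part of the original notebook.

In [ ]:
from itertools import product
from math import log
import heapq

def binary_huffman_lengths(probs):
    # binary Huffman code length of each symbol in probs (at least two symbols)
    lengths = [0]*len(probs)
    heap = [(p, i, [i]) for i, p in enumerate(probs)]   # (probability, tie-breaker, symbol indices)
    heapq.heapify(heap)
    while len(heap) > 1:
        p1, _, ids1 = heapq.heappop(heap)
        p2, _, ids2 = heapq.heappop(heap)
        for i in ids1 + ids2:                           # one more bit for every symbol under this merge
            lengths[i] += 1
        heapq.heappush(heap, (p1+p2, len(probs)+len(heap), ids1+ids2))
    return lengths

source = [0.8, 0.2]
H = sum(p*log(1.0/p, 2) for p in source)                # source entropy, about 0.72 bit/message
for N in (1, 2, 3, 4):
    ext = []                                            # probabilities of the N-th extension
    for combo in product(source, repeat=N):
        q = 1.0
        for p in combo:
            q *= p
        ext.append(q)
    L = sum(q*l for q, l in zip(ext, binary_huffman_lengths(ext)))/N
    print("N=%d  L=%0.3f bit/message  efficiency=%0.3f" % (N, L, H/L))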

Page no. 702, Problem no. 15.4

In [3]:
from __future__ import division
from math import log
from mpmath import quad


x0=(-1)#
x1=1##given
y0=(-2)#
y1=2##given
G=2##gain of amplifier
#the probability densities are given as p(x)=1/2 for |x|<1 and p(y)=1/4 for |y|<2, and zero otherwise
#p(x)=1/2 for -1<x<1
#p(y)=1/4 for -2<y<2
# hence entropies are given as
g1=(1./2)*log(2,2)## integrand p(x)*log2(1/p(x)) for |x|<1
g2=(1./4)*log(4,2)## integrand p(y)*log2(1/p(y)) for |y|<2
X=quad(lambda x:g1*1,[x0,x1])## H(x) = integral of p(x)*log2(1/p(x)) over (-1,1)
Y=quad(lambda y:g2*1,[y0,y1])## H(y) = integral of p(y)*log2(1/p(y)) over (-2,2)
print "entropy = %0.2f bits"%X
print "entropy = %0.2f bits"%Y
#Here the entropy of the random variable y is twice that of x. This result may come as a surprise, since knowledge of x uniquely determines y and vice versa (y=2x), so the average uncertainty of x and y should be identical.
# The reference entropy R1 for x is -log(dx), and the reference entropy R2 for y is -log(dy) (in the limit dx,dy -> 0):
# R1 = lim(dx->0) -log(dx)
# R2 = lim(dy->0) -log(dy)
# Since y=2x implies dy=2dx,
# R1-R2 = lim(dx,dy->0) log(dy/dx) = log2(2) = 1 bit
#Therefore, the reference entropy of x is 1 bit higher than the reference entropy for y. Hence we conclude that
print " if x and y have equal absolute entropies,their relative (differential) entropies must differ by 1 bit "
Entropy of x, H(x) = 1.00 bits
Entropy of y, H(y) = 2.00 bits
if x and y have equal absolute entropies, their relative (differential) entropies must differ by 1 bit
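The numerical integration above just recovers the closed form: a density that is uniform on (a, b) has differential entropy log2(b-a) bits, so stretching x by a factor of 2 adds exactly log2(2) = 1 bit. A minimal sketch of that check follows; the helper uniform_diff_entropy is an assumed name, not part of the original notebook.

In [ ]:
from math import log

def uniform_diff_entropy(a, b):
    # differential entropy, in bits, of a random variable uniform on (a, b)
    return log(b - a, 2)

Hx = uniform_diff_entropy(-1, 1)                # x uniform on (-1, 1): log2(2) = 1 bit
Hy = uniform_diff_entropy(-2, 2)                # y = 2x uniform on (-2, 2): log2(4) = 2 bits
print("H(x) = %0.2f bits" % Hx)
print("H(y) = %0.2f bits" % Hy)
print("H(y) - H(x) = %0.2f bit" % (Hy - Hx))    # the 1-bit difference discussed above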