Chapter 11 - Information theory

Example 1 - pg 488

In [1]:
#calculate the information content in all symbols
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
Px_1=1./2;#probability 1
Px_2=1./4;#probability 2
Px_3=1./8;#probability 3
Px_4=1./8;#probability 4

#calculations
Ix_1 = log2(1/(Px_1))#information content in first probability
Ix_2 = log2(1/(Px_2))#information content in first probability
Ix_3 = log2(1/(Px_3))#information content in first probability
Ix_4 = log2(1/(Px_3))#information content in first probability

#results
print "i. Information content of first symbol (bits) = ",Ix_1
print "ii. Information content of second symbol (bits) = ",Ix_2
print "iii. Information content of third  symbol (bits) = ",Ix_3
print "iV. Information content of fourth symbol (bits)",Ix_4
i. Information content of first symbol (bits) =  1.0
ii. Information content of second symbol (bits) =  2.0
iii. Information content of third  symbol (bits) =  3.0
iV. Information content of fourth symbol (bits) 3.0

Example 2 - pg 488

In [2]:
#calculate the amount of information
import math
from math import log
#given
Px_i = 1./4#probability of a symbol

#calculation
Ix_i = (log(1/Px_i))/log(2)#formula for amount of information of a symbol

#result
print "i. Amount of information (bits) = ",Ix_i
i. Amount of information (bits) =  2.0

Example 3 - pg 489

In [3]:
#calculate the amount of information
#given
import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#since there are only two binary levels i.e. 1 or 0. Since, these two binary levels occur with equal             likelihood of occurrence will be
Px_1 = 1./2#probability of zero level
Px_2 = 1./2#probability of first level

#calculations
Ix_1 = log2(1/Px_1)#amount of information of zero level with base 2
Ix_2 = log2(1/Px_2)#amount of information of first level with base 2
Ix_1= log(1/Px_1)/log(2)#amount of information content with base 10
Ix_2 = Ix_1 

#result
print "i.Amount of information content wrt binary PCM 0 (bit) = ",Ix_1
print "ii.Amount of information content wrt binary PCM 1 (bit) = ",Ix_2
i.Amount of information content wrt binary PCM 0 (bit) =  1.0
ii.Amount of information content wrt binary PCM 1 (bit) =  1.0

Example 4 - pg 489

In [4]:
#calculate the amount of information
import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#given
Px_1 = 1./4#probability wrt to binary PCM '0'
Px_2 = 3./4#probability wrt to binary PCM '1'

#calculations
Ix_1 = log2(1/Px_1)#amount of information of zero level with base 2
Ix_2 = log2(1/Px_2)#amount of information of first level with base 2
Ix_1= log(1/Px_1)/log(2)#amount of information content with base 10
Ix_2= log(1/Px_2)/log(2)#amount of information content with base 10

#results
print "i.Amount of information carried wrt to binary PCM 0 (bits) = ",Ix_1
print "ii.Amount of information carried wrt to binary PCM 1 (bits) = ",round(Ix_2,3)
i.Amount of information carried wrt to binary PCM 0 (bits) =  2.0
ii.Amount of information carried wrt to binary PCM 1 (bits) =  0.415

Example 9 - pg 492

In [5]:
#calculate the entropy and Amount of information
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y

Px_1 = .4#probability of first symbol
Px_2 = .3#probability of second symbol
Px_3 = .2#probability of third symbol
Px_4 = .1#probability of fourth symbol

#calculations
H_X = -Px_1*log2(Px_1)-Px_2*log2(Px_2)-Px_3*log2(Px_3)-Px_4*log2(Px_4);#entropy
Px1x2x1x3 = Px_1*Px_2*Px_1*Px_3;#product of probabilities
Ix1x2x1x3 =-log2(Px1x2x1x3);#information of four symbols
Px4x3x3x2 = Px_4*Px_3*Px_3*Px_2;#product of probabilities
Ix4x3x3x2 = -log2(Px4x3x3x2);#information of four symbols

#results
print "i.Entropy (bits/symbol) = ",round(H_X,2)
print "ii.Amount of information contained in x1x2x1x3 (bits/symbol) = ",round(Ix1x2x1x3,2)
print "Thus,Ix1x2x1x3 < 7.4[=4*H_X]bits/symbol"
print "iii.Amount of information contained in x4x3x3x2 (bits/symbol) = ",round(Ix4x3x3x2,2)
print "\nThus we conclude that\nIx4x3x3x2 > 7.4[=4*H_X]bits/symbol"
i.Entropy (bits/symbol) =  1.85
ii.Amount of information contained in x1x2x1x3 (bits/symbol) =  6.7
Thus,Ix1x2x1x3 < 7.4[=4*H_X]bits/symbol
iii.Amount of information contained in x4x3x3x2 (bits/symbol) =  9.7

Thus we conclude that
Ix4x3x3x2 > 7.4[=4*H_X]bits/symbol

Example 12 - pg 495

In [6]:
#calculate the average rate of information convyed
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y

n = 2.*10**6#elements od black and white TV picture
m = 16.#brightness levels of black and white TV picture
o = 32.#repeated rate of pictures per second 

#calculations
Px_i = 1./m#probability of  brightness levels of picture
H_X = 0;
for  i in range (0,15):
	H_Xi = (-1./(1./Px_i))*log2(1./(1./Px_i));
	H_X = H_X +H_Xi

r = n*o#rate of symbols generated 
R = r*math.ceil(H_X)#average rate of information convyed 
 
#results
print "i. Average rate of information convyed (Mbits/seconds) = ",R/10**6
i. Average rate of information convyed (Mbits/seconds) =  256.0

Example 13 - pg 495

In [7]:
#calculate the average information rate
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y

t_dot = .2#duration of dot symbol
t_dash = .6#duration of dash symbol
t_space = .2#time between the symbols
#wkt sum of the probability is 1 i.e P_dot + P_dash = 1 hence
#P_dot = 2*P_dash weget 
P_dot = 2./3#probality of dot symbol
P_dash = 1./3#probality of dash symbol

#calculations 
H_X = -P_dot*log2(P_dot)-P_dash*log2(P_dash);#entropy
T_s = P_dot*t_dot + P_dash*t_dash +t_space;#average time per symbol
r = 1/T_s;#average symbol rate 
R = r*H_X;#average information rate of the telegraph sourece

#result
print "i.The average information rate of the telegraph source (bits/seconds) = ",round(R,3)
i.The average information rate of the telegraph source (bits/seconds) =  1.722

Example 14 - pg 496

In [8]:
#calculate the entropy and information rate
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
#given symbols are equally likely all the symbols the probabilities are same
Px_1 = 1./8;#probability of first symbol
Px_2 = 1./8;#probability of second symbol
Px_3 = 3./8;#probability of third symbol
Px_4 = 3./8;#probability of fourth symbol

r = 2#average symbol rate from problem 11.14

#calculaitons
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4);#entropy
R = H_X*r;#information rate

#results
print "i.Entropy (bits/symbol) = ",round(H_X,1)
print "ii.The information rate of all symbols (f_m bits/seconds) = ", round(R,1)
i.Entropy (bits/symbol) =  1.8
ii.The information rate of all symbols (f_m bits/seconds) =  3.6

Example 15 - pg 497

In [9]:
#calculate the information rate
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
#given symbols are equally likely all the symbols the probabilities are same
Px_1 = 1./4;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./4;#probability of third symbol
Px_4 = 1./4;#probability of fourth symbol
r = 2#average symbol rate from problem 11.14

#calculaitons
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4);#entropy
R = H_X*r;#information rate

#results
print "i.The information rate of all symbols (f_m bits/seconds) = ", R
i.The information rate of all symbols (f_m bits/seconds) =  4.0

Example 16 - pg 498

In [10]:
#calculate the Entropy and rate of information
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
Px_1 = 1./2;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./8;#probability of third symbol
Px_4 = 1./16;#probability of fourth symbol
Px_4 = 1./16;#probability of fifth symbol
T_b = 1*10**-3#time required for emittion of each symbol
r = 1./(T_b)#symbol rate

#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_4*log2(1/Px_4);
R = r*H_X;#information rate

#results
print "i.Entropy of five symbols (bits/symbol) = ",H_X
print "ii.Rate of information (bits/sec) = ",R
i.Entropy of five symbols (bits/symbol) =  1.875
ii.Rate of information (bits/sec) =  1875.0

Example 17 - pg 498

In [11]:
#calculate the rate of information
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
Px_1 = 1./2;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./8;#probability of third symbol
Px_4 = 1./16;#probability of fourth symbol
Px_5 = 1./16;#probability of fifth symbol
r = 16#outcomes per second

#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_5*log2(1/Px_5);
R = r*H_X;#information rate

#result
print "Rate of information (bits/sec) = ",R
Rate of information (bits/sec) =  30.0

Example 18 - pg 499

In [12]:
#calculate the rate of information
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
Px_1 = 1./4;#probability of first symbol
Px_2 = 1./5;#probability of second symbol
Px_3 = 1./5;#probability of third symbol
Px_4 = 1./10;#probability of fourth symbol
Px_5 = 1./10;#probability of fifth symbol
Px_6 = 1./20;#probability of sixth symbol
Px_7 = 1./20;#probability of seventh symbol
Px_8 = 1./20;#probability of eigith symbol
f_m = 10*10**3#freuency of tranamitting symbol

#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_5*log2(1/Px_5) + Px_6*log2(1/Px_6)+ Px_7*log2(1/Px_7)+ Px_8*log2(1/Px_8);#entropy
f_s = 2*f_m#sampling frequency
r = f_s#sampling frequency equal to rate of transmission
R = r*H_X;#information rate

#result
print "Rate of information (bits/sec) = ",round(R,0)
print "Note:Their mistake in calculation of H_X in textbook"
Rate of information (bits/sec) =  54829.0
Note:Their mistake in calculation of H_X in textbook

Example 19 - pg 502

In [14]:
#calculate the channel matrix
import numpy
#given
#from fig
P_X = numpy.matrix([.5, .5])#x matrix
P_Xd = numpy.matrix([[.5, 0],[0, .5]])#diagonal x matrix
#calculations
P_YX = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig
P_Y = P_X*P_YX#y matrix
P_XY = P_Xd * P_YX#xy  matrix

#results
print "i.Channel matrix of the channelP_YX "
print(P_YX)
print "ii.a.P(y1) =",P_Y[0,0]
print "   b.P(y2) = ",P_Y[0,1]
print "iii.a.P(x1,y2) = ",P_XY[0,1]
print "     b.P(x2,y1) = ",P_XY[1,0]
i.Channel matrix of the channelP_YX 
[[ 0.9  0.1]
 [ 0.2  0.8]]
ii.a.P(y1) = 0.55
   b.P(y2) =  0.45
iii.a.P(x1,y2) =  0.05
     b.P(x2,y1) =  0.1

Example 20 - pg 503

In [15]:
#calculate the Channel matrix
import numpy
#given
P_X = numpy.matrix([.5, .5])#x matrix
#calculations
P_YX = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig\
P_ZY = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig
P_Y = P_X *P_YX#y matrix
P_ZX = P_YX * P_ZY#zx  matrix
P_Z = P_X *P_ZX#z matrix


#results
print "i.Channel matrix of the channelP_ZX "
print(P_ZX)
print("Matrix P(Z)")
print(P_Z)
print "a.P(Z1) = ",P_Z[0,0]
print "b.P(Z2) = ",P_Z[0,1]
i.Channel matrix of the channelP_ZX 
[[ 0.83  0.17]
 [ 0.34  0.66]]
Matrix P(Z)
[[ 0.585  0.415]]
a.P(Z1) =  0.585
b.P(Z2) =  0.415

Example 21 - pg 504

In [16]:
#calculate the probability associated with channel outputs
#given
import numpy
P_X = numpy.matrix([.5, .5])#x matrix
P_YX = numpy.matrix([[.8, .2, 0], [0, .2, .8]]);#yx matrix representation of given fig

#calculations
P_Y = P_X*P_YX;

#results
print "probability associated with the channel outputs for p=.2 is"
print(P_Y)
print "a.P(Y1) = ",P_Y[0,0]
print "b.P(Y2) = ",P_Y[0,1]
print "C.P(Y3) = ",P_Y[0,2]
probability associated with the channel outputs for p=.2 is
[[ 0.4  0.2  0.4]]
a.P(Y1) =  0.4
b.P(Y2) =  0.2
C.P(Y3) =  0.4

Example 28 - pg 504

In [17]:
#calculate the I_XY value
import numpy
import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#given
#wkt P_Y = P_X*P_YX from previous problems
alfa = .5
P_1 = .1#probability for first case
P_2 = .5#probability for second case

#calculations
P_X = numpy.matrix([alfa, alfa]);
#first case
P_YX = ([[1-P_1, P_1],[P_1, 1-P_1]]);
P_Y1 = P_X*P_YX;
H_Y1 = -P_Y1[0,0]*log2(P_Y1[0,0])-P_Y1[0,1]*log2(P_Y1[0,1]);
Q_1 = P_1*log2(P_1)  + (1-P_1)*log2(1-P_1)#from proof
I_XY1 =  1 + Q_1;
#second case
P_YX = ([[1-P_2, P_2],[P_2, 1-P_2]]);
P_Y2 = P_X*P_YX;
H_Y2 = -P_Y2[0,0]*log2(P_Y2[0,0])-P_Y2[0,1]*log2(P_Y2[0,1]);
Q_2 = P_2*log2(P_2)  + (1-P_2)*log2(1-P_2)#from proof 
I_XY2 =  1 + Q_2;

#results
print "I_XY for the first case = ",round(I_XY1,3)
print "I_XY for the second case = ",round(I_XY2,2)
I_XY for the first case =  0.531
I_XY for the second case =  0.0

Example 32 - pg 518

In [18]:
#calculate the Entropy in all cases
import math
from math import log

def log2(x):
	y=log(x)/log(2)
 	return y
#given
a1 = 1.
a2 = 2.
a3 = .5

#calculations
H_X1 = log2(a1);#Entropy for first case
H_X2 = log2(a2);#Entropy for second case
H_X3 = log2(a3);#Entropy for third case

#results
print "i.Entropy for first case =  ",H_X1
print "ii.Entropy for second case = ",H_X2
print "iii.Entropy for third case = ",H_X3
i.Entropy for first case =   0.0
ii.Entropy for second case =  1.0
iii.Entropy for third case =  -1.0

Example 35 - pg 520

In [19]:
#calculate the capacity of channel

import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#given
B = 4000.#bandwidth of AWGN channel
S = .1*10**-3#power of signal
neta = 2*10**-12#spectral dencity 
N = neta*B;#power 

#calculations 
C = B * log2(1 + (S/N));#capacity of channel

#result
print "Capacity of channel (b/s) = ",round(C,0)
Capacity of channel (b/s) =  54439.0

Example 36 - pg 521

In [20]:
#calculate the information rate, bandwidth and S/N ratio
import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#given
fm=4000. #Hz
tim=1.25
Pi=1/256.
SN=100.
#calculations 
fs=2*fm
r=fs*tim
H=log2(1/Pi)
R=r*H
C = r* log2(1 + SN);#capacity of channel
SN2=(1/Pi-1)
B2=R/log2(1+SN)
#result
print "information rate of source (kb/s) = ",R/1000.
print "Capacity of channel (b/s) = ",round(C,0)
print "Final S/N ratio = ",SN2
print "bandwidth required  (kHz) = ",round(B2/1000.,0)
information rate of source (kb/s) =  80.0
Capacity of channel (b/s) =  66582.0
Final S/N ratio =  255.0
bandwidth required  (kHz) =  12.0

Example 37 - pg 524

In [21]:
#calculate the efficiency of code and code redundancy

import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
#given
Px_1 = 0.9#probability of first symbol
Px_2 = 0.1#probability of second symbol
n1 = 1.#length of the code for x_1
n2 =1.#length of code for x_2

#calculations
#we know that the average code length L per symbol
L = Px_1*n1 + Px_2*n2#code length
H_X = -Px_1*log2(Px_1) - Px_2*log2(Px_2) #entropy
neta = H_X/L#efficiency 
neta1 = neta*100#neta in percentage
gama = 1 - neta#redundancy
gama1 = gama*100#gama in percentage

#results
print "i.Efficiency of code (percent) = ",round(neta1,1)
print "ii.Code redundancy (percent) =  ",round(gama1,1)
i.Efficiency of code (percent) =  46.9
ii.Code redundancy (percent) =   53.1

Example 38 - pg 524

In [22]:
#calculate the efficiency of code and code redundancy
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
Px_1 = 0.81#probability of first symbol
Px_2 = .09#probability of second symbol
Px_3 = .09#probability of third symbol
Px_4 = 0.01#probability of forth symbol
n1 = 1.#length of code for a_1
n2 =2.#length of code for a_2
n3 = 3.#length of code for a_3
n4 = 3.#length of code for a_4

#calculations
#we know that the average code length L per symbol
L = Px_1*n1 + Px_2*n2 + Px_3*n3 + Px_4*n4 #code length
H_X = -Px_1*log2(Px_1) - Px_2*log2(Px_2) - Px_3*log2(Px_3) - Px_4*log2(Px_4)#entropy 
neta = H_X/L#efficiency 
neta1 = neta*100#neta in percentage
gama = 1 - neta#redundancy
gama1 = gama*100#gama in percentage

#results
print "i.Efficiency of code (percent) = ",round(neta1,1)
print "ii.Code redundancy (percent) =  ",round(gama1,1)
i.Efficiency of code (percent) =  72.7
ii.Code redundancy (percent) =   27.3

Example 44 - pg 529

In [23]:
#calculate the efficiency 
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = 1./2#probability of first symbol
P_x2 = 1./4#probability of second symbol
P_x3 = 1./8#probability of third symbol
P_x4 = 1./8#probability of fouth symbol
n1 = 1.
n2 = 2.
n3 = 3.
n4 = 3.

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4;
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4;
neta = H_x/L;
P_neta = neta*100#efficiency in percentage

#results
print "Efficiency = ",neta
print "Efficiency in percentage (percent) = ",P_neta
Efficiency =  1.0
Efficiency in percentage (percent) =  100.0

Example 45 - pg 532

In [24]:
#calculate the efficiency using shannon code and huffman code
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = .2#probability of first signal
P_x2 = .2#probability of second signal
P_x3 = .2#probability of third signal
P_x4 = .2#probability of fourth signal
P_x5 = .2#probability of fifth signal
n1 = 2.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 3.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4 +  P_x5*I_x5;#entropy
L1 = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;
neta1 = H_x/L1;
P_neta1 = neta1*100#efficiency in percentage using Shannon Fano code
L2 = P_x1*n1 + P_x2*n2 + P_x3*n3 +P_x4*n4 +P_x5*n5
neta2 = H_x/L2;
P_neta2 = neta2*100#efficiency in percentage using huffman code

#results
print "Efficiency in percentage using Shannon Fano code (percent) = ",round(P_neta1,1)
print "Efficiency in percentage using huffman code (percent) = ",round(P_neta2,1)
Efficiency in percentage using Shannon Fano code (percent) =  96.7
Efficiency in percentage using huffman code (percent) =  96.7

Example 46 - pg 532

In [26]:
#calculate the efficiency using shannon code and huffman code
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = .4#probability of first signal
P_x2 = .19#probability of second signal
P_x3 = .16#probability of third signal
P_x4 = .15#probability of fourth signal
P_x5 = .1#probability of fifth signal
n1 = 1.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 3.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4 +  P_x5*I_x5;#entropy
L1 = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;
neta1 = H_x/L1;
P_neta1 = neta1*100#efficiency in percentage using Shannon Fano code
L2 = P_x1*1 + (P_x2 + P_x3 +P_x4 +P_x5 )*3
neta2 = H_x/L2;
P_neta2 = neta2*100#efficiency in percentage using huffman code

#results
print "Efficiency in percentage using Shannon Fano code (percent) = ",round(P_neta1,1)
print "Efficiency in percentage using huffman code (percent) = ",round(P_neta2,1)
print "Note: There is mistake in the textbook in calculation of L using SHannon Fano code"
Efficiency in percentage using Shannon Fano code (percent) =  116.2
Efficiency in percentage using huffman code (percent) =  97.7
Note: There is mistake in the textbook in calculation of L using SHannon Fano code

Example 47 - pg 532

In [27]:
#calculate the Efficiency
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = .05#probability of first signal
P_x2 = .15#probability of second signal
P_x3 = .2#probability of third signal
P_x4 = .05#probability of fourth signal
P_x5 = .15#probability of fifth signal
P_x6 = .3#probability of sixth signal
P_x7 = .1#probability of seventh signal
n1 = 4.#number of bits in code obtained from table given textbook
n2 = 3.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 4.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook
n6 = 2.#number of bits in code obtained from table given textbook
n7 = 3.#number of bits in code obtained from table given textbook

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
I_x6 = -log2(P_x6);
I_x7 = -log2(P_x7);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4 +  P_x5*I_x5 + P_x6*I_x6 + P_x7*I_x7;#entropy
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5 + P_x6*n6 + P_x7*n7;
neta = (H_x*100)/L#Efficiency in percentage

#results
print "Efficiency in percentage (percent) = ",round(neta,2)
Efficiency in percentage (percent) =  98.88

Example 49 - pg 534

In [28]:
#calculate the Variance of codeword length

#given
P_x1 = .4#probability of first signal
P_x2 = .2#probability of second signal
P_x3 = .8#probability of third signal
P_x4 = .08#probability of fourth signal
P_x5 = .02#probability of fifth signal
n1 = 2.#number of bits in code obtained from table given textbook
n2 = 3.#number of bits in code obtained from table given textbook
n3 = 1.#number of bits in code obtained from table given textbook
n4 = 4.#number of bits in code obtained from table given textbook
n5 = 4.#number of bits in code obtained from table given textbook

#calculations
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;#average codeword length per symbol
#since sigma = sqrt(summation of product of probability and (n- L)**2)
sigmasquare = P_x1*(n1-L)**2 + P_x2*(n2-L)**2 +P_x3*(n3-L)**2 + P_x4*(n4-L)**2 +P_x5*(n5-L)**2;#Variance of codewoed length

#results
print "Variance of codeword length = ",sigmasquare
Variance of codeword length =  2.42

Example 50 - pg 535

In [29]:
#calculate the Entropy and Information rate
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = 1./2#probability of first signal
P_x2 = 1./4#probability of second signal
P_x3 = 1./8#probability of third signal
P_x4 = 1./16#probability of fourth signal
P_x5 = 1./32#probability of fifth signal
P_x6 = 1./32#probability of sixth signal
r = 16#message rate in outcomes per second

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
I_x6 = -log2(P_x6);
H_X = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4 +  P_x5*I_x5 + P_x6*I_x6 #entropy
R = H_X*r#Information rate

#results
print "Entropy of the system (bits/message) = ",H_X
print "Information rate (bits/seconds) = ",R
Entropy of the system (bits/message) =  1.9375
Information rate (bits/seconds) =  31.0

Example 51 - pg 535

In [30]:
#calculate the Entropy
import numpy
#given
import math
from math import log
def log2(x):
	y=log(x)/log(2)
 	return y
P_x1 = .3#probability of first signal
P_x2 = .4#probability of second signal
P_x3 = .3#probability of third signal
P_YX = numpy.matrix([[.8, .2, 0],[0, .1, 0],[0, .3, 0.7]])#matrix obtained from the figure 

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
H_X = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 #entropy
P_y1 = P_YX[0,0]*P_x1 + P_YX[0,1]*P_x1 + P_YX[0,2]*P_x1;
P_y2 = P_YX[1,0]*P_x2 + P_YX[1,1]*P_x2 + P_YX[1,2]*P_x2;
P_y3 = P_YX[2,0]*P_x3 + P_YX[2,1]*P_x3 + P_YX[2,2]*P_x3;
I_y1 = -log2(P_y1);
I_y2 = -log2(P_y2);
I_y3 = -log2(P_y3);
H_Y = -P_y1*I_y1 -  P_y2*I_y2 -  P_y3*I_y3 #entropy

#results
print " Entropy H(X) = ",round(H_X,3)
print "Entropy H(Y) = ",round(H_Y,3)
print " Note:There is mistake in the calculation of P_y3 in the textbook so their is change in entropy H_Y"
 Entropy H(X) =  1.571
Entropy H(Y) =  -1.228
 Note:There is mistake in the calculation of P_y3 in the textbook so their is change in entropy H_Y

Example 52 - pg 536

In [31]:
#calculate the Entropy of the second order extension
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y

P_x1 = .7#probability of first signal
P_x2 = .15#probability of second signal
P_x3 = .15#probability of third signal
n = 2#second order extention

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3#entropy
H_x2 = n*H_x#entropy of second order extention

#results
print "Entropy of second order extension (bits/symbol) = ",round(H_x2,3)
Entropy of second order extension (bits/symbol) =  2.363

Example 54 - pg 537

In [32]:
#calculate the Entropy of the source
import math
from math import log
#given
def log2(x):
	y=log(x)/log(2)
 	return y

#given
P_x1 = 1./3#probability of first signal
P_x2 = 1./6#probability of second signal
P_x3 = 1./4#probability of third signal
P_x4 = 1./4#probability of fourth signal

#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
H_x = P_x1*I_x1 +  P_x2*I_x2 +  P_x3*I_x3 +  P_x4*I_x4 #entropy

#results
print "Entropy of the source (bits/symbol) =  ",round(H_x,4)
Entropy of the source (bits/symbol) =   1.9591

Example 55 - pg 538

In [33]:
#calculate the Average number of bits per message

#given
P_x1 = 1./2#probability of first signal
P_x2 = 1./4#probability of second signal
P_x3 = 1./8#probability of third signal
P_x4 = 1./16#probability of fourth signal
P_x5 = 1./16#probability of fifth signal
n1 = 1.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 3.#number of bits in code obtained from table given textbook
n4 = 4.#number of bits in code obtained from table given textbook
n5 = 4.#number of bits in code obtained from table given textbook

#calculations
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;#Average number of bits per message

#results
print "Average number of bits per message (bits) = ",L
Average number of bits per message (bits) =  1.875

Example 56 - pg 538

In [34]:
#calculate the Information capacity of telephone channel
import math
from math import log,exp
#given
def log2(x):
	y=log(x)/log(2)
 	return y
B = 3.4*10**3#bandwidth
SbyN = 30#signal to the noise ratio in dB


#calculations
SbyN1 = exp((SbyN/10)*log(10))#signal to noise ratio 
C = B*log2(1+SbyN1);

#result
print "Information capacity of the telephone channel (kbps) = ",round(C/1000.,2)
Information capacity of the telephone channel (kbps) =  33.89