Chpater 8: INFORMATION THEORY

Example 8.1, Page No 464

In [1]:
import math 
#Find Information Content of Each Symbol

#Variable Declaration
px1=1/2.0
px2=1/4.0
px3=1/8.0
px4=1/8.0

#Calculation
#information content of each symbol
Ix1=math.log(1/px1,2)
Ix2=math.log(1/px2,2)
Ix3=math.log(1/px3,2)
Ix4=math.log(1/px4,2)

#Result
print("Information Content tI(x1)= %.2f bit" %Ix1)
print(" tI(x2)= %.f bits" %Ix2)
print(" tI(x3)= %.f bits" %Ix3)
print(" tI(x4)= %.f bits" %Ix4)
Information Content tI(x1)= 1.00 bit
 tI(x2)= 2 bits
 tI(x3)= 3 bits
 tI(x4)= 3 bits

Example 8.2, Page No 464

In [2]:
import math
#Find amount of Information
#Variable Declaration
#Calculation
pxi=1/4.0
Ixi=(math.log10(1/pxi))/math.log10(2)

#RESULTS
print("The amount of Information I(Xi)= %.f " %Ixi)
The amount of Information I(Xi)= 2 

Example 8.3, Page No 464

In [3]:
import math
#Find Amount of Information

#Variable Declaration
px1=1/2.0
px2=1/2.0

#Calculation
Ix1=math.log(1/px1,2) #entropy
Ix2=math.log(1/px2,2)

#Result
print("The amount of Information I(X1)= %.f bit" %Ix1)
print("The amount of Information I(X2)= %.f bit" %Ix2)
The amount of Information I(X1)= 1 bit
The amount of Information I(X2)= 1 bit

Example 8.4, Page No 465

In [4]:
import math
#Find Amount of Information 

#Variable Declaration
px1=1/4.0
px2=3/4.0

#Calculation
Ix1=math.log(1/px1,2)
Ix2=math.log(1/px2,2)

#Result
print("The amount of Information I(X1)= %.f bit" %Ix1)
print("The amount of Information I(X2)= %.2f bit" %Ix2)
The amount of Information I(X1)= 2 bit
The amount of Information I(X2)= 0.42 bit

Example 8.9, Page No 468

In [5]:
import math
#Find Entropy,Amount of information

#Variable Declaration
px1=0.4
px2=0.3
px3=0.2
px4=0.1

#Calculation
HX=-px1*math.log(px1,2)-px2*math.log(px2,2)-px3*math.log(px3,2)-px4*math.log(px4,2)
Px1x2x1x3=px1*px2*px1*px3
Ix1x2x1x3=-math.log(Px1x2x1x3,2)
Px4x3x3x2=px4*px3*px3*px2
Ix4x3x3x2=-math.log(Px4x3x3x2,2)

#Result
print(" \n Entropy  H(X) = %.2f bits/symbol " %HX)
print("The amount of Information I(x1x2x1x3)= %.2f bits/symbol" %Ix1x2x1x3)
print(" I(x4x3x3x2) = %.2f bits/symbol " %Ix4x3x3x2)
 
 Entropy  H(X) = 1.85 bits/symbol 
The amount of Information I(x1x2x1x3)= 6.70 bits/symbol
 I(x4x3x3x2) = 9.70 bits/symbol 

Example 8.13, Page No 471

In [6]:
import math

#Find information rate the telegraphic source

#Variable Declaration
pdash=1/3.0
pdot=2/3.0
tdot=0.2
tdash=0.6
tspace=0.2

#Calculation
HX=-pdash*math.log(pdash,2)-pdot*math.log(pdot,2)
Ts=pdot*tdot+pdash*tdash+tspace
r=1/Ts
R=r*HX

#Result
print('Average rate of information R = %.2f bits/s' %R)
Average rate of information R = 1.72 bits/s

Example 8.14, Page No 471

In [7]:
import math
#Find information rate of the source

f=input('Enter the frequncy f=')
px1=1/8.0
px2=1/8.0
px3=3/8.0
px4=3/8.0

HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2) #entropy of the source
R=2*f*HX #r=2*f
print('information rate  R=  %.1f  bits/sec ' %R)   #f=signal bandwidth
Enter the frequncy f=34
information rate  R=  123.2  bits/sec 

Example 8.15, Page No 472

In [8]:
import math
#Find information rate of the source
#all symbols are equally likely

#Variable Declaration
px1=1/2.0
px2=1/2.0
px3=1/2.0
px4=1/2.0

#Calculation
f=input('Enter the frequncy of system fm(in Hz) =')
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)

#Result
print('\n Entropy H(X) =%.f bits/symbol ' %HX)
R=2*f*HX
print('information rate =%.f bits/sec' %R)
Enter the frequncy of system fm(in Hz) =45

 Entropy H(X) =2 bits/symbol 
information rate =180 bits/sec

Example 8.16, Page No 473

In [9]:
import math
#Find source entropy ,information rate

#Variable Declaration
#probability symbols
px1=1/2.0
px2=1/4.0
px3=1/8.0
px4=1/16.0
px5=1/16.0
Tb=10.0**-3

#Calculation
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4)+px5*math.log(1/px5)

#Result
print('1. source entropy H(X) = %.2f bits/symbol ' %HX) #source entropy
r=1.0/Tb
R=r*HX #information rate
print(' 2. Information rate R = %.2f bits/sec ' %R)
print('Approximation error')
1. source entropy H(X) = 1.72 bits/symbol 
 2. Information rate R = 1721.57 bits/sec 
Approximation error

Example 8.17, Page No 473

In [10]:
import math
#assume if there are 16 outcomes per second

#Variable Declaration
px1=1/2.0
px2=1/4.0
px3=1/8.0
px4=1/16.0
px5=1/16.0
r=16.0

#Calculation
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)+px5*math.log(1/px5,2)

#Result
print('1. Entropy H(X) = %.2f bits/symbol ' %HX) #source entropy

R=r*HX
print('2., Information rate R = %.f bits/sec' %R)
1. Entropy H(X) = 1.88 bits/symbol 
2., Information rate R = 30 bits/sec

Example 8.18, Page No 474

In [11]:
import math
#determine entropy ,information rate

#Variable Declaration
px1=1/4.0
px2=1/5.0
px3=1/5.0
px4=1/10.0
px5=1/10.0
px6=1/20.0
px7=1/20.0
px8=1/20.0
f=10*10**3.0
fs=10*2*10**3.0

#Calculation
#entropy
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)+px5*math.log(1/px5,2)+px6*math.log(1/px6,2)+px7*math.log(1/px7,2)+px8*math.log(1/px8,2) 

#Result
print('bits/message H(X) = %.2f ' %HX)
r=fs
R=r*HX    #information rate
print('bits/sec R = %.2f' %R)
print('Approximation error')
bits/message H(X) = 2.74 
bits/sec R = 54828.92
Approximation error

Example 8.19, Page No 476

In [12]:
import math
from array import *
#Find Channel Matrix,joint probability

#Variable Declaration
px1=0.5
px2=0.5
py1x1=0.9
py2x1=0.1
py1x2=0.2
py2x2=0.8
PYX=[[py1x1,py2x1],[py1x2,py2x2]]
PX=[[px1,px2]]
PY = [[0,0],
    [0,0]]
PXY = [[0,0],
    [0,0]]

for i in range(len(PYX)):
   # iterate through columns of Y
   for j in range(len(PX[0])):
       # iterate through rows of Y
       for k in range(len(PX)):
           PY[i][j] += PYX[i][k] * PX[k][j]
print('  PY ARRAY =  \n')
for r in PY:
   print(r)
PXd=[[px1,0],[0,px2]]


for i in range(len(PXd)):
   # iterate through columns of Y
   for j in range(len(PYX[0])):
       # iterate through rows of Y
       for k in range(len(PYX)):
           PXY[i][j] += PXd[i][k] * PYX[k][j]

            
print(' \n PXY ARRAY =  \n')
for r in PXY:
   print(r)
  PY ARRAY =  

[0.45, 0.45]
[0.1, 0.1]
 
 PXY ARRAY =  

[0.45, 0.05]
[0.1, 0.4]

Example 8.35, Page No 498

In [13]:
import math
#Channel is aproximated by the AWGN Channel

#Variable Declaration
B=4000.0
S=0.1*10**-3
n=2*10**-12

#Calculation
N=n*B
C=B*math.log(1+(S/N),2) #Capacity of Channel
C=C/1000.0
#Result
print(' Capacity of Channel  C=%.3f(10^3) b/s ' %C)
 Capacity of Channel  C=54.439(10^3) b/s 

Example 8.36i, Page No 499

In [14]:
import math

#assume that  succeissive samples are statistically independent

#Variable Declaration
fm=4000.0
fs=2*fm
n=1.25

#Calculation
r=fs*n
pxi=1/256.0
HX=-math.log(pxi,2)
R=r*HX
R=R/1000
print('Information Rate  R= %.f kb/s' %R)
Information Rate  R= 80 kb/s

Example 8.36ii, Page No 499

In [15]:
import math

#assume that  succeissive samples are statistically independent

#Variable Declaration
B=10*10**3.0
SN=20.0

#Calculation
SNR=10**(SN/10.0)
C=B*math.log(1+(SNR),2)
C=C/1000

#Result
print('The channel capacity = %.2f 10^3 b/s' %C)
The channel capacity = 66.58 10^3 b/s

Example 8.36iii, Page No 499

In [16]:
import math
#assume that  succeissive samples are statistically independent

#Variable Declaration
C=8*10**4.0
B=10**4.0

#Calculation
SN=2**(C/B)-1
SNR=10*math.log(SN,10) #SNR

#Result
print(' The S/N ratio required for error-free transmission  =%.2f dB ' %SNR)     #required SNR is greater that 24.064
 The S/N ratio required for error-free transmission  =24.07 dB 

Example 8.36iv, Page No 499

In [17]:
import math
#assume that  succeissive samples are statistically independent

#Variable Declaration
SN=20.0

#Calculation
SNR=10**(SN/10.0)
C=8*10**4.0
B=C/(math.log(1+SNR,2)) #Bandwidth
B=B/1000

#Result
print('Bandwidth required for AWGN channel B =%.2f kHz ' %B)
Bandwidth required for AWGN channel B =12.02 kHz 

Example 8.37, Page No 502

In [18]:
import math
#Find code efficiency,redundancy

#Variable Declaration
px1=0.9
px2=0.1
n1=1.0
n2=1.0

#Calculation
L=px1*n1+px2*n2 #code leght
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)
n=(HX/L) #code efficiency
n=n*100

print('Code efficiency = %.1f percent'  %n)
r=(100-n) #code reduncy
print('Code redundancy = %.1f percent' %r)
Code efficiency = 46.9 percent
Code redundancy = 53.1 percent

Example 8.38, Page No 502

In [19]:
import math
#Find code efficiency,redundancy

#Variable Declaration
pa1=0.81
pa2=0.09 
pa3=0.09
pa4=0.01 
n1=1
n2=2 
n3=3
n4=3 

#Calculation
L=pa1*n1+pa2*n2+pa3*n3+pa4*n4
HX2=pa1*math.log(1/pa1,2)+pa2*math.log(1/pa2,2)+pa3*math.log(1/pa3,2)+pa4*math.log(1/pa4,2)
n=HX2/L*100

#Result
print(' code efficiency = %.2f  percent' %n)

r=(100-n) #code reduncy
print(' code redundancy = %.1f percent' %r)
 code efficiency = 72.71  percent
 code redundancy = 27.3 percent

Example 8.44, Page No 507

In [20]:
import math
#Find  efficiency of the code

#Variable Declaration
px1=1/2.0
px2=1/4.0
px3=1/8.0
px4=1/8.0
n1=1.0
n2=2.0
n3=3.0
n4=3.0

#Calculation
#information content of each symbol
Ix1=-math.log(px1,2)
Ix2=-math.log(px2,2)
Ix3=-math.log(px3,2)
Ix4=-math.log(px4,2)

HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)
L=px1*n1+px2*n2+px3*n3+px4*n4

n=HX/L*100

#Result
print('Ccode efficiency = %.f  Percent' %n)
Ccode efficiency = 100  Percent

Example 8.50, Page No 512

In [21]:
import math
#Find entropy ,information rate
#If there are 16 outcomes per second

#Variable Declaration
P1=1/2.0
P2=1/4.0
P3=1/8.0
P4=1/16.0
P5=1/32.0
P6=1/32.0
r=16     #message rate

#Calculation
H=P1*math.log(1/P1,2)+P2*math.log(1/P2,2)+P3*math.log(1/P3,2)+P4*math.log(1/P4,2)+P5*math.log(1/P5,2)+P6*math.log(1/P6,2)
#Entropy of system

#Result
print('1. Entropy of system H = %.2f bits/message ' %H)
R=H*r                            #R=Entropy*message rate
print(' 2. Information rate  R = %.f bits/sec ' %R)
1. Entropy of system H = 1.94 bits/message 
 2. Information rate  R = 31 bits/sec 

Example 8.51, Page No 512

In [22]:
import math
#Calculate H(X) ,H(Y)

#Variable Declaration
px1=0.3
px2=0.4
px3=0.3

#Calculation
HX=px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)  #Entropy of X


print(' 1.Entropy of X  H(X)=%.3f bits/symbol ' %HX)

PYX=[[0.8, 0.2, 0],[ 0, 1, 0],[ 0, 0.3, 0.7]]
PX=[[px1, px2, px3]]
PXY = [[0,0,0],
    [0,0,0],
    [0,0,0]]

for i in range(len(PYX)):
   # iterate through columns of PXd
   for j in range(len(PX[0])):
       # iterate through rows of PYX
       for k in range(len(PX)):
           PXY[i][j] += PYX[i][k] * PX[k][j]

py1=PXY[0][0]
py2=PXY[0][1]
py3=PXY[0][2]
HY=py1*math.log(1/py1,2)+py2*math.log(1/py2,2)+py3*math.log(1/py3,2)  #Entropy of Y
print(' 2. Entropy of Y H(Y)= %.2f bits/symbol ' %HY)
print('Approximation error')	
 1.Entropy of X  H(X)=1.571 bits/symbol 
 2. Entropy of Y H(Y)= 1.51 bits/symbol 
Approximation error

Example 8.52, Page No 513

In [23]:
import math
#Find entropy of source ,entropy of second order extension

#Variable Declaration
P1=0.7
P2=0.15
P3=0.15

#Calculation
HX=P1*math.log(1/P1,2)+P2*math.log(1/P2,2)+P3*math.log(1/P3,2)     #Entropy of source
print(' 1. Entropy of system  H(X)=%.2f bits/symbol ' %HX)
#H(X^n)=n*H(X)
n=2    #for second order
HX2=n*HX

#Result
print(' 2. Entropy of second order system extension of source can be H(X^2)=%.2f bits/symbol ' %(HX*2))
 1. Entropy of system  H(X)=1.18 bits/symbol 
 2. Entropy of second order system extension of source can be H(X^2)=2.36 bits/symbol 

Example 8.54, Page No 514

In [24]:
import math
#Find entropy of source 

#Variable Declaration
S0=1/3.0
S1=1/6.0
S2=1/4.0
S3=1/4.0

#Calculation
HX=S0*math.log(1/S0,2)+S1*math.log(1/S1,2)+S2*math.log(1/S2,2)+S3*math.log(1/S3,2)   #EntroSy of source

#Result
print(' Entropy of system  H(X)=%.2f bits/symbol ' %HX)
 Entropy of system  H(X)=1.96 bits/symbol 

Example 8.56, Page No 515

In [25]:
import math
#Find Information capacity of telephone

#Variable Declaration
B=3.4*10**3
SNR=30.0

#Calculation
SN=10**(SNR/10)
C=B*math.log(1+SN,2)   #Information capacity
C=C/1000

#Result
print(' Information capacity of telephone is C = %.2f kbps ' %C)
 Information capacity of telephone is C = 33.89 kbps 

Example 8.59, Page No 516

In [26]:
import math
#Find entropy of source 

#Variable Declaration
S0=1/3.0
S1=1/6.0
S2=1/4.0
S3=1/4.0

#Calculation
HX=S0*math.log(1/S0,2)+S1*math.log(1/S1,2)+S2*math.log(1/S2,2)+S3*math.log(1/S3,2)   #EntroSy of source

#Result
print(' Entropy of system  H(X)=%.2f bits/symbol ' %HX)
 Entropy of system  H(X)=1.96 bits/symbol 

Example 8.60, Page No 516

In [27]:
import math
#Find entropy of source 

#Variable Declaration
m1=1/2.0
m2=1/4.0
m3=1/8.0
m4=1/16.0
m5=1/16.0

#Calculation
L=(m1*1)+(m2*2)+(m3*3)+(2*(m4)*4)

#Result
print(' Average number of bits per message =%.2f bits ' %L)
 Average number of bits per message =1.88 bits 

Example 8.61, Page No 517

In [28]:
import math
#Find Information capacity of telephone

#Variable Declaration
B=3.4*10**3
SNR=30.0

#Calculation
SN=10**(SNR/10)
C=B*math.log(1+SN,2)   #Information capacity
C=C/1000

#Result
print(' Information capacity of telephone is C = %.2f kbps ' %C)
 Information capacity of telephone is C = 33.89 kbps 

Example 8.62, Page No 517

In [29]:
import math
#Find entropy of source 

#Variable Declaration
p1=4.0
m1=0.5
m2=0.5
m3=0.375
m4=0.375
m5=0.375
m6=0.375

#Calculation
I1=p1*math.log(1/p1,2) 
HX=m1*math.log(1/m1,2)+m2*math.log(1/m2,2)+m3*math.log(1/m3,2)+m4*math.log(1/m4,2)+m5*math.log(1/m5,2)+m6*math.log(1/m6,2)   #EntroSy of source

#Result
print(' Entropy of system  H(X)=%.2f bits/symbol ' %HX)
print('Approximation error')	
 Entropy of system  H(X)=3.12 bits/symbol 
Approximation error

Example 8.65, Page No 519

In [30]:
import math
#Find entropy of source 

#Variable Declaration
S0=1/2.0
S1=1/4.0
S2=1/8.0
S3=1/8.0
n=1

#Calculation
H=S0*math.log(1.0/S0,2)+S1*math.log(1.0/S1,2)+S2*math.log(1.0/S2,2)+S3*math.log(1.0/S3,2)   #EntroSy of source
L=H*n


#Result
print(' Code length =%.2f bits/messages ' %L)
 Code length =1.75 bits/messages 

Example 8.67, Page No 520

In [31]:
import math
#Find channel capacity and new bandwidth

#Variable Declaration
B=8*10**3
SNR=31.0
SNR2=61

#Calculation
C=B*math.log(1+SNR,2)   #Information capacity
B2=C/math.log(1+SNR2,2)
#Result
print(' Channel capacity is C = %.2f x 10^3 bits/sec ' %(C/1000))
print(' New Bandwidth is C = %.2f x kHz ' %(B2/1000))
 Channel capacity is C = 40.00 x 10^3 bits/sec 
 New Bandwidth is C = 6.72 x kHz