Chapter 15 - Basic Information Theory

Example 15.1, page no. 533

In [4]:
from math import log
# Example 15.1: entropy of a memoryless source emitting three symbols.

# Given symbol probabilities (they sum to 1)
P_A = 0.5   # probability of producing symbol 'A'
P_B = 0.25  # probability of producing symbol 'B'
P_C = 0.25  # probability of producing symbol 'C'

def log2(x):
    """Return the base-2 logarithm of x."""
    return log(x, 2)

# Source entropy: H = sum over symbols of p * log2(1/p), in bits/symbol
H = P_A*log2(1/P_A) + P_B*log2(1/P_B) + P_C*log2(1/P_C)
# print() with a single parenthesized argument works identically in
# Python 2 and Python 3 (the original bare print statement is Python 2 only)
print('The source entropy is: %0.2f bits/symbol' % (H))
The source entropy is: 1.50 bits/symbol

Example 15.2, page no. 535

In [9]:
from __future__ import division
from math import log
def log2(x):
    return log(x,2)

#Given
P_A=0.5
P_B=0.25
P_C=1/32
P_D=1/8
P_E=1/16
P_F=1/32# probabilities of producing respective symbol
H=(P_A*log2(1/P_A))+(P_B*log2(1/P_B))+(P_C*log2(1/P_C))+(P_D*log2(1/P_D))+(P_E*log2(1/P_E))+(P_F*log2(1/P_F))# Source Entropy
n=6
T=1
print 'The source entropy is: %0.2f bits/symbol'%(round(1000*H)/1000)
The source entropy is: 1.94 bits/symbol

Example 15.3, page no. 536

In [2]:
from math import log
# Example 15.3: Shannon-Hartley channel capacity, C = B * log2(1 + SNR).

def log2(x):
    """Return the base-2 logarithm of x."""
    return log(x, 2)

# Given
# (a) capacity of the original channel, then the bandwidth needed to keep
#     that capacity when the SNR drops
B1 = 4e3      # channel bandwidth, Hz
SNR1 = 31     # channel SNR (linear ratio)
C1 = B1*log2(1+SNR1)             # channel capacity, bits/sec
SNR2 = 14     # reduced SNR
B2 = round(C1/log2(1+SNR2))      # bandwidth (Hz) giving the same capacity at SNR2

# (b) SNR required to keep the same capacity over a reduced bandwidth
#     (invert Shannon-Hartley: SNR = 2**(C/B) - 1)
B3 = 3e3      # reduced bandwidth, Hz
SNR3 = (2**(C1/B3))-1            # required SNR for the reduced bandwidth
# print() with a single parenthesized argument works identically in
# Python 2 and Python 3 (the original bare print statement is Python 2 only)
print('a)\n  Channel capacity is: %d Kbits/sec\n    Bandwidth: %d KHz\nb)\n  SNR for 3KHz bandwidth: %0.2f' % (C1*1e-3, B2*1e-3, SNR3))
# NOTE: the answer in the book is wrong -- it prints 90.4 for SNR3,
# but the correct value is 100.59.
a)
  Channel capacity is: 20 Kbits/sec
    Bandwidth: 5 KHz
b)
  SNR for 3KHz bandwidth: 100.59