import math
# Information content of each source symbol: I(xi) = log2(1/P(xi)).
# Variable Declaration
px1, px2, px3, px4 = 1/2.0, 1/4.0, 1/8.0, 1/8.0
# Calculation -- per-symbol information content, in bits
Ix1 = math.log(1/px1, 2)
Ix2 = math.log(1/px2, 2)
Ix3 = math.log(1/px3, 2)
Ix4 = math.log(1/px4, 2)
# Result
print("Information Content tI(x1)= %.2f bit" %Ix1)
print(" tI(x2)= %.f bits" %Ix2)
print(" tI(x3)= %.f bits" %Ix3)
print(" tI(x4)= %.f bits" %Ix4)
import math
# Amount of information for a symbol with P(xi) = 1/4, computed with the
# change-of-base identity log2(x) = log10(x) / log10(2).
# Variable Declaration
pxi = 1/4.0
# Calculation
numerator = math.log10(1/pxi)
Ixi = numerator / math.log10(2)
# Result
print("The amount of Information I(Xi)= %.f " %Ixi)
import math
# Amount of information of two equiprobable symbols (P = 1/2 each):
# each carries I = log2(2) = 1 bit.
# Variable Declaration
px1 = px2 = 1/2.0
# Calculation
Ix1 = math.log(1/px1, 2)
Ix2 = math.log(1/px2, 2)
# Result
print("The amount of Information I(X1)= %.f bit" %Ix1)
print("The amount of Information I(X2)= %.f bit" %Ix2)
import math
# Amount of information for P(x1) = 1/4 and P(x2) = 3/4; the less
# probable symbol carries more information.
# Variable Declaration
px1, px2 = 1/4.0, 3/4.0
# Calculation
Ix1 = math.log(1/px1, 2)
Ix2 = math.log(1/px2, 2)
# Result
print("The amount of Information I(X1)= %.f bit" %Ix1)
print("The amount of Information I(X2)= %.2f bit" %Ix2)
import math
# Source entropy plus the information content of two four-symbol
# sequences, assuming statistically independent symbols.
# Variable Declaration
px1, px2, px3, px4 = 0.4, 0.3, 0.2, 0.1
# Calculation
# H(X) = -sum p*log2(p)
HX = -sum(p * math.log(p, 2) for p in (px1, px2, px3, px4))
# Independence: P(sequence) is the product of symbol probabilities,
# and I(sequence) = -log2 P(sequence).
Px1x2x1x3 = px1 * px2 * px1 * px3
Ix1x2x1x3 = -math.log(Px1x2x1x3, 2)
Px4x3x3x2 = px4 * px3 * px3 * px2
Ix4x3x3x2 = -math.log(Px4x3x3x2, 2)
# Result
print(" \n Entropy H(X) = %.2f bits/symbol " %HX)
print("The amount of Information I(x1x2x1x3)= %.2f bits/symbol" %Ix1x2x1x3)
print(" I(x4x3x3x2) = %.2f bits/symbol " %Ix4x3x3x2)
import math
# Average information rate of a telegraph source: dots and dashes with
# the given probabilities and durations, plus a fixed space after each.
# Variable Declaration
pdash, pdot = 1/3.0, 2/3.0
tdot, tdash, tspace = 0.2, 0.6, 0.2
# Calculation
HX = -pdash*math.log(pdash,2) - pdot*math.log(pdot,2)   # entropy, bits/symbol
Ts = pdot*tdot + pdash*tdash + tspace                   # average symbol duration, s
r = 1/Ts                                                # symbols per second
R = r*HX                                                # bits per second
# Result
print('Average rate of information R = %.2f bits/s' %R)
import math
# Information rate of a four-symbol source sampled at the Nyquist rate
# r = 2*f, where f (the signal bandwidth, Hz) is entered by the user.
# BUG FIX: input() returns a str in Python 3, so 2*f*HX raised a
# TypeError; the entry is now converted to float.
f = float(input('Enter the frequncy f='))
px1 = 1/8.0
px2 = 1/8.0
px3 = 3/8.0
px4 = 3/8.0
# Source entropy H(X) = sum p*log2(1/p)
HX = px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2) #entropy of the source
R = 2*f*HX #r=2*f samples/sec
print('information rate R= %.1f bits/sec ' %R) #f=signal bandwidth
import math
# Information rate of a source whose four symbols are all equally
# likely, sampled at the Nyquist rate r = 2*fm.
# BUG FIX 1: four equally likely symbols have probability 1/4 each; the
# original used 1/2, which is not a probability distribution. H(X) is
# 2 bits either way, so the printed results are unchanged.
# BUG FIX 2: input() returns a str in Python 3; convert to float so the
# arithmetic below works.
# Variable Declaration
px1 = 1/4.0
px2 = 1/4.0
px3 = 1/4.0
px4 = 1/4.0
# Calculation
f = float(input('Enter the frequncy of system fm(in Hz) ='))
HX = px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)
# Result
print('\n Entropy H(X) =%.f bits/symbol ' %HX)
R = 2*f*HX
print('information rate =%.f bits/sec' %R)
import math
# Source entropy and information rate for a five-symbol source that
# emits one symbol every Tb = 1 ms.
# Variable Declaration -- symbol probabilities
px1 = 1/2.0
px2 = 1/4.0
px3 = 1/8.0
px4 = 1/16.0
px5 = 1/16.0
Tb = 10.0**-3   # symbol duration, seconds
# Calculation
# BUG FIX: the px4 and px5 terms called math.log() without the base-2
# argument (natural log), overstating H(X); all terms now use base 2,
# giving the exact H(X) = 1.875 bits/symbol, so the trailing
# 'Approximation error' marker print is no longer needed.
HX = px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2)+px4*math.log(1/px4,2)+px5*math.log(1/px5,2)
# Result
print('1. source entropy H(X) = %.2f bits/symbol ' %HX) #source entropy
r = 1.0/Tb   # symbols per second
R = r*HX     # information rate, bits/sec
print(' 2. Information rate R = %.2f bits/sec ' %R)
import math
# Entropy and information rate when the same five-symbol source
# produces r = 16 outcomes per second.
# Variable Declaration
probs = (1/2.0, 1/4.0, 1/8.0, 1/16.0, 1/16.0)
px1, px2, px3, px4, px5 = probs
r = 16.0   # outcomes per second
# Calculation -- H(X) = sum p*log2(1/p)
HX = sum(p * math.log(1/p, 2) for p in probs)
# Result
print('1. Entropy H(X) = %.2f bits/symbol ' %HX) #source entropy
R = r*HX   # information rate
print('2., Information rate R = %.f bits/sec' %R)
import math
# Entropy and information rate of an eight-message source sampled at
# fs = 2 * 10 kHz.
# Variable Declaration
probs = (1/4.0, 1/5.0, 1/5.0, 1/10.0, 1/10.0, 1/20.0, 1/20.0, 1/20.0)
px1, px2, px3, px4, px5, px6, px7, px8 = probs
f = 10*10**3.0      # signal bandwidth, Hz
fs = 10*2*10**3.0   # sampling rate, Hz
# Calculation -- entropy H(X) = sum p*log2(1/p)
HX = sum(p * math.log(1/p, 2) for p in probs)
# Result
print('bits/message H(X) = %.2f ' %HX)
r = fs
R = r*HX   # information rate
print('bits/sec R = %.2f' %R)
print('Approximation error')
import math
from array import *
# Channel-matrix problem: output distribution P(Y) = P(X)·P(Y|X) and
# the joint probability matrix P(X,Y) = diag(P(X))·P(Y|X).
# Variable Declaration
px1 = 0.5
px2 = 0.5
py1x1 = 0.9
py2x1 = 0.1
py1x2 = 0.2
py2x2 = 0.8
PYX = [[py1x1, py2x1], [py1x2, py2x2]]   # rows: P(y|x1), P(y|x2)
PX = [[px1, px2]]                        # row vector P(X)
# BUG FIX: P(Y) is the 1x2 row-vector product P(X)·P(Y|X). The original
# loop multiplied in the wrong order (PYX·PX) and produced a 2x2 array
# whose rows were not valid probability distributions.
PY = [[0, 0]]
for i in range(len(PX)):
    for j in range(len(PYX[0])):
        for k in range(len(PYX)):
            PY[i][j] += PX[i][k] * PYX[k][j]
print(' PY ARRAY = \n')
for r in PY:
    print(r)
# Joint matrix: P(X,Y) = diag(P(X)) · P(Y|X)   (this part was correct)
PXd = [[px1, 0], [0, px2]]
PXY = [[0, 0],
       [0, 0]]
for i in range(len(PXd)):
    for j in range(len(PYX[0])):
        for k in range(len(PYX)):
            PXY[i][j] += PXd[i][k] * PYX[k][j]
print(' \n PXY ARRAY = \n')
for r in PXY:
    print(r)
import math
# AWGN channel capacity via Shannon-Hartley: C = B*log2(1 + S/N),
# reported in units of 10^3 b/s.
# Variable Declaration
B = 4000.0        # bandwidth, Hz
S = 0.1*10**-3    # signal power, W
n = 2*10**-12     # noise power spectral density, W/Hz
# Calculation
N = n*B                          # total in-band noise power
C = B*math.log(1+(S/N), 2)       # capacity of channel, b/s
C = C/1000.0                     # scale to 10^3 b/s
# Result
print(' Capacity of Channel C=%.3f(10^3) b/s ' %C)
import math
# Information rate of a sampled source: 256 equally likely levels,
# Nyquist sampling fs = 2*fm scaled by a factor n = 1.25.
# Variable Declaration
fm = 4000.0
fs = 2*fm
n = 1.25
# Calculation
r = fs*n                  # samples per second
pxi = 1/256.0             # each of the 256 levels equally likely
HX = -math.log(pxi, 2)    # entropy = log2(256) = 8 bits/sample
R = r*HX
R = R/1000                # kb/s
print('Information Rate R= %.f kb/s' %R)
import math
# Channel capacity for B = 10 kHz at a 20 dB signal-to-noise ratio.
# Variable Declaration
B = 10*10**3.0
SN = 20.0                 # S/N in dB
# Calculation
SNR = 10**(SN/10.0)       # linear power ratio (= 100)
C = B*math.log(1+(SNR), 2)
C = C/1000                # express in 10^3 b/s
# Result
print('The channel capacity = %.2f 10^3 b/s' %C)
import math
# S/N (in dB) required for error-free transmission at C = 80 kb/s over
# a 10 kHz channel, by inverting C = B*log2(1 + S/N).
# Variable Declaration
C = 8*10**4.0
B = 10**4.0
# Calculation
SN = 2**(C/B) - 1             # linear S/N (= 2^8 - 1 = 255)
SNR = 10*math.log(SN, 10)     # converted to dB
# Result
print(' The S/N ratio required for error-free transmission =%.2f dB ' %SNR) #required SNR is greater that 24.064
import math
# Bandwidth an AWGN channel needs to carry C = 80 kb/s at S/N = 20 dB:
# B = C / log2(1 + S/N).
# Variable Declaration
SN = 20.0                  # S/N in dB
# Calculation
SNR = 10**(SN/10.0)        # linear (= 100)
C = 8*10**4.0
B = C/(math.log(1+SNR, 2)) # required bandwidth, Hz
B = B/1000                 # kHz
# Result
print('Bandwidth required for AWGN channel B =%.2f kHz ' %B)
import math
# Code efficiency and redundancy for a two-symbol source encoded with
# one binary digit per symbol: efficiency = H(X)/L.
# Variable Declaration
px1, px2 = 0.9, 0.1
n1 = n2 = 1.0                 # codeword lengths
# Calculation
L = px1*n1 + px2*n2           # average code length
HX = px1*math.log(1/px1,2) + px2*math.log(1/px2,2)
n = (HX/L)                    # code efficiency
n = n*100
print('Code efficiency = %.1f percent' %n)
r = (100-n)                   # code redundancy
print('Code redundancy = %.1f percent' %r)
import math
# Code efficiency and redundancy for a four-message code with codeword
# lengths 1, 2, 3, 3.
# Variable Declaration
pa1, pa2, pa3, pa4 = 0.81, 0.09, 0.09, 0.01
n1, n2, n3, n4 = 1, 2, 3, 3   # codeword lengths
# Calculation
L = pa1*n1 + pa2*n2 + pa3*n3 + pa4*n4   # average code length
HX2 = pa1*math.log(1/pa1,2) + pa2*math.log(1/pa2,2) + pa3*math.log(1/pa3,2) + pa4*math.log(1/pa4,2)
n = HX2/L*100   # efficiency, percent
# Result
print(' code efficiency = %.2f percent' %n)
r = (100-n)     # redundancy
print(' code redundancy = %.1f percent' %r)
import math
# Efficiency of a code matched to symbol probabilities 1/2, 1/4, 1/8,
# 1/8 with lengths 1, 2, 3, 3 -- a perfectly matched code (100%).
# Variable Declaration
px1, px2, px3, px4 = 1/2.0, 1/4.0, 1/8.0, 1/8.0
n1, n2, n3, n4 = 1.0, 2.0, 3.0, 3.0
# Calculation
# Information content of each symbol (computed for reference; each
# equals its codeword length here, which is why efficiency is 100%)
Ix1 = -math.log(px1, 2)
Ix2 = -math.log(px2, 2)
Ix3 = -math.log(px3, 2)
Ix4 = -math.log(px4, 2)
HX = px1*math.log(1/px1,2) + px2*math.log(1/px2,2) + px3*math.log(1/px3,2) + px4*math.log(1/px4,2)
L = px1*n1 + px2*n2 + px3*n3 + px4*n4   # average code length
n = HX/L*100
# Result
print('Ccode efficiency = %.f Percent' %n)
import math
# Entropy and information rate of a six-message source that produces
# 16 outcomes per second.
# Variable Declaration
probs = (1/2.0, 1/4.0, 1/8.0, 1/16.0, 1/32.0, 1/32.0)
P1, P2, P3, P4, P5, P6 = probs
r = 16 #message rate
# Calculation -- entropy of the system, H = sum p*log2(1/p)
H = sum(p * math.log(1/p, 2) for p in probs)
# Result
print('1. Entropy of system H = %.2f bits/message ' %H)
R = H*r #R=Entropy*message rate
print(' 2. Information rate R = %.f bits/sec ' %R)
import math
# Entropy H(X) of a three-symbol source and entropy H(Y) of the channel
# output, where P(Y) = P(X)·P(Y|X).
# Variable Declaration
px1 = 0.3
px2 = 0.4
px3 = 0.3
# Calculation
HX = px1*math.log(1/px1,2)+px2*math.log(1/px2,2)+px3*math.log(1/px3,2) #Entropy of X
print(' 1.Entropy of X H(X)=%.3f bits/symbol ' %HX)
PYX = [[0.8, 0.2, 0],[ 0, 1, 0],[ 0, 0.3, 0.7]]   # rows: P(y|x=i)
PX = [[px1, px2, px3]]                            # row vector P(X)
# BUG FIX: P(Y) is the 1x3 row-vector product P(X)·P(Y|X). The original
# loop multiplied in the wrong order (PYX·PX) and then read the output
# probabilities from an invalid first row (it summed to 0.8), which is
# why the script printed 'Approximation error'; that marker is removed
# along with the defect.
PY = [[0, 0, 0]]
for i in range(len(PX)):
    for j in range(len(PYX[0])):
        for k in range(len(PYX)):
            PY[i][j] += PX[i][k] * PYX[k][j]
py1 = PY[0][0]   # = 0.24
py2 = PY[0][1]   # = 0.55
py3 = PY[0][2]   # = 0.21
HY = py1*math.log(1/py1,2)+py2*math.log(1/py2,2)+py3*math.log(1/py3,2) #Entropy of Y
print(' 2. Entropy of Y H(Y)= %.2f bits/symbol ' %HY)
import math
# Entropy of a three-symbol source and of its second-order extension;
# for independent symbols H(X^n) = n*H(X).
# Variable Declaration
P1, P2, P3 = 0.7, 0.15, 0.15
# Calculation
HX = P1*math.log(1/P1,2) + P2*math.log(1/P2,2) + P3*math.log(1/P3,2)   # entropy of source
print(' 1. Entropy of system H(X)=%.2f bits/symbol ' %HX)
n = 2          # second-order extension
HX2 = n*HX     # H(X^2) = 2*H(X)
# Result
print(' 2. Entropy of second order system extension of source can be H(X^2)=%.2f bits/symbol ' %(HX*2))
import math
# Entropy of a four-symbol source with P = 1/3, 1/6, 1/4, 1/4.
# Variable Declaration
S0, S1 = 1/3.0, 1/6.0
S2 = S3 = 1/4.0
# Calculation -- H(X) = sum p*log2(1/p)
HX = S0*math.log(1/S0,2) + S1*math.log(1/S1,2) + S2*math.log(1/S2,2) + S3*math.log(1/S3,2)   # entropy of source
# Result
print(' Entropy of system H(X)=%.2f bits/symbol ' %HX)
import math
# Information capacity of a telephone channel: B = 3.4 kHz at a 30 dB
# signal-to-noise ratio.
# Variable Declaration
B = 3.4*10**3
SNR = 30.0              # in dB
# Calculation
SN = 10**(SNR/10)       # linear ratio (= 1000)
C = B*math.log(1+SN, 2) # information capacity, b/s
C = C/1000              # kbps
# Result
print(' Information capacity of telephone is C = %.2f kbps ' %C)
import math
# Entropy of the source with probabilities 1/3, 1/6, 1/4, 1/4, summing
# the p*log2(1/p) terms over the distribution.
# Variable Declaration
S0 = 1/3.0
S1 = 1/6.0
S2 = 1/4.0
S3 = 1/4.0
# Calculation
HX = sum(p * math.log(1/p, 2) for p in (S0, S1, S2, S3))   # entropy of source
# Result
print(' Entropy of system H(X)=%.2f bits/symbol ' %HX)
import math
# Average codeword length for messages with probabilities 1/2, 1/4,
# 1/8, 1/16, 1/16 and lengths 1, 2, 3, 4, 4 bits.
# Variable Declaration
m1, m2, m3, m4, m5 = 1/2.0, 1/4.0, 1/8.0, 1/16.0, 1/16.0
# Calculation -- the two 4-bit messages (m4, m5) share a probability,
# hence the factor of 2
L = (m1*1)+(m2*2)+(m3*3)+(2*(m4)*4)
# Result
print(' Average number of bits per message =%.2f bits ' %L)
import math
# Telephone-line capacity via Shannon-Hartley for a 3.4 kHz band at a
# 30 dB signal-to-noise ratio, expressed in kbps.
# Variable Declaration
B = 3.4*10**3          # bandwidth, Hz
SNR = 30.0             # dB
# Calculation
SN = 10**(SNR/10)      # linear S/N
capacity_bps = B*math.log(1+SN, 2)   # information capacity, b/s
C = capacity_bps/1000                # kbps
# Result
print(' Information capacity of telephone is C = %.2f kbps ' %C)
import math
# "Entropy" calculation reproduced from the source material.
# NOTE(review): the listed values do not form a probability
# distribution (p1 = 4.0, and the m-values sum to 2.5), and I1 is
# computed but never used. The original script itself prints
# 'Approximation error', so the numbers are reproduced verbatim --
# verify the intended probabilities against the textbook.
# Variable Declaration
p1 = 4.0
m1, m2 = 0.5, 0.5
m3, m4, m5, m6 = 0.375, 0.375, 0.375, 0.375
# Calculation
I1 = p1*math.log(1/p1, 2)   # = -8; unused below
HX = sum(m * math.log(1/m, 2) for m in (m1, m2, m3, m4, m5, m6))   # "entropy" of source
# Result
print(' Entropy of system H(X)=%.2f bits/symbol ' %HX)
print('Approximation error')
import math
# Average code length L = H*n for a source with probabilities 1/2, 1/4,
# 1/8, 1/8 coded at n = 1 code symbol per message.
# Variable Declaration
S0, S1, S2, S3 = 1/2.0, 1/4.0, 1/8.0, 1/8.0
n = 1
# Calculation -- entropy of the source
H = S0*math.log(1.0/S0,2) + S1*math.log(1.0/S1,2) + S2*math.log(1.0/S2,2) + S3*math.log(1.0/S3,2)
L = H*n
# Result
print(' Code length =%.2f bits/messages ' %L)
import math
# Channel capacity for B = 8 kHz at a linear S/N of 31, and the
# narrower bandwidth that sustains the same capacity at S/N = 61.
# Variable Declaration
B = 8*10**3
SNR = 31.0     # linear S/N (log2(1+31) = 5 exactly)
SNR2 = 61
# Calculation
C = B*math.log(1+SNR, 2)       # information capacity, b/s
B2 = C/math.log(1+SNR2, 2)     # bandwidth giving the same C at SNR2
# Result
print(' Channel capacity is C = %.2f x 10^3 bits/sec ' %(C/1000))
print(' New Bandwidth is C = %.2f x kHz ' %(B2/1000))