#calculate the information content in all symbols
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1=1./2;#probability 1
Px_2=1./4;#probability 2
Px_3=1./8;#probability 3
Px_4=1./8;#probability 4
#calculations
Ix_1 = log2(1/(Px_1))#information content in first probability
Ix_2 = log2(1/(Px_2))#information content in first probability
Ix_3 = log2(1/(Px_3))#information content in first probability
Ix_4 = log2(1/(Px_3))#information content in first probability
#results
print "i. Information content of first symbol (bits) = ",Ix_1
print "ii. Information content of second symbol (bits) = ",Ix_2
print "iii. Information content of third symbol (bits) = ",Ix_3
print "iV. Information content of fourth symbol (bits)",Ix_4
#calculate the amount of information
import math
from math import log
#given
Px_i = 1./4#probability of a symbol
#calculation
Ix_i = (log(1/Px_i))/log(2)#formula for amount of information of a symbol
#result
print "i. Amount of information (bits) = ",Ix_i
#calculate the amount of information
#given
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#since there are only two binary levels i.e. 1 or 0. Since, these two binary levels occur with equal likelihood of occurrence will be
Px_1 = 1./2#probability of zero level
Px_2 = 1./2#probability of first level
#calculations
Ix_1 = log2(1/Px_1)#amount of information of zero level with base 2
Ix_2 = log2(1/Px_2)#amount of information of first level with base 2
Ix_1= log(1/Px_1)/log(2)#amount of information content with base 10
Ix_2 = Ix_1
#result
print "i.Amount of information content wrt binary PCM 0 (bit) = ",Ix_1
print "ii.Amount of information content wrt binary PCM 1 (bit) = ",Ix_2
#calculate the amount of information
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
Px_1 = 1./4#probability wrt to binary PCM '0'
Px_2 = 3./4#probability wrt to binary PCM '1'
#calculations
Ix_1 = log2(1/Px_1)#amount of information of zero level with base 2
Ix_2 = log2(1/Px_2)#amount of information of first level with base 2
Ix_1= log(1/Px_1)/log(2)#amount of information content with base 10
Ix_2= log(1/Px_2)/log(2)#amount of information content with base 10
#results
print "i.Amount of information carried wrt to binary PCM 0 (bits) = ",Ix_1
print "ii.Amount of information carried wrt to binary PCM 1 (bits) = ",round(Ix_2,3)
#calculate the entropy and Amount of information
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1 = .4#probability of first symbol
Px_2 = .3#probability of second symbol
Px_3 = .2#probability of third symbol
Px_4 = .1#probability of fourth symbol
#calculations
H_X = -Px_1*log2(Px_1)-Px_2*log2(Px_2)-Px_3*log2(Px_3)-Px_4*log2(Px_4);#entropy
Px1x2x1x3 = Px_1*Px_2*Px_1*Px_3;#product of probabilities
Ix1x2x1x3 =-log2(Px1x2x1x3);#information of four symbols
Px4x3x3x2 = Px_4*Px_3*Px_3*Px_2;#product of probabilities
Ix4x3x3x2 = -log2(Px4x3x3x2);#information of four symbols
#results
print "i.Entropy (bits/symbol) = ",round(H_X,2)
print "ii.Amount of information contained in x1x2x1x3 (bits/symbol) = ",round(Ix1x2x1x3,2)
print "Thus,Ix1x2x1x3 < 7.4[=4*H_X]bits/symbol"
print "iii.Amount of information contained in x4x3x3x2 (bits/symbol) = ",round(Ix4x3x3x2,2)
print "\nThus we conclude that\nIx4x3x3x2 > 7.4[=4*H_X]bits/symbol"
#calculate the average rate of information convyed
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
n = 2.*10**6#elements od black and white TV picture
m = 16.#brightness levels of black and white TV picture
o = 32.#repeated rate of pictures per second
#calculations
Px_i = 1./m#probability of brightness levels of picture
H_X = 0;
for i in range (0,15):
H_Xi = (-1./(1./Px_i))*log2(1./(1./Px_i));
H_X = H_X +H_Xi
r = n*o#rate of symbols generated
R = r*math.ceil(H_X)#average rate of information convyed
#results
print "i. Average rate of information convyed (Mbits/seconds) = ",R/10**6
#calculate the average information rate
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
t_dot = .2#duration of dot symbol
t_dash = .6#duration of dash symbol
t_space = .2#time between the symbols
#wkt sum of the probability is 1 i.e P_dot + P_dash = 1 hence
#P_dot = 2*P_dash weget
P_dot = 2./3#probality of dot symbol
P_dash = 1./3#probality of dash symbol
#calculations
H_X = -P_dot*log2(P_dot)-P_dash*log2(P_dash);#entropy
T_s = P_dot*t_dot + P_dash*t_dash +t_space;#average time per symbol
r = 1/T_s;#average symbol rate
R = r*H_X;#average information rate of the telegraph sourece
#result
print "i.The average information rate of the telegraph source (bits/seconds) = ",round(R,3)
#calculate the entropy and information rate
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
#given symbols are equally likely all the symbols the probabilities are same
Px_1 = 1./8;#probability of first symbol
Px_2 = 1./8;#probability of second symbol
Px_3 = 3./8;#probability of third symbol
Px_4 = 3./8;#probability of fourth symbol
r = 2#average symbol rate from problem 11.14
#calculaitons
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4);#entropy
R = H_X*r;#information rate
#results
print "i.Entropy (bits/symbol) = ",round(H_X,1)
print "ii.The information rate of all symbols (f_m bits/seconds) = ", round(R,1)
#calculate the information rate
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
#given symbols are equally likely all the symbols the probabilities are same
Px_1 = 1./4;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./4;#probability of third symbol
Px_4 = 1./4;#probability of fourth symbol
r = 2#average symbol rate from problem 11.14
#calculaitons
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4);#entropy
R = H_X*r;#information rate
#results
print "i.The information rate of all symbols (f_m bits/seconds) = ", R
#calculate the Entropy and rate of information
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1 = 1./2;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./8;#probability of third symbol
Px_4 = 1./16;#probability of fourth symbol
Px_4 = 1./16;#probability of fifth symbol
T_b = 1*10**-3#time required for emittion of each symbol
r = 1./(T_b)#symbol rate
#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_4*log2(1/Px_4);
R = r*H_X;#information rate
#results
print "i.Entropy of five symbols (bits/symbol) = ",H_X
print "ii.Rate of information (bits/sec) = ",R
#calculate the rate of information
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1 = 1./2;#probability of first symbol
Px_2 = 1./4;#probability of second symbol
Px_3 = 1./8;#probability of third symbol
Px_4 = 1./16;#probability of fourth symbol
Px_5 = 1./16;#probability of fifth symbol
r = 16#outcomes per second
#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_5*log2(1/Px_5);
R = r*H_X;#information rate
#result
print "Rate of information (bits/sec) = ",R
#calculate the rate of information
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1 = 1./4;#probability of first symbol
Px_2 = 1./5;#probability of second symbol
Px_3 = 1./5;#probability of third symbol
Px_4 = 1./10;#probability of fourth symbol
Px_5 = 1./10;#probability of fifth symbol
Px_6 = 1./20;#probability of sixth symbol
Px_7 = 1./20;#probability of seventh symbol
Px_8 = 1./20;#probability of eigith symbol
f_m = 10*10**3#freuency of tranamitting symbol
#calculations
H_X = Px_1*log2(1/Px_1) + Px_2*log2(1/Px_2) + Px_3*log2(1/Px_3) + Px_4*log2(1/Px_4) + Px_5*log2(1/Px_5) + Px_6*log2(1/Px_6)+ Px_7*log2(1/Px_7)+ Px_8*log2(1/Px_8);#entropy
f_s = 2*f_m#sampling frequency
r = f_s#sampling frequency equal to rate of transmission
R = r*H_X;#information rate
#result
print "Rate of information (bits/sec) = ",round(R,0)
print "Note:Their mistake in calculation of H_X in textbook"
#calculate the channel matrix
import numpy
#given
#from fig
P_X = numpy.matrix([.5, .5])#x matrix
P_Xd = numpy.matrix([[.5, 0],[0, .5]])#diagonal x matrix
#calculations
P_YX = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig
P_Y = P_X*P_YX#y matrix
P_XY = P_Xd * P_YX#xy matrix
#results
print "i.Channel matrix of the channelP_YX "
print(P_YX)
print "ii.a.P(y1) =",P_Y[0,0]
print " b.P(y2) = ",P_Y[0,1]
print "iii.a.P(x1,y2) = ",P_XY[0,1]
print " b.P(x2,y1) = ",P_XY[1,0]
#calculate the Channel matrix
import numpy
#given
P_X = numpy.matrix([.5, .5])#x matrix
#calculations
P_YX = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig\
P_ZY = numpy.matrix([[.9, .1],[.2, .8]]);#yx matrix representation of given fig
P_Y = P_X *P_YX#y matrix
P_ZX = P_YX * P_ZY#zx matrix
P_Z = P_X *P_ZX#z matrix
#results
print "i.Channel matrix of the channelP_ZX "
print(P_ZX)
print("Matrix P(Z)")
print(P_Z)
print "a.P(Z1) = ",P_Z[0,0]
print "b.P(Z2) = ",P_Z[0,1]
#calculate the probability associated with channel outputs
#given
import numpy
P_X = numpy.matrix([.5, .5])#x matrix
P_YX = numpy.matrix([[.8, .2, 0], [0, .2, .8]]);#yx matrix representation of given fig
#calculations
P_Y = P_X*P_YX;
#results
print "probability associated with the channel outputs for p=.2 is"
print(P_Y)
print "a.P(Y1) = ",P_Y[0,0]
print "b.P(Y2) = ",P_Y[0,1]
print "C.P(Y3) = ",P_Y[0,2]
#calculate the I_XY value
import numpy
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
#wkt P_Y = P_X*P_YX from previous problems
alfa = .5
P_1 = .1#probability for first case
P_2 = .5#probability for second case
#calculations
P_X = numpy.matrix([alfa, alfa]);
#first case
P_YX = ([[1-P_1, P_1],[P_1, 1-P_1]]);
P_Y1 = P_X*P_YX;
H_Y1 = -P_Y1[0,0]*log2(P_Y1[0,0])-P_Y1[0,1]*log2(P_Y1[0,1]);
Q_1 = P_1*log2(P_1) + (1-P_1)*log2(1-P_1)#from proof
I_XY1 = 1 + Q_1;
#second case
P_YX = ([[1-P_2, P_2],[P_2, 1-P_2]]);
P_Y2 = P_X*P_YX;
H_Y2 = -P_Y2[0,0]*log2(P_Y2[0,0])-P_Y2[0,1]*log2(P_Y2[0,1]);
Q_2 = P_2*log2(P_2) + (1-P_2)*log2(1-P_2)#from proof
I_XY2 = 1 + Q_2;
#results
print "I_XY for the first case = ",round(I_XY1,3)
print "I_XY for the second case = ",round(I_XY2,2)
#calculate the Entropy in all cases
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
a1 = 1.
a2 = 2.
a3 = .5
#calculations
H_X1 = log2(a1);#Entropy for first case
H_X2 = log2(a2);#Entropy for second case
H_X3 = log2(a3);#Entropy for third case
#results
print "i.Entropy for first case = ",H_X1
print "ii.Entropy for second case = ",H_X2
print "iii.Entropy for third case = ",H_X3
#calculate the capacity of channel
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
B = 4000.#bandwidth of AWGN channel
S = .1*10**-3#power of signal
neta = 2*10**-12#spectral dencity
N = neta*B;#power
#calculations
C = B * log2(1 + (S/N));#capacity of channel
#result
print "Capacity of channel (b/s) = ",round(C,0)
#calculate the information rate, bandwidth and S/N ratio
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
fm=4000. #Hz
tim=1.25
Pi=1/256.
SN=100.
#calculations
fs=2*fm
r=fs*tim
H=log2(1/Pi)
R=r*H
C = r* log2(1 + SN);#capacity of channel
SN2=(1/Pi-1)
B2=R/log2(1+SN)
#result
print "information rate of source (kb/s) = ",R/1000.
print "Capacity of channel (b/s) = ",round(C,0)
print "Final S/N ratio = ",SN2
print "bandwidth required (kHz) = ",round(B2/1000.,0)
#calculate the efficiency of code and code redundancy
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
#given
Px_1 = 0.9#probability of first symbol
Px_2 = 0.1#probability of second symbol
n1 = 1.#length of the code for x_1
n2 =1.#length of code for x_2
#calculations
#we know that the average code length L per symbol
L = Px_1*n1 + Px_2*n2#code length
H_X = -Px_1*log2(Px_1) - Px_2*log2(Px_2) #entropy
neta = H_X/L#efficiency
neta1 = neta*100#neta in percentage
gama = 1 - neta#redundancy
gama1 = gama*100#gama in percentage
#results
print "i.Efficiency of code (percent) = ",round(neta1,1)
print "ii.Code redundancy (percent) = ",round(gama1,1)
#calculate the efficiency of code and code redundancy
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
Px_1 = 0.81#probability of first symbol
Px_2 = .09#probability of second symbol
Px_3 = .09#probability of third symbol
Px_4 = 0.01#probability of forth symbol
n1 = 1.#length of code for a_1
n2 =2.#length of code for a_2
n3 = 3.#length of code for a_3
n4 = 3.#length of code for a_4
#calculations
#we know that the average code length L per symbol
L = Px_1*n1 + Px_2*n2 + Px_3*n3 + Px_4*n4 #code length
H_X = -Px_1*log2(Px_1) - Px_2*log2(Px_2) - Px_3*log2(Px_3) - Px_4*log2(Px_4)#entropy
neta = H_X/L#efficiency
neta1 = neta*100#neta in percentage
gama = 1 - neta#redundancy
gama1 = gama*100#gama in percentage
#results
print "i.Efficiency of code (percent) = ",round(neta1,1)
print "ii.Code redundancy (percent) = ",round(gama1,1)
#calculate the efficiency
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = 1./2#probability of first symbol
P_x2 = 1./4#probability of second symbol
P_x3 = 1./8#probability of third symbol
P_x4 = 1./8#probability of fouth symbol
n1 = 1.
n2 = 2.
n3 = 3.
n4 = 3.
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4;
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4;
neta = H_x/L;
P_neta = neta*100#efficiency in percentage
#results
print "Efficiency = ",neta
print "Efficiency in percentage (percent) = ",P_neta
#calculate the efficiency using shannon code and huffman code
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = .2#probability of first signal
P_x2 = .2#probability of second signal
P_x3 = .2#probability of third signal
P_x4 = .2#probability of fourth signal
P_x5 = .2#probability of fifth signal
n1 = 2.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 3.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4 + P_x5*I_x5;#entropy
L1 = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;
neta1 = H_x/L1;
P_neta1 = neta1*100#efficiency in percentage using Shannon Fano code
L2 = P_x1*n1 + P_x2*n2 + P_x3*n3 +P_x4*n4 +P_x5*n5
neta2 = H_x/L2;
P_neta2 = neta2*100#efficiency in percentage using huffman code
#results
print "Efficiency in percentage using Shannon Fano code (percent) = ",round(P_neta1,1)
print "Efficiency in percentage using huffman code (percent) = ",round(P_neta2,1)
#calculate the efficiency using shannon code and huffman code
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = .4#probability of first signal
P_x2 = .19#probability of second signal
P_x3 = .16#probability of third signal
P_x4 = .15#probability of fourth signal
P_x5 = .1#probability of fifth signal
n1 = 1.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 3.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4 + P_x5*I_x5;#entropy
L1 = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;
neta1 = H_x/L1;
P_neta1 = neta1*100#efficiency in percentage using Shannon Fano code
L2 = P_x1*1 + (P_x2 + P_x3 +P_x4 +P_x5 )*3
neta2 = H_x/L2;
P_neta2 = neta2*100#efficiency in percentage using huffman code
#results
print "Efficiency in percentage using Shannon Fano code (percent) = ",round(P_neta1,1)
print "Efficiency in percentage using huffman code (percent) = ",round(P_neta2,1)
print "Note: There is mistake in the textbook in calculation of L using SHannon Fano code"
#calculate the Efficiency
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = .05#probability of first signal
P_x2 = .15#probability of second signal
P_x3 = .2#probability of third signal
P_x4 = .05#probability of fourth signal
P_x5 = .15#probability of fifth signal
P_x6 = .3#probability of sixth signal
P_x7 = .1#probability of seventh signal
n1 = 4.#number of bits in code obtained from table given textbook
n2 = 3.#number of bits in code obtained from table given textbook
n3 = 2.#number of bits in code obtained from table given textbook
n4 = 4.#number of bits in code obtained from table given textbook
n5 = 3.#number of bits in code obtained from table given textbook
n6 = 2.#number of bits in code obtained from table given textbook
n7 = 3.#number of bits in code obtained from table given textbook
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
I_x6 = -log2(P_x6);
I_x7 = -log2(P_x7);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4 + P_x5*I_x5 + P_x6*I_x6 + P_x7*I_x7;#entropy
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5 + P_x6*n6 + P_x7*n7;
neta = (H_x*100)/L#Efficiency in percentage
#results
print "Efficiency in percentage (percent) = ",round(neta,2)
#calculate the Variance of codeword length
#given
P_x1 = .4#probability of first signal
P_x2 = .2#probability of second signal
P_x3 = .8#probability of third signal  NOTE(review): the five probabilities sum to 1.5, not 1 -- this looks like a transcription typo (perhaps .08 or .3); confirm against the textbook table before relying on the result
P_x4 = .08#probability of fourth signal
P_x5 = .02#probability of fifth signal
n1 = 2.#codeword length (bits) of first signal, from the table in the textbook
n2 = 3.#codeword length (bits) of second signal, from the table in the textbook
n3 = 1.#codeword length (bits) of third signal, from the table in the textbook
n4 = 4.#codeword length (bits) of fourth signal, from the table in the textbook
n5 = 4.#codeword length (bits) of fifth signal, from the table in the textbook
#calculations
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;#average codeword length per symbol
#variance = sum over symbols of P_i*(n_i - L)**2
sigmasquare = P_x1*(n1-L)**2 + P_x2*(n2-L)**2 +P_x3*(n3-L)**2 + P_x4*(n4-L)**2 +P_x5*(n5-L)**2;#Variance of codeword length
#results
print "Variance of codeword length = ",sigmasquare
#calculate the Entropy and Information rate
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = 1./2#probability of first signal
P_x2 = 1./4#probability of second signal
P_x3 = 1./8#probability of third signal
P_x4 = 1./16#probability of fourth signal
P_x5 = 1./32#probability of fifth signal
P_x6 = 1./32#probability of sixth signal
r = 16#message rate in outcomes per second
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
I_x5 = -log2(P_x5);
I_x6 = -log2(P_x6);
H_X = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4 + P_x5*I_x5 + P_x6*I_x6 #entropy
R = H_X*r#Information rate
#results
print "Entropy of the system (bits/message) = ",H_X
print "Information rate (bits/seconds) = ",R
#calculate the Entropy
import numpy
#given
import math
from math import log
def log2(x):
y=log(x)/log(2)
return y
P_x1 = .3#probability of first signal
P_x2 = .4#probability of second signal
P_x3 = .3#probability of third signal
P_YX = numpy.matrix([[.8, .2, 0],[0, .1, 0],[0, .3, 0.7]])#matrix obtained from the figure
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
H_X = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 #entropy
P_y1 = P_YX[0,0]*P_x1 + P_YX[0,1]*P_x1 + P_YX[0,2]*P_x1;
P_y2 = P_YX[1,0]*P_x2 + P_YX[1,1]*P_x2 + P_YX[1,2]*P_x2;
P_y3 = P_YX[2,0]*P_x3 + P_YX[2,1]*P_x3 + P_YX[2,2]*P_x3;
I_y1 = -log2(P_y1);
I_y2 = -log2(P_y2);
I_y3 = -log2(P_y3);
H_Y = -P_y1*I_y1 - P_y2*I_y2 - P_y3*I_y3 #entropy
#results
print " Entropy H(X) = ",round(H_X,3)
print "Entropy H(Y) = ",round(H_Y,3)
print " Note:There is mistake in the calculation of P_y3 in the textbook so their is change in entropy H_Y"
#calculate the Entropy of the second order extension
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
P_x1 = .7#probability of first signal
P_x2 = .15#probability of second signal
P_x3 = .15#probability of third signal
n = 2#second order extention
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3#entropy
H_x2 = n*H_x#entropy of second order extention
#results
print "Entropy of second order extension (bits/symbol) = ",round(H_x2,3)
#calculate the Entropy of the source
import math
from math import log
#given
def log2(x):
y=log(x)/log(2)
return y
#given
P_x1 = 1./3#probability of first signal
P_x2 = 1./6#probability of second signal
P_x3 = 1./4#probability of third signal
P_x4 = 1./4#probability of fourth signal
#calculations
I_x1 = -log2(P_x1);
I_x2 = -log2(P_x2);
I_x3 = -log2(P_x3);
I_x4 = -log2(P_x4);
H_x = P_x1*I_x1 + P_x2*I_x2 + P_x3*I_x3 + P_x4*I_x4 #entropy
#results
print "Entropy of the source (bits/symbol) = ",round(H_x,4)
#calculate the Average number of bits per message
#given
P_x1 = 1./2#probability of first signal
P_x2 = 1./4#probability of second signal
P_x3 = 1./8#probability of third signal
P_x4 = 1./16#probability of fourth signal
P_x5 = 1./16#probability of fifth signal
n1 = 1.#number of bits in code obtained from table given textbook
n2 = 2.#number of bits in code obtained from table given textbook
n3 = 3.#number of bits in code obtained from table given textbook
n4 = 4.#number of bits in code obtained from table given textbook
n5 = 4.#number of bits in code obtained from table given textbook
#calculations
L = P_x1*n1 + P_x2*n2 + P_x3*n3 + P_x4*n4 + P_x5*n5;#Average number of bits per message
#results
print "Average number of bits per message (bits) = ",L
#calculate the Information capacity of telephone channel
import math
from math import log,exp
#given
def log2(x):
y=log(x)/log(2)
return y
B = 3.4*10**3#bandwidth
SbyN = 30#signal to the noise ratio in dB
#calculations
SbyN1 = exp((SbyN/10)*log(10))#signal to noise ratio
C = B*log2(1+SbyN1);
#result
print "Information capacity of the telephone channel (kbps) = ",round(C/1000.,2)