
Submitted by: Nishant Kumar

Roll no: 213ec5246


%% Average code word length, entropy, and efficiency of a Huffman code
clc; clear; close all;
p = [0.1 0.05 0.21 0.07 0.02 0.2 0.2 0.15]    % source symbol probabilities
pl = length(p);
symbols = 1:pl;
disp('Average Length');
[dict,avglen] = huffmandict(symbols,p)        % Huffman dictionary and average code word length
h = zeros(1,pl);
for i = 1:pl
    h(i) = -p(i)*log2(p(i));                  % self-information of symbol i, weighted by p(i)
end
disp('Entropy'); ent = sum(h)                 % source entropy in bits/symbol
disp('Efficiency'); effi = (ent/avglen)*100   % coding efficiency in percent


Output:

p =

Columns 1 through 6

0.1000 0.0500 0.2100 0.0700 0.0200 0.2000

Columns 7 through 8

0.2000 0.1500

Average Length

dict =

[1] [1x3 double]
[2] [1x5 double]
[3] [1x2 double]
[4] [1x4 double]
[5] [1x5 double]
[6] [1x3 double]
[7] [1x2 double]
[8] [1x3 double]


avglen =

2.8000

Entropy

ent =

2.7419

Efficiency

effi =

97.9236
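As a quick check on these numbers: with H = 2.7419 bits/symbol and an average length of 2.8 bits/symbol, the efficiency is 2.7419/2.8 * 100 ≈ 97.92%. The per-symbol loop above can also be written as a single vectorized line; this is a minimal equivalent sketch, not part of the original program:

% Vectorized entropy and efficiency check (equivalent to the loop above)
p = [0.1 0.05 0.21 0.07 0.02 0.2 0.2 0.15];
ent  = -sum(p.*log2(p));    % 2.7419 bits/symbol
effi = ent/2.8*100;         % 97.9236, matching the output above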




%% Huffman codes for the Kth extension of a source

clc; clear; close all;
ip_prob = [0.2 0.3 0.1 0.4];                  % probabilities of the original source
k_ext = input('enter Kth extension=')         % unsuppressed so the value is echoed
% Build the Kth-extension probabilities: each extension symbol is a
% k-tuple of source symbols, so its probability is the product of the
% individual symbol probabilities (k_ext-1 expansion passes).
for k = 1:k_ext-1
    op_prob = [];
    for idx = 1:length(ip_prob)
        op_prob = [op_prob ip_prob(idx)*ip_prob]; %#ok<AGROW>
    end
    ip_prob = op_prob;
end
pl = length(ip_prob);
symbols = 1:pl;
disp('Average Length');
[dict,avglen] = huffmandict(symbols,ip_prob)
h = zeros(1,pl);
for i = 1:pl
    h(i) = -ip_prob(i)*log2(ip_prob(i));      % self-information of extension symbol i
end
disp('Entropy'); ent = sum(h)                 % entropy of the extended source
disp('Efficiency'); effi = (ent/avglen)*100   % efficiency of the extension code

Output:

enter Kth extension=1

k_ext =

1

Average Length

dict =

[1] [1x3 double]
[2] [1x2 double]
[3] [1x3 double]
[4] [ 1]


avglen =

1.9000

Entropy

ent =

1.8464

Efficiency

effi =

97.1810
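With k = 1 the extension is just the source itself, so the efficiency matches the first-order figure of about 97.18%. By Shannon's source coding theorem, coding longer extensions drives the efficiency toward 100%. The sketch below reruns the same procedure in a loop to print the efficiency for the first few extensions; it is an illustrative rewrite (kron reproduces the nested product loop above), not part of the original program:

% Efficiency of the Huffman code for the first few extensions
p = [0.2 0.3 0.1 0.4];
ext_prob = p;
for k = 1:3
    [~,avglen] = huffmandict(1:length(ext_prob), ext_prob);
    ent = -sum(ext_prob.*log2(ext_prob));         % entropy per k-tuple
    fprintf('k = %d: efficiency = %.4f %%\n', k, ent/avglen*100);
    ext_prob = kron(ext_prob, p);                 % probabilities of the (k+1)th extension
end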



log2(N) vs N


Program

N = 2:8;            % number of equiprobable symbols
x = log2(N);        % log2(N) for each N (vectorized; replaces the element-wise loop)
plot(N,x)           % log2(N) on the y-axis versus N on the x-axis, matching the title
title('log2(N) vs N')
xlabel('N')
ylabel('log2(N)')
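log2(N) is also the entropy of a source with N equiprobable symbols, which is why the curve gains exactly one bit each time N doubles. A minimal check (illustrative, not part of the original program):

% Entropy of a uniform N-symbol source equals log2(N)
N = 8;
p = ones(1,N)/N;                   % N equiprobable symbols
ent = -sum(p.*log2(p));            % 3 bits/symbol for N = 8
fprintf('H = %.4f, log2(N) = %.4f\n', ent, log2(N));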


Plot:

[Figure: log2(N) vs N]

N-point Gaussian
Program
N = input('enter the value of N \n');
x = zeros(1,N+1);                        % preallocate the N+1 samples
t = -.5:1/N:.5;                          % N+1 sample points on [-0.5, 0.5]
for i = 1:N+1
    x(i) = 1/sqrt(2*pi)*exp(-t(i)^2/2);  % zero-mean, unit-variance Gaussian pdf
end
plot(t,x)
title('N point gaussian')
xlabel('t')
ylabel('amplitude')
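The loop can be collapsed into one vectorized expression. The sketch below is an alternative formulation, not the original program; it also widens the interval, since [-0.5, 0.5] only covers the flat top of the bell curve:

% Vectorized N-point standard (zero-mean, unit-variance) Gaussian
N = 100;
t = linspace(-4, 4, N+1);          % wider interval shows the full bell shape
x = exp(-t.^2/2)/sqrt(2*pi);       % same pdf as the loop above
plot(t, x)
title('N point gaussian')
xlabel('t'); ylabel('amplitude')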
