% Source coding (Huffman)
clc;
x = input('Enter the number of symbols: ');
s = 1:x;
p = input('Enter the probabilities: ');
[dict,avglen] = huffmandict(s,p);        % probabilities must stay in symbol order
enco = huffmanenco(s,dict);
deco = huffmandeco(enco,dict);
len = length(p);
avg = avglen;
ent = 0;
for i = 1:len
    ent = ent + ((-1)*p(i)*log2(p(i)));
end
eff = ent/avg;
disp("input: " + s);
disp("encoded input: " + enco);
disp("decoded: " + deco);
disp("size of deco: " + size(deco));
disp("length: " + len);
disp("entropy: " + ent);
disp("avg len: " + avg);
disp("efficiency: " + eff);

% ASK / M-ary (BER simulation, implemented with binary PSK)
clc; clear all; close all;
m = 2;
snr = -5:0.5:10;
for i = 1:length(snr)
    x = randsrc(1,100000,[1,0]);
    y = pskmod(x,m);
    r = awgn(y,snr(i),'measured');
    z = pskdemod(r,m);
    [bn,br(i)] = biterr(x,z);
end
semilogy(snr,br,'-r');
xlabel('snr');
ylabel('biterr');
title('ASK');

% PAM
clc; clear all;
M = 2;
fs = 32; nsamp = 8; freq_sep = 8;        % FSK parameters, unused in this script
snr = -5:0.5:10;
for i = 1:length(snr)
    x = randsrc(1,100000,[1,0]);
    Y = pammod(x,M);
    r = awgn(Y,snr(i),'measured');       % awgn(signal, SNR in dB, 'measured' signal power)
    z = pamdemod(r,M);
    [bn,br(i)] = biterr(x,z);
end
semilogy(snr,br,'-r');
xlabel('snr');
ylabel('biterr');
title('PAM');

% CYCLIC CODES
clc; clear all; close all;
SNR = -20:1:20;
k = 4; n = 7;
for j = 1:1:length(SNR)
    msg = randsrc(10000,k,[0,1]);
    enc_sig = encode(msg,n,k,'cyclic/binary');
    msg_tx = pskmod(enc_sig,2);
    msg_rx = awgn(msg_tx,SNR(j));
    msg_demod = pskdemod(msg_rx,2);
    dec_sig = decode(msg_demod,n,k,'cyclic/binary');
    [errorbit,ratiobit] = biterr(msg,dec_sig);
    ber(j) = ratiobit;
end
semilogy(SNR,ber);
xlabel('SNR');
ylabel('BER analysis of cyclic code');

% CONVOLUTION CODE
clc; clear all; close all;
N = 2;                                   % coded bits per message bit (rate 1/2)
k = 3;                                   % constraint length (size of the shift register)
SNR = -20:20;
for j = 1:1:length(SNR)
    msg = randsrc(10000,1,[0 1]);        % convenc expects a binary vector
    t = poly2trellis(k,[7 5]);
    con_enc = convenc(msg,t);
    n_code = awgn(con_enc,SNR(j),'measured',244);   % (signal, SNR, measured power, seed)
    % map the noisy samples onto the 8 levels expected by the
    % 3-bit soft-decision Viterbi decoder below
    q_code = quantiz(n_code,[0.001,0.1,0.3,0.5,0.7,0.9,0.999]);
    tblen = 48;                          % traceback depth: trellis branches per traceback path
    delay = tblen;
    decod = vitdec(q_code,t,tblen,'cont','soft',3);
    [number,ratio] = biterr(decod(delay+1:end),msg(1:end-delay));
    ber(j) = ratio;
end
semilogy(SNR,ber);
xlabel('SNR');
ylabel('Bit Error Rate');

% HAMMING CODE
clc; clear all;
SNR = -20:1:20;
k = 4; n = 7;
for j = 1:1:length(SNR)
    msg = randsrc(10000,k,[0,1]);
    enc_sig = encode(msg,n,k,'hamming/binary');
    msg_tx = pskmod(enc_sig,2);
    msg_rx = awgn(msg_tx,SNR(j));
    msg_demod = pskdemod(msg_rx,2);
    dec_sig = decode(msg_demod,n,k,'hamming/binary');
    [errorbit,ratiobit] = biterr(msg,dec_sig);
    ber(j) = ratiobit;
end
semilogy(SNR,ber);
xlabel('SNR');
ylabel('BER analysis of Hamming Code');

% ADAPTIVE EQUALISER (LMS)
clear all; close all;
mse = [];
N = 1000;
sysorder = 20;
x = randn(N,1);                          % white input
b = fir1(sysorder-1,0.5);                % unknown FIR channel
n = 0.1*randn(N,1);                      % measurement noise
d = filter(b,1,x) + n;                   % desired signal
steps = [0.008,0.02,0.05];
for i = 1:length(steps)
    temp = 0;
    w = zeros(sysorder,1);
    for n = sysorder:N
        u = x(n:-1:n-sysorder+1);
        y(n) = w'*u;
        e(n) = d(n) - y(n);
        w = w + steps(i)*u*e(n);         % LMS weight update
        temp = temp + (e(n)^2);
        mse(i,n) = temp/n;
    end
end
plot(1:n,mse(1,:),'-b',1:n,mse(2,:),'-m',1:n,mse(3,:),'-r');
axis([1 N -0.2 0.5]);
title('OUTPUT');
xlabel('No of iterations');
ylabel('Mean square error');
legend('step size=0.008','step size=0.02','step size=0.05');
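A quick optional check for the LMS equaliser above (not part of the original listing): after adaptation the weight vector w should approximate the unknown FIR channel b returned by fir1, so plotting the two together gives a visual convergence check. A minimal sketch, meant to be run immediately after the script above while b and w are still in the workspace; note that w then holds the weights obtained with the last entry of steps.

% Hypothetical add-on: compare the final LMS weights with the FIR channel.
figure;
stem(b,'b');
hold on;
stem(w,'r--');                           % weights from the last step size in 'steps'
legend('true channel b','final LMS weights w');
title('LMS weights vs. unknown channel');
hold off;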
% HUFFMAN
clc;
p = [0.4 0.2 0.2 0.1 0.1];
n = length(p);
symbols = 1:n;
[dict,avglen] = huffmandict(symbols,p);
temp = dict;
t = dict(:,2);                           % code words, one cell per symbol
for i = 1:length(temp)
    temp{i,2} = num2str(temp{i,2});
end
disp('The Huffman code dictionary:');
disp(temp)

% Encoding
fprintf('Enter the symbols (between 1 and %d) in []\n',n);
sym = input(':');
encod = huffmanenco(sym,dict);
disp('The encoded output:');
disp(encod);

% Decoding
bits = input('Enter the bitstream in []: ');
decoded = huffmandeco(bits,dict);
disp('The symbols are:');
disp(decoded);

% Average length
avglen

% Entropy
Entropy = 0;
for k = 1:n
    Entropy = Entropy + (p(k)*log2(1/p(k)));
end
disp('Entropy')
disp(Entropy)

% Efficiency
Efficiency = Entropy/avglen

% Redundancy
Redundancy = (1 - Efficiency)

% Variance
for r = 1:n
    l(r) = length(t{r});                 % length of each code word
end
Variance = 0;
for m = 1:n
    Variance = Variance + (p(m)*((l(m)-avglen)^2));
end
Variance
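For the fixed distribution p = [0.4 0.2 0.2 0.1 0.1] used above, the console output can be checked by hand. The reference values below are rounded; the code-length variance is left open because it depends on which of the equally optimal trees huffmandict happens to build.

% Hand check for p = [0.4 0.2 0.2 0.1 0.1]:
%   Entropy     = -sum(p.*log2(p))   ~ 2.1219 bits/symbol
%   avglen      = 2.2 bits/symbol    (same for every optimal tree)
%   Efficiency  ~ 2.1219/2.2 ~ 0.9645
%   Redundancy  ~ 0.0355
%   Variance    depends on tie-breaking inside huffmandict (for example
%   0.16 for code lengths 2,2,2,3,3 and 1.36 for code lengths 1,2,3,4,4).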
% -------------------------------- Others --------------------------------

% Huffman coding
clc; clear all; close all;
n = input('Enter the number of symbols: ');
s = 1:n;
p = input('Enter the probabilities: ');
[dict,avelen] = huffmandict(s,p);
enc = huffmanenco(s,dict);
dec = huffmandeco(enc,dict);
ent = 0;
var = 0;
for i = 1:n
    L(i) = length(dict{i,2});
    ent = ent + (p(i)*log(1/p(i))/log(2));
    var = var + (p(i)*(L(i)-avelen)^2);
end
disp("Entropy: " + ent);
disp("Avg length: " + avelen);
disp("Efficiency: " + (ent/avelen*100) + "%");
disp("Redundancy: " + ((1-(ent/avelen))*100) + "%");
disp("Code variance: " + var);

% Cyclic Code Program
clc; clear all;
SNR = -20:1:20;
k = 4; n = 7;
for j = 1:1:length(SNR)
    msg = randsrc(10000,k,[0,1]);
    enc_sig = encode(msg,n,k,'cyclic/binary');
    msg_tx = pskmod(enc_sig,2);
    msg_rx = awgn(msg_tx,SNR(j));
    msg_demod = pskdemod(msg_rx,2);
    dec_sig = decode(msg_demod,n,k,'cyclic/binary');
    [errorbit,ratiobit] = biterr(msg,dec_sig);
    ber(j) = ratiobit;
end
semilogy(SNR,ber);
xlabel('SNR');
ylabel('BER analysis of Cyclic Code');

% Hamming Code Program
clc; clear all;
SNR = -20:1:20;
k = 4; n = 7;
for j = 1:1:length(SNR)
    msg = randsrc(10000,k,[0,1]);
    enc_sig = encode(msg,n,k,'hamming/binary');
    msg_tx = pskmod(enc_sig,2);
    msg_rx = awgn(msg_tx,SNR(j));
    msg_demod = pskdemod(msg_rx,2);
    dec_sig = decode(msg_demod,n,k,'hamming/binary');
    [errorbit,ratiobit] = biterr(msg,dec_sig);
    ber(j) = ratiobit;
end
semilogy(SNR,ber);
xlabel('SNR');
ylabel('BER analysis of Hamming Code');

% Convolutional Code Program
clc; clear all;
n = 2; k = 1;                            % rate k/n = 1/2
SNR = -20:1:20;
for j = 1:1:length(SNR)
    msg = randsrc(10000,k,[0,1]);
    t = poly2trellis(3,[7 5]);
    con_enc = convenc(msg,t);
    n_code = awgn(con_enc,SNR(j),'measured',244);
    q_code = quantiz(n_code,[0.001,0.1,0.3,0.5,0.7,0.9,0.999]);   % 8 levels for 3-bit soft decisions
    tblen = 48;
    delay = tblen;
    decod = vitdec(q_code,t,tblen,'cont','soft',3);
    [number,ratio] = biterr(decod(delay+1:end),msg(1:end-delay));
    ber(j) = ratio;
end
semilogy(SNR,ber);
legend('r=1/2');
xlabel('SNR');
ylabel('BER analysis of Convolutional Code');
title('Convolutional Code')

% ASK (simulated here with 2-PAM)
clc; close all; clear all;
M = 2;
snr = -5:0.5:10;
for i = 1:length(snr)
    x = randsrc(1,100000,[1,0]);
    y = pammod(x,M);
    r = awgn(y,snr(i),'measured');
    z = pamdemod(r,M);
    [bn,br(i)] = biterr(x,z);
end
semilogy(snr,br,'-r');
xlabel('snr');
ylabel('biterr');
title('PAM');

% PSK
clc; close all; clear all;
M = 2;
snr = -5:0.5:10;
for i = 1:length(snr)
    x = randsrc(1,10000,[1,0]);
    y = pskmod(x,M);
    r = awgn(y,snr(i),'measured');
    z = pskdemod(r,M);
    [bn,br(i)] = biterr(x,z);
end
semilogy(snr,br,'-r');
xlabel('snr');
ylabel('biterr');
title('PSK');
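The uncoded BPSK curves in this collection can be cross-checked against the closed-form AWGN result. A minimal sketch, assuming berawgn from the Communications Toolbox is available and treating the per-sample SNR passed to awgn as Eb/N0 in dB (a reasonable reading for BPSK at one sample per bit); run it right after one of the BPSK scripts so their figure is still current.

% Hypothetical cross-check: overlay the theoretical BPSK bit error rate.
snr = -5:0.5:10;                         % same axis as the simulations above
ber_theory = berawgn(snr,'psk',2,'nondiff');
hold on;
semilogy(snr,ber_theory,'--b');
legend('simulated','theoretical BPSK');
hold off;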
% FSK
clc; close all; clear all;
M = 2;
fs = 32;
nsamp = 8;
freq_sep = 8;
snr = -5:0.5:10;
for i = 1:length(snr)
    x = randsrc(1,100000,[1,0]);
    Y = fskmod(x,M,freq_sep,nsamp,fs);
    r = awgn(Y,snr(i),'measured');
    z = fskdemod(r,M,freq_sep,nsamp,fs);
    [bn,br(i)] = biterr(x,z);
end
semilogy(snr,br,'-r');
xlabel('snr');
ylabel('biterr');
title('FSK');

% Adaptive Equalizer (LMS)
clear all; close all;
mse = [];
N = 1000;
sysorder = 20;
x = randn(N,1);
b = fir1(sysorder-1,0.5);
n = 0.1*randn(N,1);
d = filter(b,1,x) + n;
steps = [0.008,0.02,0.05];
for i = 1:length(steps)
    temp = 0;
    w = zeros(sysorder,1);
    for n = sysorder:N
        u = x(n:-1:n-sysorder+1);
        y(n) = w'*u;
        e(n) = d(n) - y(n);
        w = w + steps(i)*u*e(n);
        temp = temp + (e(n)^2);
        mse(i,n) = temp/n;
    end
end
plot(1:n,mse(1,:),'-b',1:n,mse(2,:),'-m',1:n,mse(3,:),'-r');
axis([1 N -0.02 0.05]);
title('output');
xlabel('No of iterations');
ylabel('Mean square error');
legend('step size=0.008','step size=0.02','step size=0.05');
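Similarly, the (7,4) cyclic and Hamming simulations in this collection can be compared against an analytical bound. A rough sketch, assuming bercoding from the Communications Toolbox and again reading the SNR axis as Eb/N0 in dB without adjusting for the code rate, so the comparison is only indicative; run it straight after one of the (7,4) scripts while their figure is current.

% Hypothetical cross-check: hard-decision bound for the (7,4) code
% (minimum distance dmin = 3 for both the Hamming and the cyclic version).
EbNo = 0:1:20;                           % the bound is only meaningful at moderate-to-high SNR
ber_bound = bercoding(EbNo,'block','hard',7,4,3);
hold on;
semilogy(EbNo,ber_bound,'--k');
legend('simulated','analytical bound');
hold off;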