(Created page with "<source lang="lisp"> x=1:n for j=1:100 j=j+1 end </source>") |
(code posted) |
||
Line 1: | Line 1: | ||
− | |||
− | + | ||
− | + | ==Function to compute Bayes error (based on Monte-Carlo method)== | |
− | + | <source lang="matlab"> | |
+ | |||
+ | function [ e ] = BayesErrorMonteCarlo( n, P1, M1, Sigma1, P2, M2, Sigma2, N ) | ||
+ | %Compute Bayes Error using Monte-Carlo method | ||
+ | %J. Zhou | ||
+ | |||
+ | % ....Input Parameters | ||
+ | % n --- data dimension | ||
+ | % P1 --- prior probability of class 1 | ||
+ | % M1 --- mean of class 1 data | ||
+ | % Sigma1 --- covariance of class 1 data | ||
+ | % p2 --- prior probability of class 2 | ||
+ | % M2 --- mean of class 2 data | ||
+ | % Sigma2 --- covariance of class 2 data | ||
+ | % N --- number of samples using in Monte-Carlo | ||
+ | |||
+ | %....Output Parameters | ||
+ | % e --- Bayes Error | ||
+ | |||
+ | x1 = randn(n,N); | ||
+ | y1 = discFunc(x1, P1, M1, Sigma1, P2, M2, Sigma2, 1); | ||
+ | e1 = y1/N; | ||
+ | |||
+ | |||
+ | temp = randn(n,N); | ||
+ | x2 = sqrt(Sigma2)*temp + repmat(M2, 1, N); | ||
+ | y2 = discFunc(x2, P1, M1, Sigma1, P2, M2, Sigma2, 2); | ||
+ | e2 = y2/N; | ||
+ | |||
+ | % Bayes Error e | ||
+ | e = P1*e1+P2*e2; | ||
+ | |||
+ | |||
end | end | ||
+ | </source> | ||
+ | |||
+ | ==Function #1 used in previous code== | ||
+ | <source lang="matlab"> | ||
+ | function [ z ] = discFunc(x,P1,M1,Sigma1,P2,M2,Sigma2, CFlag) | ||
+ | %compute 3.119 and return number of samples that are misclassified | ||
+ | %J. Zhou | ||
+ | |||
+ | [N,M] = size(x); %M samples | ||
+ | |||
+ | invSigma1 = inv(Sigma1); | ||
+ | invSigma2 = inv(Sigma2); | ||
+ | temp = 0.5*log(det(Sigma1)/det(Sigma2)) - log(P1/P2); | ||
+ | |||
+ | y = zeros(1,M); | ||
+ | |||
+ | for i = 1:M | ||
+ | |||
+ | if 0.5*(x(:,i)-M1)'*invSigma1*(x(:,i)-M1) - ... | ||
+ | 0.5*(x(:,i)-M2)'*invSigma2*(x(:,i)-M2) + temp > 0 | ||
+ | if CFlag == 1 | ||
+ | y(i) = 1; | ||
+ | else | ||
+ | y(i) = 0; | ||
+ | end | ||
+ | else | ||
+ | if CFlag == 1 | ||
+ | y(i) = 0; | ||
+ | else | ||
+ | y(i) = 1; | ||
+ | end | ||
+ | end | ||
+ | |||
+ | end | ||
+ | |||
+ | z = sum(y); | ||
+ | |||
+ | end | ||
+ | </source> | ||
+ | ---- | ||
+ | ==Test Function == | ||
+ | <source lang="matlab"> | ||
+ | %% ECE662 | ||
+ | % This code tests BayesErrorMonteCarlo.m using a data set with known | ||
+ | % Bayes Error | ||
+ | % J. Zhou | ||
+ | |||
+ | close all; | ||
+ | clear all; | ||
+ | |||
+ | % test data using Data I-^ ; Fukunaga page 45 | ||
+ | n = 8; %data dimension | ||
+ | |||
+ | P1 = 0.5; % prior probability for class 1 | ||
+ | M1 = zeros(n, 1); %mean of class 1 data | ||
+ | Sigma1 = eye(n,n); %covariance of class 1 data | ||
+ | |||
+ | P2 = 0.5; % prior probability for class 2 | ||
+ | M2 = [3.86, 3.10, 0.84, 0.84, 1.64, 1.08, 0.26, 0.01]'; | ||
+ | Sigma2 = diag([8.41, 12.06, 0.12, 0.22, 1.49, 1.77, 0.35, 2.73]); | ||
+ | |||
+ | |||
+ | %Number of samples for Monte-Carlo, start with 1 million, run a few times | ||
+ | %to see how much computed Bayes error vary, if it varies quite bit, then | ||
+ | %increase the number of samples, do this until Bayes error become | ||
+ | %relatively stable. | ||
+ | N = 1000000; | ||
+ | |||
+ | e = BayesErrorMonteCarlo(n, P1, M1, Sigma1, P2, M2, Sigma2, N ); | ||
+ | |||
+ | % Bayes Error in this case should 1.46% | ||
+ | sprintf('Bayes Error in this case should be %.3f', 0.018) | ||
+ | sprintf('Bayes Error computed is %.3f', e) | ||
</source> | </source> |
Revision as of 18:46, 4 February 2016
Function to compute Bayes error (based on Monte-Carlo method)
function [ e ] = BayesErrorMonteCarlo( n, P1, M1, Sigma1, P2, M2, Sigma2, N ) %Compute Bayes Error using Monte-Carlo method %J. Zhou % ....Input Parameters % n --- data dimension % P1 --- prior probability of class 1 % M1 --- mean of class 1 data % Sigma1 --- covariance of class 1 data % p2 --- prior probability of class 2 % M2 --- mean of class 2 data % Sigma2 --- covariance of class 2 data % N --- number of samples using in Monte-Carlo %....Output Parameters % e --- Bayes Error x1 = randn(n,N); y1 = discFunc(x1, P1, M1, Sigma1, P2, M2, Sigma2, 1); e1 = y1/N; temp = randn(n,N); x2 = sqrt(Sigma2)*temp + repmat(M2, 1, N); y2 = discFunc(x2, P1, M1, Sigma1, P2, M2, Sigma2, 2); e2 = y2/N; % Bayes Error e e = P1*e1+P2*e2; end
Function #1 used in previous code
function [ z ] = discFunc(x,P1,M1,Sigma1,P2,M2,Sigma2, CFlag) %compute 3.119 and return number of samples that are misclassified %J. Zhou [N,M] = size(x); %M samples invSigma1 = inv(Sigma1); invSigma2 = inv(Sigma2); temp = 0.5*log(det(Sigma1)/det(Sigma2)) - log(P1/P2); y = zeros(1,M); for i = 1:M if 0.5*(x(:,i)-M1)'*invSigma1*(x(:,i)-M1) - ... 0.5*(x(:,i)-M2)'*invSigma2*(x(:,i)-M2) + temp > 0 if CFlag == 1 y(i) = 1; else y(i) = 0; end else if CFlag == 1 y(i) = 0; else y(i) = 1; end end end z = sum(y); end
Test Function
%% ECE662 % This code tests BayesErrorMonteCarlo.m using a data set with known % Bayes Error % J. Zhou close all; clear all; % test data using Data I-^ ; Fukunaga page 45 n = 8; %data dimension P1 = 0.5; % prior probability for class 1 M1 = zeros(n, 1); %mean of class 1 data Sigma1 = eye(n,n); %covariance of class 1 data P2 = 0.5; % prior probability for class 2 M2 = [3.86, 3.10, 0.84, 0.84, 1.64, 1.08, 0.26, 0.01]'; Sigma2 = diag([8.41, 12.06, 0.12, 0.22, 1.49, 1.77, 0.35, 2.73]); %Number of samples for Monte-Carlo, start with 1 million, run a few times %to see how much computed Bayes error vary, if it varies quite bit, then %increase the number of samples, do this until Bayes error become %relatively stable. N = 1000000; e = BayesErrorMonteCarlo(n, P1, M1, Sigma1, P2, M2, Sigma2, N ); % Bayes Error in this case should 1.46% sprintf('Bayes Error in this case should be %.3f', 0.018) sprintf('Bayes Error computed is %.3f', e)