algorithmlda.java,v
d62 216
a277 8
// set the step index
step_index_d = 0;
// append message to process box
pro_box_d.appendMessage((String)description_d.get(step_index_d));
// exit initialize
return true;
d279 8
a286 2
boolean step1()
d288 1
a288 22
// Debug
//System.out.println(algo_id + " step1()");
pro_box_d.setProgressMin(0);
pro_box_d.setProgressMax(20);
pro_box_d.setProgressCurr(0);
// append message to process box
output_panel_d.addOutput(set1_d, Classify.PTYPE_INPUT,
data_points_d.color_dset1);
output_panel_d.addOutput(set2_d, Classify.PTYPE_INPUT,
data_points_d.color_dset2);
output_panel_d.addOutput(set3_d, Classify.PTYPE_INPUT,
data_points_d.color_dset3);
output_panel_d.addOutput(set4_d, Classify.PTYPE_INPUT,
data_points_d.color_dset4);
// step 1 completed
pro_box_d.setProgressCurr(20);
output_panel_d.repaint();
return true;
d290 14
a303 2
boolean step2()
d305 3
a307 43
// Debug
//System.out.println(algo_id + " step2()");
pro_box_d.setProgressMin(0);
pro_box_d.setProgressMax(20);
pro_box_d.setProgressCurr(0);
computeMeans();
// determine the within class scatter matrix
//
withinClass(W);
// determine the between class scatter matrix
//
betweenClass(B);
// determine the ratio of the between class scatter matrix
// to the within class scatter matrix
//
W.invertMatrix(invW);
invW.multMatrix(B, S);
// transform the samples from all data sets
//
transformLDA(data_points_d, S);
displayMatrices();
// display means
//----
output_panel_d.addOutput(point_means_d, Classify.PTYPE_OUTPUT_LARGE, Color.black);
// display support vectors
//----
output_panel_d.addOutput(support_vectors_d, Classify.PTYPE_INPUT, Color.black);
// step 2 completed
//
pro_box_d.setProgressCurr(20);
output_panel_d.repaint();
return true;
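// The Matrix operations used in step2() above (invertMatrix, multMatrix)
// belong to a Matrix class that is not part of this file. As a sketch only,
// and assuming nothing beyond the 2x2 matrices this algorithm works with,
// the two operations amount to the closed forms below; the real Matrix
// implementation may well differ.
//
static double[][] invert2x2(double[][] a)
{
    // closed-form inverse of a 2x2 matrix (assumes a non-zero determinant)
    double det = a[0][0] * a[1][1] - a[0][1] * a[1][0];
    return new double[][] {
        {  a[1][1] / det, -a[0][1] / det },
        { -a[1][0] / det,  a[0][0] / det } };
}

static double[][] mult2x2(double[][] a, double[][] b)
{
    // plain 2x2 matrix product c = a * b
    double[][] c = new double[2][2];
    for (int i = 0; i < 2; i++)
        for (int j = 0; j < 2; j++)
            c[i][j] = a[i][0] * b[0][j] + a[i][1] * b[1][j];
    return c;
}
//
// With these, S = inv(W) * B is the ratio of the between class scatter to
// the within class scatter; its eigenvectors define the LDA projection used
// by transformLDA().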
d309 8
a316 2
boolean step3()
d318 1
a318 26
// Debug
//System.out.println(algo_id + " step3()");
pro_box_d.setProgressMin(0);
pro_box_d.setProgressMax(20);
pro_box_d.setProgressCurr(0);
// compute the decision regions
//----
computeDecisionRegions();
// compute errors
//
computeErrors();
// display the decision regions
//----
output_panel_d.addOutput( decision_regions_d, Classify.PTYPE_INPUT, new Color(255, 200, 0));
//Color.black);
output_panel_d.repaint();
return true;
d320 2
a321 2
// method: withinClass
d323 3
a325 3
// arguments:
// Data d: input data points
// Matrix M: within class scatter matrix
d327 5
a331 1
// return : none
d333 8
a340 1
// this method determines the within class scatter matrix
d342 5
a346 1
public void withinClass(Matrix M)
d348 1
a348 159
// declare local variables
//
int size = 0;
double x[] = null;
double y[] = null;
DisplayScale scale = output_panel_d.disp_area_d.getDisplayScale();
// declare the covariance object
//
Covariance cov = new Covariance();
// declare local matrices
//
Matrix M1 = new Matrix();
Matrix M2 = new Matrix();
Matrix M3 = new Matrix();
Matrix M4 = new Matrix();
// compute the probabilities of each data set
//
double maxsamples = set1_d.size() + set2_d.size() + set3_d.size() + set4_d.size();
double p1 = set1_d.size() / maxsamples;
double p2 = set2_d.size() / maxsamples;
double p3 = set3_d.size() / maxsamples;
double p4 = set4_d.size() / maxsamples;
// get the first data set size
//
size = set1_d.size();
// initialize arrays to store the samples
//
x = new double[size];
y = new double[size];
// set up the initial random vectors i.e., the vectors of
// X and Y coordinate points from the display
//
for (int i = 0; i < size; i++)
{
MyPoint p = (MyPoint)set1_d.elementAt(i);
x[i] = p.x;
y[i] = p.y;
}
// compute the covariance matrix of the first data set
//
M1.row = M1.col = 2;
M1.Elem = new double[2][2];
M1.resetMatrix();
if (size > 0)
{
M1.Elem = cov.computeCovariance(x, y);
}
// get the second data set size
//
size = set2_d.size();
// initialize arrays to store the samples
//
x = new double[size];
y = new double[size];
// set up the initial random vectors i.e., the vectors of
// X and Y coordinate points from the display
//
for (int i = 0; i < size; i++)
{
MyPoint p = (MyPoint)set2_d.elementAt(i);
x[i] = p.x;
y[i] = p.y;
}
// compute the covariance matrix of the second data set
//
M2.row = M2.col = 2;
M2.Elem = new double[2][2];
M2.resetMatrix();
if (size > 0)
{
M2.Elem = cov.computeCovariance(x, y);
}
// get the third data set size
//
size = set3_d.size();
// initialize arrays to store the samples
//
x = new double[size];
y = new double[size];
// set up the initial random vectors i.e., the vectors of
// X and Y coordinate points from the display
//
for (int i = 0; i < size; i++)
{
MyPoint p = (MyPoint)set3_d.elementAt(i);
x[i] = p.x;
y[i] = p.y;
}
// compute the covariance matrix of the third data set
//
M3.row = M3.col = 2;
M3.Elem = new double[2][2];
M3.resetMatrix();
if (size > 0)
{
M3.Elem = cov.computeCovariance(x, y);
}
// get the fourth data set size
//
size = set4_d.size();
// initialize arrays to store the samples
//
x = new double[size];
y = new double[size];
// set up the initial random vectors i.e., the vectors of
// X and Y coordinate points from the display
//
for (int i = 0; i < size; i++)
{
MyPoint p = (MyPoint)set4_d.elementAt(i);
x[i] = p.x;
y[i] = p.y;
}
// compute the covariance matrix of the fourth data set
//
M4.row = M4.col = 2;
M4.Elem = new double[2][2];
M4.resetMatrix();
if (size > 0)
{
M4.Elem = cov.computeCovariance(x, y);
}
// compute the within class scatter matrix
//
M.row = M.col = 2;
M.Elem = new double[2][2];
M.resetMatrix();
M.addMatrix(M1);
M.addMatrix(M2);
M.addMatrix(M3);
M.addMatrix(M4);
CLDA = M;
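// The Covariance class used above is also defined outside this file. As a
// sketch (assumption), computeCovariance(x, y) presumably returns the 2x2
// sample covariance of the x and y coordinates, roughly as below; whether
// the real class divides by n or by n - 1 is not visible from this file.
//
static double[][] sampleCovariance(double[] x, double[] y)
{
    int n = x.length;

    // coordinate means
    double mx = 0.0;
    double my = 0.0;
    for (int i = 0; i < n; i++)
    {
        mx += x[i];
        my += y[i];
    }
    mx /= n;
    my /= n;

    // accumulate centered second moments
    double sxx = 0.0, sxy = 0.0, syy = 0.0;
    for (int i = 0; i < n; i++)
    {
        sxx += (x[i] - mx) * (x[i] - mx);
        sxy += (x[i] - mx) * (y[i] - my);
        syy += (y[i] - my) * (y[i] - my);
    }
    return new double[][] { { sxx / n, sxy / n },
                            { sxy / n, syy / n } };
}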
d350 12
d363 12
a374 11
// method: betweenClass
//
// arguments:
// Data d: input data points
// Matrix M: between class scatter matrix
//
// return : none
//
// this method determines the between class scatter matrix for
// the class independent linear discrimination algorithm
//
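// The body of betweenClass() lies in a region this delta does not show. As a
// sketch (assumption), the class-independent between class scatter is
// typically the prior-weighted scatter of the class means about the global
// mean; the helper below, with its hypothetical means/priors/globalMean
// arguments, only illustrates that formula and is not the method that
// actually follows in the full file.
//
public void betweenClassSketch(double[][] means, double[] priors,
                               double[] globalMean, Matrix M)
{
    M.row = M.col = 2;
    M.Elem = new double[2][2];
    M.resetMatrix();

    for (int c = 0; c < means.length; c++)
    {
        double dx = means[c][0] - globalMean[0];
        double dy = means[c][1] - globalMean[1];

        // accumulate p_c * (m_c - m)(m_c - m)^T
        M.Elem[0][0] += priors[c] * dx * dx;
        M.Elem[0][1] += priors[c] * dx * dy;
        M.Elem[1][0] += priors[c] * dy * dx;
        M.Elem[1][1] += priors[c] * dy * dy;
    }
}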
d608 10
a617 9
// method: displayMatrices
//
// arguments:
// Data d: input data point
//
// return : none
//
// display two matrices
//
d675 13
a687 13
// method: transformLDA
//
// arguments:
// Data d: input data point
// Matrix S: between class to within class ratio
//
// return : none
//
// this method transforms a given set of points to a new space
// using the class independent linear discrimination analysis algorithm
//
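// The body of transformLDA() is likewise elided here. As a sketch
// (assumption), the class-independent transform projects every sample onto
// the eigenvectors of S = inv(W) * B; for 2-D data the leading eigenvector
// of the 2x2 matrix can be found in closed form. The helper below is
// hypothetical and only illustrates that computation.
//
static double[] leadingEigenvector2x2(double[][] s)
{
    // eigenvalues solve lambda^2 - trace * lambda + det = 0; take the larger root
    double trace = s[0][0] + s[1][1];
    double det   = s[0][0] * s[1][1] - s[0][1] * s[1][0];
    double disc  = Math.max(trace * trace - 4.0 * det, 0.0);
    double lambda = 0.5 * (trace + Math.sqrt(disc));

    // two algebraically equivalent eigenvector forms; keep the better conditioned one
    double v1x = s[0][1];
    double v1y = lambda - s[0][0];
    double v2x = lambda - s[1][1];
    double v2y = s[1][0];
    double vx, vy;
    if (v1x * v1x + v1y * v1y >= v2x * v2x + v2y * v2y)
    {
        vx = v1x;
        vy = v1y;
    }
    else
    {
        vx = v2x;
        vy = v2y;
    }
    if (vx == 0.0 && vy == 0.0)
    {
        vx = 1.0;   // s is a multiple of the identity; any direction works
    }

    double norm = Math.sqrt(vx * vx + vy * vy);
    return new double[] { vx / norm, vy / norm };
}
//
// Each point (x, y) would then map to the scalar v[0] * x + v[1] * y along
// that direction.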
d749 10
a758 8
// method: computeDecisionRegions
//
// arguments: none
// return : none
//
// this method computes the line of discrimination for class independent LDA
//
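// The body of computeDecisionRegions() also falls in an elided region.
// Judging from the error-counting code further below, output_canvas_d[i][j]
// holds the winning class label (0-3) for each display cell. The sketch
// below (assumption: output_canvas_d is an int grid indexed [x][y], and the
// v/classMeans/incrementX/incrementY arguments are hypothetical) labels each
// cell with the class whose projected mean is nearest to the projected cell
// centre; the actual method may draw the discrimination lines differently.
//
void fillDecisionGrid(double[] v, double[][] classMeans,
                      DisplayScale scale, double incrementX, double incrementY)
{
    for (int i = 0; i < output_canvas_d.length; i++)
    {
        for (int j = 0; j < output_canvas_d[i].length; j++)
        {
            // centre of this display cell in data coordinates
            double x = scale.xmin + (i + 0.5) * incrementX;
            double y = scale.ymin + (j + 0.5) * incrementY;

            // project the cell centre onto the LDA direction v
            double proj = v[0] * x + v[1] * y;

            // pick the class whose projected mean is nearest
            int best = 0;
            double bestDist = Double.MAX_VALUE;
            for (int c = 0; c < classMeans.length; c++)
            {
                double meanProj = v[0] * classMeans[c][0] + v[1] * classMeans[c][1];
                double dist = Math.abs(proj - meanProj);
                if (dist < bestDist)
                {
                    bestDist = dist;
                    best = c;
                }
            }
            output_canvas_d[i][j] = best;
        }
    }
}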
d960 6
d967 4
a970 11
MyPoint point = (MyPoint)set1_d.elementAt(i);
samples1++;
if ((point.x > scale.xmin && point.x < scale.xmax)
&& (point.y > scale.ymin && point.y < scale.ymax))
{
if (output_canvas_d[(int)((point.x-scale.xmin)/incrementX)][(int)((point.y-scale.ymin)/incrementY)] != 0)
{
incorrect1++;
}
}
d972 1
d975 19
a993 19
{
error = ((double)incorrect1 / (double)samples1) * 100.0;
text =
new String(
" Results for class 0:\n"
+ " Total number of samples: "
+ samples1
+ "\n"
+ " Misclassified samples: "
+ incorrect1
+ "\n"
+ " Classification error: "
+ MathUtil.setDecimal(error, 2)
+ "%");
pro_box_d.appendMessage(text);
}
d998 6
d1005 4
a1008 11
MyPoint point = (MyPoint)set2_d.elementAt(i);
samples2++;
if ((point.x > scale.xmin && point.x < scale.xmax)
&& (point.y > scale.ymin && point.y < scale.ymax))
{
if (output_canvas_d[(int)((point.x-scale.xmin)/incrementX)][(int)((point.y-scale.ymin)/incrementY)] != 1)
{
incorrect2++;
}
}
d1010 1
d1013 19
a1031 19
{
error = ((double)incorrect2 / (double)samples2) * 100.0;
text =
new String(
" Results for class 1:\n"
+ " Total number of samples: "
+ samples2
+ "\n"
+ " Misclassified samples: "
+ incorrect2
+ "\n"
+ " Classification error: "
+ MathUtil.setDecimal(error, 2)
+ "%");
pro_box_d.appendMessage(text);
}
d1036 6
d1043 4
a1046 11
MyPoint point = (MyPoint)set3_d.elementAt(i);
samples3++;
if ((point.x > scale.xmin && point.x < scale.xmax)
&& (point.y > scale.ymin && point.y < scale.ymax))
{
if (output_canvas_d[(int)((point.x-scale.xmin)/incrementX)][(int)((point.y-scale.ymin)/incrementY)] != 2)
{
incorrect3++;
}
}
d1048 1
d1051 19
a1069 19
{
error = ((double)incorrect3 / (double)samples3) * 100.0;
text =
new String(
" Results for class 2:\n"
+ " Total number of samples: "
+ samples3
+ "\n"
+ " Misclassified samples: "
+ incorrect3
+ "\n"
+ " Classification error: "
+ MathUtil.setDecimal(error, 2)
+ "%");
pro_box_d.appendMessage(text);
}
d1074 6
d1081 4
a1084 11
MyPoint point = (MyPoint)set4_d.elementAt(i);
samples4++;
if ((point.x > scale.xmin && point.x < scale.xmax)
&& (point.y > scale.ymin && point.y < scale.ymax))
{
if (output_canvas_d[(int)((point.x-scale.xmin)/incrementX)][(int)((point.y-scale.ymin)/incrementY)] != 3)
{
incorrect4++;
}
}
d1086 1
d1089 1
a1089 1
{
d1091 17
a1107 17
error = ((double)incorrect4 / (double)samples4) * 100.0;
text =
new String(
" Results for class 3:\n"
+ " Total number of samples: "
+ samples4
+ "\n"
+ " Misclassified samples: "
+ incorrect4
+ "\n"
+ " Classification error: "
+ MathUtil.setDecimal(error, 2)
+ "%");
pro_box_d.appendMessage(text);
}
d1131 1
a1131 2
@


1.1
log
@Initial revision
@
text
@d33 1
a33 1
System.out.println("THE FUNCTION IS CALLED");
d35 1
a35 1
System.out.println(algo_id + " initialize()");
d88 1
a88 1
System.out.println(algo_id + " step1()");
d114 1
a114 1
System.out.println(algo_id + " step2()");
d161 1
a161 1
System.out.println(algo_id + " step3()");
d579 1
a579 1
System.out.println(algo_id + " run()");
d599 1
a599 1
pro_box_d.appendMessage(" Step Sequence Complete");
d687 1
a687 1
System.out.println(algo_id + " transformLDA(Data d, Matrix S)");
d756 1
a756 1
System.out.println(algo_id + " computeDecisionRegions()");
@