This example illustrates the use of the DiscriminantAnalysis class, applying linear discrimination with equal prior probabilities to Fisher's (1936) iris data.
using System;
using Imsl.Stat;
using PrintMatrix = Imsl.Math.PrintMatrix;
using PrintMatrixFormat = Imsl.Math.PrintMatrixFormat;

/// <summary>
/// Linear discriminant analysis of Fisher's (1936) iris data using equal
/// prior probabilities, a pooled covariance matrix, and reclassification.
/// </summary>
public class DiscriminantAnalysisEx1
{
    public static void Main(String[] args)
    {
        // Column 0 holds the group id (1, 2 or 3); columns 1-4 hold the
        // four iris measurements used as predictor variables.
        double[,] xy = {
            {1.0, 5.1, 3.5, 1.4, .2}, {1.0, 4.9, 3.0, 1.4, .2},
            {1.0, 4.7, 3.2, 1.3, .2}, {1.0, 4.6, 3.1, 1.5, .2},
            {1.0, 5.0, 3.6, 1.4, .2}, {1.0, 5.4, 3.9, 1.7, .4},
            {1.0, 4.6, 3.4, 1.4, .3}, {1.0, 5.0, 3.4, 1.5, .2},
            {1.0, 4.4, 2.9, 1.4, .2}, {1.0, 4.9, 3.1, 1.5, .1},
            {1.0, 5.4, 3.7, 1.5, .2}, {1.0, 4.8, 3.4, 1.6, .2},
            {1.0, 4.8, 3.0, 1.4, .1}, {1.0, 4.3, 3.0, 1.1, .1},
            {1.0, 5.8, 4.0, 1.2, .2}, {1.0, 5.7, 4.4, 1.5, .4},
            {1.0, 5.4, 3.9, 1.3, .4}, {1.0, 5.1, 3.5, 1.4, .3},
            {1.0, 5.7, 3.8, 1.7, .3}, {1.0, 5.1, 3.8, 1.5, .3},
            {1.0, 5.4, 3.4, 1.7, .2}, {1.0, 5.1, 3.7, 1.5, .4},
            {1.0, 4.6, 3.6, 1.0, .2}, {1.0, 5.1, 3.3, 1.7, .5},
            {1.0, 4.8, 3.4, 1.9, .2}, {1.0, 5.0, 3.0, 1.6, .2},
            {1.0, 5.0, 3.4, 1.6, .4}, {1.0, 5.2, 3.5, 1.5, .2},
            {1.0, 5.2, 3.4, 1.4, .2}, {1.0, 4.7, 3.2, 1.6, .2},
            {1.0, 4.8, 3.1, 1.6, .2}, {1.0, 5.4, 3.4, 1.5, .4},
            {1.0, 5.2, 4.1, 1.5, .1}, {1.0, 5.5, 4.2, 1.4, .2},
            {1.0, 4.9, 3.1, 1.5, .2}, {1.0, 5.0, 3.2, 1.2, .2},
            {1.0, 5.5, 3.5, 1.3, .2}, {1.0, 4.9, 3.6, 1.4, .1},
            {1.0, 4.4, 3.0, 1.3, .2}, {1.0, 5.1, 3.4, 1.5, .2},
            {1.0, 5.0, 3.5, 1.3, .3}, {1.0, 4.5, 2.3, 1.3, .3},
            {1.0, 4.4, 3.2, 1.3, .2}, {1.0, 5.0, 3.5, 1.6, .6},
            {1.0, 5.1, 3.8, 1.9, .4}, {1.0, 4.8, 3.0, 1.4, .3},
            {1.0, 5.1, 3.8, 1.6, .2}, {1.0, 4.6, 3.2, 1.4, .2},
            {1.0, 5.3, 3.7, 1.5, .2}, {1.0, 5.0, 3.3, 1.4, .2},
            {2.0, 7.0, 3.2, 4.7, 1.4}, {2.0, 6.4, 3.2, 4.5, 1.5},
            {2.0, 6.9, 3.1, 4.9, 1.5}, {2.0, 5.5, 2.3, 4.0, 1.3},
            {2.0, 6.5, 2.8, 4.6, 1.5}, {2.0, 5.7, 2.8, 4.5, 1.3},
            {2.0, 6.3, 3.3, 4.7, 1.6}, {2.0, 4.9, 2.4, 3.3, 1.0},
            {2.0, 6.6, 2.9, 4.6, 1.3}, {2.0, 5.2, 2.7, 3.9, 1.4},
            {2.0, 5.0, 2.0, 3.5, 1.0}, {2.0, 5.9, 3.0, 4.2, 1.5},
            {2.0, 6.0, 2.2, 4.0, 1.0}, {2.0, 6.1, 2.9, 4.7, 1.4},
            {2.0, 5.6, 2.9, 3.6, 1.3}, {2.0, 6.7, 3.1, 4.4, 1.4},
            {2.0, 5.6, 3.0, 4.5, 1.5}, {2.0, 5.8, 2.7, 4.1, 1.0},
            {2.0, 6.2, 2.2, 4.5, 1.5}, {2.0, 5.6, 2.5, 3.9, 1.1},
            {2.0, 5.9, 3.2, 4.8, 1.8}, {2.0, 6.1, 2.8, 4.0, 1.3},
            {2.0, 6.3, 2.5, 4.9, 1.5}, {2.0, 6.1, 2.8, 4.7, 1.2},
            {2.0, 6.4, 2.9, 4.3, 1.3}, {2.0, 6.6, 3.0, 4.4, 1.4},
            {2.0, 6.8, 2.8, 4.8, 1.4}, {2.0, 6.7, 3.0, 5.0, 1.7},
            {2.0, 6.0, 2.9, 4.5, 1.5}, {2.0, 5.7, 2.6, 3.5, 1.0},
            {2.0, 5.5, 2.4, 3.8, 1.1}, {2.0, 5.5, 2.4, 3.7, 1.0},
            {2.0, 5.8, 2.7, 3.9, 1.2}, {2.0, 6.0, 2.7, 5.1, 1.6},
            {2.0, 5.4, 3.0, 4.5, 1.5}, {2.0, 6.0, 3.4, 4.5, 1.6},
            {2.0, 6.7, 3.1, 4.7, 1.5}, {2.0, 6.3, 2.3, 4.4, 1.3},
            {2.0, 5.6, 3.0, 4.1, 1.3}, {2.0, 5.5, 2.5, 4.0, 1.3},
            {2.0, 5.5, 2.6, 4.4, 1.2}, {2.0, 6.1, 3.0, 4.6, 1.4},
            {2.0, 5.8, 2.6, 4.0, 1.2}, {2.0, 5.0, 2.3, 3.3, 1.0},
            {2.0, 5.6, 2.7, 4.2, 1.3}, {2.0, 5.7, 3.0, 4.2, 1.2},
            {2.0, 5.7, 2.9, 4.2, 1.3}, {2.0, 6.2, 2.9, 4.3, 1.3},
            {2.0, 5.1, 2.5, 3.0, 1.1}, {2.0, 5.7, 2.8, 4.1, 1.3},
            {3.0, 6.3, 3.3, 6.0, 2.5}, {3.0, 5.8, 2.7, 5.1, 1.9},
            {3.0, 7.1, 3.0, 5.9, 2.1}, {3.0, 6.3, 2.9, 5.6, 1.8},
            {3.0, 6.5, 3.0, 5.8, 2.2}, {3.0, 7.6, 3.0, 6.6, 2.1},
            {3.0, 4.9, 2.5, 4.5, 1.7}, {3.0, 7.3, 2.9, 6.3, 1.8},
            {3.0, 6.7, 2.5, 5.8, 1.8}, {3.0, 7.2, 3.6, 6.1, 2.5},
            {3.0, 6.5, 3.2, 5.1, 2.0}, {3.0, 6.4, 2.7, 5.3, 1.9},
            {3.0, 6.8, 3.0, 5.5, 2.1}, {3.0, 5.7, 2.5, 5.0, 2.0},
            {3.0, 5.8, 2.8, 5.1, 2.4}, {3.0, 6.4, 3.2, 5.3, 2.3},
            {3.0, 6.5, 3.0, 5.5, 1.8}, {3.0, 7.7, 3.8, 6.7, 2.2},
            {3.0, 7.7, 2.6, 6.9, 2.3}, {3.0, 6.0, 2.2, 5.0, 1.5},
            {3.0, 6.9, 3.2, 5.7, 2.3}, {3.0, 5.6, 2.8, 4.9, 2.0},
            {3.0, 7.7, 2.8, 6.7, 2.0}, {3.0, 6.3, 2.7, 4.9, 1.8},
            {3.0, 6.7, 3.3, 5.7, 2.1}, {3.0, 7.2, 3.2, 6.0, 1.8},
            {3.0, 6.2, 2.8, 4.8, 1.8}, {3.0, 6.1, 3.0, 4.9, 1.8},
            {3.0, 6.4, 2.8, 5.6, 2.1}, {3.0, 7.2, 3.0, 5.8, 1.6},
            {3.0, 7.4, 2.8, 6.1, 1.9}, {3.0, 7.9, 3.8, 6.4, 2.0},
            {3.0, 6.4, 2.8, 5.6, 2.2}, {3.0, 6.3, 2.8, 5.1, 1.5},
            {3.0, 6.1, 2.6, 5.6, 1.4}, {3.0, 7.7, 3.0, 6.1, 2.3},
            {3.0, 6.3, 3.4, 5.6, 2.4}, {3.0, 6.4, 3.1, 5.5, 1.8},
            {3.0, 6.0, 3.0, 4.8, 1.8}, {3.0, 6.9, 3.1, 5.4, 2.1},
            {3.0, 6.7, 3.1, 5.6, 2.4}, {3.0, 6.9, 3.1, 5.1, 2.3},
            {3.0, 5.8, 2.7, 5.1, 1.9}, {3.0, 6.8, 3.2, 5.9, 2.3},
            {3.0, 6.7, 3.3, 5.7, 2.5}, {3.0, 6.7, 3.0, 5.2, 2.3},
            {3.0, 6.3, 2.5, 5.0, 1.9}, {3.0, 6.5, 3.0, 5.2, 2.0},
            {3.0, 6.2, 3.4, 5.4, 2.3}, {3.0, 5.9, 3.0, 5.1, 1.8}
        };

        // Pull the group id of every observation out of column 0.
        int[] groupIds = new int[xy.GetLength(0)];
        for (int row = 0; row < xy.GetLength(0); row++)
        {
            groupIds[row] = (int)xy[row, 0];
        }

        // The predictors are columns 1-4; everything except the group column.
        int[] varIdx = { 1, 2, 3, 4 };
        int nVariables = xy.GetLength(1) - 1;

        // Train on the full data set, then reclassify the same observations.
        DiscriminantAnalysis analysis = new DiscriminantAnalysis(nVariables, 3);
        analysis.CovarianceComputation = DiscriminantAnalysis.CovarianceMatrix.Pooled;
        analysis.ClassificationMethod = DiscriminantAnalysis.Classification.Reclassification;
        analysis.Update(xy, groupIds, varIdx);
        analysis.Classify(xy, groupIds, varIdx);

        PrintMatrixFormat fmt = new PrintMatrixFormat();
        fmt.NumberFormat = "0.00";

        new PrintMatrix("Xmean are: ").Print(fmt, analysis.GetMeans());
        new PrintMatrix("Coef: ").Print(fmt, analysis.GetCoefficients());
        new PrintMatrix("Counts: ").Print(analysis.GetGroupCounts());
        new PrintMatrix("Stats: ").Print(fmt, analysis.GetStatistics());

        // Reshape the membership vector into a one-row matrix so the
        // PrintMatrix page-width wrapping can be used.
        int[] membership = analysis.GetClassMembership();
        int[,] membershipRow = new int[1, membership.Length];
        for (int col = 0; col < membership.Length; col++)
        {
            membershipRow[0, col] = membership[col];
        }
        new PrintMatrix("ClassMembership").SetPageWidth(50).Print(membershipRow);
        new PrintMatrix("ClassTable: ").Print(analysis.GetClassTable());

        // Print each 2-D slice of the 3-D covariance result separately.
        double[,,] covariances = analysis.GetCovariance();
        double[,] slice = new double[covariances.GetLength(1), covariances.GetLength(2)];
        for (int g = 0; g < covariances.GetLength(0); g++)
        {
            for (int r = 0; r < covariances.GetLength(1); r++)
            {
                for (int c = 0; c < covariances.GetLength(2); c++)
                {
                    slice[r, c] = covariances[g, r, c];
                }
            }
            new PrintMatrix("Covariance Matrix " + g + " : ").Print(fmt, slice);
        }

        new PrintMatrix("Prior : ").Print(analysis.GetPrior());
        new PrintMatrix("PROB: ").Print(fmt, analysis.GetProbability());
        new PrintMatrix("MAHALANOBIS: ").Print(fmt, analysis.GetMahalanobis());
        Console.Out.WriteLine("nrmiss = " + analysis.NumberOfRowsMissing);
    }
}
Xmean are: 0 1 2 3 0 5.01 3.43 1.46 0.25 1 5.94 2.77 4.26 1.33 2 6.59 2.97 5.55 2.03 Coef: 0 1 2 3 4 0 -86.31 23.54 23.59 -16.43 -17.40 1 -72.85 15.70 7.07 5.21 6.43 2 -104.37 12.45 3.69 12.77 21.08 Counts: 0 0 50 1 50 2 50 Stats: 0 0 147.00 1 NaN 2 NaN 3 NaN 4 NaN 5 NaN 6 NaN 7 -9.96 8 50.00 9 50.00 10 50.00 11 150.00 ClassMembership 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 0 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 0 2 2 2 2 2 2 2 2 2 2 3 2 2 2 2 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 0 2 2 2 2 2 2 2 2 3 2 2 2 2 2 2 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 0 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 0 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 0 3 3 3 3 3 3 3 3 3 3 3 3 3 2 3 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 0 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 ClassTable: 0 1 2 0 50 0 0 1 0 48 2 2 0 1 49 Covariance Matrix 0 : 0 1 2 3 0 0.27 0.09 0.17 0.04 1 0.09 0.12 0.06 0.03 2 0.17 0.06 0.19 0.04 3 0.04 0.03 0.04 0.04 Prior : 0 0 0.333333333333333 1 0.333333333333333 2 0.333333333333333 PROB: 0 1 2 0 1.00 0.00 0.00 1 1.00 0.00 0.00 2 1.00 0.00 0.00 3 1.00 0.00 0.00 4 1.00 0.00 0.00 5 1.00 0.00 0.00 6 1.00 0.00 0.00 7 1.00 0.00 0.00 8 1.00 0.00 0.00 9 1.00 0.00 0.00 10 1.00 0.00 0.00 11 1.00 0.00 0.00 12 1.00 0.00 0.00 13 1.00 0.00 0.00 14 1.00 0.00 0.00 15 1.00 0.00 0.00 16 1.00 0.00 0.00 17 1.00 0.00 0.00 18 1.00 0.00 0.00 19 1.00 0.00 0.00 20 1.00 0.00 0.00 21 1.00 0.00 0.00 22 1.00 0.00 0.00 23 1.00 0.00 0.00 24 1.00 0.00 0.00 25 1.00 0.00 0.00 26 1.00 0.00 0.00 27 1.00 0.00 0.00 28 1.00 0.00 0.00 29 1.00 0.00 0.00 30 1.00 0.00 0.00 31 1.00 0.00 0.00 32 1.00 0.00 0.00 33 1.00 0.00 0.00 34 
1.00 0.00 0.00 35 1.00 0.00 0.00 36 1.00 0.00 0.00 37 1.00 0.00 0.00 38 1.00 0.00 0.00 39 1.00 0.00 0.00 40 1.00 0.00 0.00 41 1.00 0.00 0.00 42 1.00 0.00 0.00 43 1.00 0.00 0.00 44 1.00 0.00 0.00 45 1.00 0.00 0.00 46 1.00 0.00 0.00 47 1.00 0.00 0.00 48 1.00 0.00 0.00 49 1.00 0.00 0.00 50 0.00 1.00 0.00 51 0.00 1.00 0.00 52 0.00 1.00 0.00 53 0.00 1.00 0.00 54 0.00 1.00 0.00 55 0.00 1.00 0.00 56 0.00 0.99 0.01 57 0.00 1.00 0.00 58 0.00 1.00 0.00 59 0.00 1.00 0.00 60 0.00 1.00 0.00 61 0.00 1.00 0.00 62 0.00 1.00 0.00 63 0.00 0.99 0.01 64 0.00 1.00 0.00 65 0.00 1.00 0.00 66 0.00 0.98 0.02 67 0.00 1.00 0.00 68 0.00 0.96 0.04 69 0.00 1.00 0.00 70 0.00 0.25 0.75 71 0.00 1.00 0.00 72 0.00 0.82 0.18 73 0.00 1.00 0.00 74 0.00 1.00 0.00 75 0.00 1.00 0.00 76 0.00 1.00 0.00 77 0.00 0.69 0.31 78 0.00 0.99 0.01 79 0.00 1.00 0.00 80 0.00 1.00 0.00 81 0.00 1.00 0.00 82 0.00 1.00 0.00 83 0.00 0.14 0.86 84 0.00 0.96 0.04 85 0.00 0.99 0.01 86 0.00 1.00 0.00 87 0.00 1.00 0.00 88 0.00 1.00 0.00 89 0.00 1.00 0.00 90 0.00 1.00 0.00 91 0.00 1.00 0.00 92 0.00 1.00 0.00 93 0.00 1.00 0.00 94 0.00 1.00 0.00 95 0.00 1.00 0.00 96 0.00 1.00 0.00 97 0.00 1.00 0.00 98 0.00 1.00 0.00 99 0.00 1.00 0.00 100 0.00 0.00 1.00 101 0.00 0.00 1.00 102 0.00 0.00 1.00 103 0.00 0.00 1.00 104 0.00 0.00 1.00 105 0.00 0.00 1.00 106 0.00 0.05 0.95 107 0.00 0.00 1.00 108 0.00 0.00 1.00 109 0.00 0.00 1.00 110 0.00 0.01 0.99 111 0.00 0.00 1.00 112 0.00 0.00 1.00 113 0.00 0.00 1.00 114 0.00 0.00 1.00 115 0.00 0.00 1.00 116 0.00 0.01 0.99 117 0.00 0.00 1.00 118 0.00 0.00 1.00 119 0.00 0.22 0.78 120 0.00 0.00 1.00 121 0.00 0.00 1.00 122 0.00 0.00 1.00 123 0.00 0.10 0.90 124 0.00 0.00 1.00 125 0.00 0.00 1.00 126 0.00 0.19 0.81 127 0.00 0.13 0.87 128 0.00 0.00 1.00 129 0.00 0.10 0.90 130 0.00 0.00 1.00 131 0.00 0.00 1.00 132 0.00 0.00 1.00 133 0.00 0.73 0.27 134 0.00 0.07 0.93 135 0.00 0.00 1.00 136 0.00 0.00 1.00 137 0.00 0.01 0.99 138 0.00 0.19 0.81 139 0.00 0.00 1.00 140 0.00 0.00 1.00 141 0.00 0.00 1.00 142 0.00 0.00 
1.00 143 0.00 0.00 1.00 144 0.00 0.00 1.00 145 0.00 0.00 1.00 146 0.00 0.01 0.99 147 0.00 0.00 1.00 148 0.00 0.00 1.00 149 0.00 0.02 0.98 MAHALANOBIS: 0 1 2 0 0.00 89.86 179.38 1 89.86 0.00 17.20 2 179.38 17.20 0.00 nrmiss = 0

Link to C# source.