This example uses a database containing the complete set of possible board configurations at the end of tic-tac-toe games, where "x" is assumed to have played first. The target concept is "win for x" (i.e., true when "x" has one of the 8 possible ways to create three-in-a-row).
There are nine nominal input attributes, one for each square on the tic-tac-toe board, encoded such that 0 = player x has taken the square, 1 = player o has taken the square, and 2 = blank.
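Before the data are presented to the network, each nominal attribute is binary-encoded into three indicator columns, so the nine board attributes become 27 network inputs. The following is a minimal sketch of that encoding for one attribute across a few hypothetical observations, using the UnsupervisedNominalFilter class the same way the full listing below does (the class name and sample values are illustrative; the codes are shifted to the range 1..3 before encoding, as in the listing).

import com.imsl.datamining.neural.UnsupervisedNominalFilter;

public class EncodingSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical values of one attribute (e.g. top-left-square) for
        // four observations, coded as in the data set: 0 = x, 1 = o, 2 = blank.
        int[] topLeft = {0, 1, 2, 0};

        // The full listing shifts the codes to 1..3 before encoding.
        for (int i = 0; i < topLeft.length; i++) {
            topLeft[i]++;
        }

        // Each nominal value becomes a one-of-three binary row, so the nine
        // board attributes expand to 9 x 3 = 27 network inputs per observation.
        UnsupervisedNominalFilter filter = new UnsupervisedNominalFilter(3);
        int[][] z = filter.encode(topLeft);
        for (int i = 0; i < z.length; i++) {
            System.out.println(topLeft[i] + " -> " +
                    z[i][0] + " " + z[i][1] + " " + z[i][2]);
        }
    }
}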
The predicted attribute is a win or loss at tic-tac-toe. For this example the first 626 observations are wins and the remaining 332 are losses.
The network consists of 27 input nodes (the nine nominal attributes, each binary-encoded into three indicator columns) and three layers of perceptrons, with five perceptrons in the first hidden layer, three perceptrons in the second hidden layer, and one perceptron in the output layer.
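This topology can be assembled with the FeedForwardNetwork class using the same calls as the full listing below; the sketch here shows only the network construction, and the class name and printed check are illustrative. The 162 weights are the 27*5 + 5*3 + 3*1 = 153 connection weights plus the 5 + 3 + 1 = 9 perceptron bias terms.

import com.imsl.datamining.neural.*;

public class TopologySketch {
    public static void main(String[] args) throws Exception {
        // 27 inputs -> 5 perceptrons -> 3 perceptrons -> 1 output perceptron
        FeedForwardNetwork network = new FeedForwardNetwork();
        network.getInputLayer().createInputs(27);
        network.createHiddenLayer().createPerceptrons(5);
        network.createHiddenLayer().createPerceptrons(3);
        network.getOutputLayer().createPerceptrons(1);
        network.linkAll(); // fully connect adjacent layers

        // Logistic activation on every perceptron, including the output.
        for (Perceptron p : network.getPerceptrons()) {
            p.setActivation(Activation.LOGISTIC);
        }

        // 153 link weights plus 9 biases = 162 weights in total.
        System.out.println("number of weights = " + network.getWeights().length);
    }
}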
There are a total of 162 weights in this network. The activation functions are logistic in all layers. Since the target output is a binary classification, the logistic activation function must be used in the output layer. Training is conducted with the quasi-Newton trainer using the binary entropy error function provided by the BinaryClassification class.
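Condensed from the full listing that follows, the sketch below shows just the training configuration: a BinaryClassification object wraps the network and supplies the binary entropy error function to two quasi-Newton trainers, which are combined into a two-stage EpochTrainer. The helper method name is illustrative, and xData/yData are assumed to already hold the 27 binary-encoded inputs and the 0/1 targets built in the listing; additional trainer settings (step size, tolerance, logging) appear only in the full listing.

import com.imsl.datamining.neural.*;
import com.imsl.stat.Random;

public class TrainingSketch {
    // network built as shown above; xData is 958 x 27, yData holds 0/1 targets
    static void train(FeedForwardNetwork network, double[][] xData, int[] yData)
            throws Exception {
        BinaryClassification classification = new BinaryClassification(network);

        // Both stages minimize the binary entropy error from BinaryClassification.
        QuasiNewtonTrainer stageI = new QuasiNewtonTrainer();
        QuasiNewtonTrainer stageII = new QuasiNewtonTrainer();
        stageI.setError(classification.getError());
        stageII.setError(classification.getError());
        stageI.setMaximumTrainingIterations(8000);
        stageII.setMaximumTrainingIterations(8000);

        // Two-stage epoch training: stage I on randomly sampled epochs,
        // stage II refinement over all observations.
        EpochTrainer trainer = new EpochTrainer(stageI, stageII);
        trainer.setNumberOfEpochs(20);
        trainer.setEpochSize(xData.length);
        trainer.setRandom(new Random(5555)); // seeds for repeatable output
        trainer.setRandomSamples(new Random(5555), new Random(5555));

        classification.train(trainer, xData, yData);

        // Cross-entropy error and classification error rate on the training data.
        double[] stats = classification.computeStatistics(xData, yData);
        System.out.println("cross-entropy error       = " + stats[0]);
        System.out.println("classification error rate = " + stats[1]);
    }
}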
import com.imsl.datamining.neural.*; import java.io.*; import java.util.logging.*; import com.imsl.math.PrintMatrix; import com.imsl.math.PrintMatrixFormat; import com.imsl.stat.Random; //***************************************************************************** // Three Layer Feed-Forward Network with 4 inputs, all // continuous, and 2 classification categories. // // new classification training_ex4.c // // Three Layer Feed-Forward Network with 4 inputs, all // continuous, and 2 classification categories. // // This database encodes the complete set of possible board configurations // at the end of tic-tac-toe games, where "x" is assumed to have played // first. The target concept is "win for x" (i.e., true when "x" has one // of 8 possible ways to create a "three-in-a-row"). // // Predicted attribute: win or loose at tic-tac-toe // First 626 obs are positive (win) and the next 332 are negative (loss) // // Input Attributes (10 categorical Attributes) // Attribute Information: (0=player x has taken, 1=player o has taken, 2=blank) // // 1. top-left-square: {x,o,b} // 2. top-middle-square: {x,o,b} // 3. top-right-square: {x,o,b} // 4. middle-left-square: {x,o,b} // 5. middle-middle-square: {x,o,b} // 6. middle-right-square: {x,o,b} // 7. bottom-left-square: {x,o,b} // 8. bottom-middle-square: {x,o,b} // 9. bottom-right-square: {x,o,b} // 10. Class: {positive,negative} //***************************************************************************** public class BinaryClassificationEx2 implements Serializable { private static int nObs = 958; // number of training patterns private static int nInputs = 27; // 9 nominal coded as 0=x, 1=O, 2=blank private static int nCategorical = 27; // seven categorical attributes private static int nContinuous = 0; // two continuous input attribute private static int nOutputs = 1; // one continuous output (nClasses=2) private static int nLayers = 3; // number of perceptron layers private static int nPerceptrons1 = 5; // perceptrons in 1st hidden layer private static int nPerceptrons2 = 3; // perceptrons in 2nd hidden layer private static boolean trace = true; // Turns on/off training log private static Activation hiddenLayerActivation = Activation.LOGISTIC; private static Activation outputLayerActivation = Activation.LOGISTIC; private static int[][] data = { {0,0,0,0,1,1,0,1,1},{0,0,0,0,1,1,1,0,1},{0,0,0,0,1,1,1,1,0},{0,0,0,0,1,1,1,2,2}, {0,0,0,0,1,1,2,1,2},{0,0,0,0,1,1,2,2,1},{0,0,0,0,1,2,1,1,2},{0,0,0,0,1,2,1,2,1}, {0,0,0,0,1,2,2,1,1},{0,0,0,0,2,1,1,1,2},{0,0,0,0,2,1,1,2,1},{0,0,0,0,2,1,2,1,1}, {0,0,0,1,0,1,0,1,1},{0,0,0,1,0,1,1,0,1},{0,0,0,1,0,1,1,1,0},{0,0,0,1,0,1,1,2,2}, {0,0,0,1,0,1,2,1,2},{0,0,0,1,0,1,2,2,1},{0,0,0,1,0,2,1,1,2},{0,0,0,1,0,2,1,2,1}, {0,0,0,1,0,2,2,1,1},{0,0,0,1,1,0,0,1,1},{0,0,0,1,1,0,1,0,1},{0,0,0,1,1,0,1,1,0}, {0,0,0,1,1,0,1,2,2},{0,0,0,1,1,0,2,1,2},{0,0,0,1,1,0,2,2,1},{0,0,0,1,1,2,0,1,2}, {0,0,0,1,1,2,0,2,1},{0,0,0,1,1,2,1,0,2},{0,0,0,1,1,2,1,2,0},{0,0,0,1,1,2,2,0,1}, {0,0,0,1,1,2,2,1,0},{0,0,0,1,1,2,2,2,2},{0,0,0,1,2,0,1,1,2},{0,0,0,1,2,0,1,2,1}, {0,0,0,1,2,0,2,1,1},{0,0,0,1,2,1,0,1,2},{0,0,0,1,2,1,0,2,1},{0,0,0,1,2,1,1,0,2}, {0,0,0,1,2,1,1,2,0},{0,0,0,1,2,1,2,0,1},{0,0,0,1,2,1,2,1,0},{0,0,0,1,2,1,2,2,2}, {0,0,0,1,2,2,0,1,1},{0,0,0,1,2,2,1,0,1},{0,0,0,1,2,2,1,1,0},{0,0,0,1,2,2,1,2,2}, {0,0,0,1,2,2,2,1,2},{0,0,0,1,2,2,2,2,1},{0,0,0,2,0,1,1,1,2},{0,0,0,2,0,1,1,2,1}, {0,0,0,2,0,1,2,1,1},{0,0,0,2,1,0,1,1,2},{0,0,0,2,1,0,1,2,1},{0,0,0,2,1,0,2,1,1}, {0,0,0,2,1,1,0,1,2},{0,0,0,2,1,1,0,2,1},{0,0,0,2,1,1,1,0,2},{0,0,0,2,1,1,1,2,0}, 
{0,0,0,2,1,1,2,0,1},{0,0,0,2,1,1,2,1,0},{0,0,0,2,1,1,2,2,2},{0,0,0,2,1,2,0,1,1}, {0,0,0,2,1,2,1,0,1},{0,0,0,2,1,2,1,1,0},{0,0,0,2,1,2,1,2,2},{0,0,0,2,1,2,2,1,2}, {0,0,0,2,1,2,2,2,1},{0,0,0,2,2,1,0,1,1},{0,0,0,2,2,1,1,0,1},{0,0,0,2,2,1,1,1,0}, {0,0,0,2,2,1,1,2,2},{0,0,0,2,2,1,2,1,2},{0,0,0,2,2,1,2,2,1},{0,0,0,2,2,2,1,1,2}, {0,0,0,2,2,2,1,2,1},{0,0,0,2,2,2,2,1,1},{0,0,1,0,0,1,1,1,0},{0,0,1,0,1,0,0,1,1}, {0,0,1,0,1,1,0,1,0},{0,0,1,0,1,1,0,2,2},{0,0,1,0,1,2,0,1,2},{0,0,1,0,1,2,0,2,1}, {0,0,1,0,2,1,0,1,2},{0,0,1,0,2,2,0,1,1},{0,0,1,1,0,0,1,0,1},{0,0,1,1,0,0,1,1,0}, {0,0,1,1,0,1,0,1,0},{0,0,1,1,0,1,1,0,0},{0,0,1,1,0,1,2,0,2},{0,0,1,1,0,1,2,2,0}, {0,0,1,1,0,2,1,0,2},{0,0,1,1,0,2,1,2,0},{0,0,1,1,0,2,2,0,1},{0,0,1,1,0,2,2,1,0}, {0,0,1,2,0,1,1,0,2},{0,0,1,2,0,1,1,2,0},{0,0,1,2,0,1,2,1,0},{0,0,1,2,0,2,1,0,1}, {0,0,1,2,0,2,1,1,0},{0,0,2,0,1,1,0,1,2},{0,0,2,0,1,1,0,2,1},{0,0,2,0,1,2,0,1,1}, {0,0,2,0,2,1,0,1,1},{0,0,2,1,0,1,1,0,2},{0,0,2,1,0,1,1,2,0},{0,0,2,1,0,1,2,0,1}, {0,0,2,1,0,1,2,1,0},{0,0,2,1,0,2,1,0,1},{0,0,2,1,0,2,1,1,0},{0,0,2,2,0,1,1,0,1}, {0,0,2,2,0,1,1,1,0},{0,1,0,0,0,1,0,1,1},{0,1,0,0,0,1,1,1,0},{0,1,0,0,1,1,0,0,1}, {0,1,0,0,1,1,0,2,2},{0,1,0,0,1,2,0,2,1},{0,1,0,0,2,1,0,1,2},{0,1,0,0,2,1,0,2,1}, {0,1,0,0,2,2,0,1,1},{0,1,0,1,0,0,0,1,1},{0,1,0,1,0,0,1,1,0},{0,1,0,1,0,1,0,0,1}, {0,1,0,1,0,1,0,1,0},{0,1,0,1,0,1,0,2,2},{0,1,0,1,0,1,1,0,0},{0,1,0,1,0,1,2,2,0}, {0,1,0,1,0,2,0,1,2},{0,1,0,1,0,2,0,2,1},{0,1,0,1,0,2,1,2,0},{0,1,0,1,0,2,2,1,0}, {0,1,0,1,1,0,1,0,0},{0,1,0,1,1,0,2,2,0},{0,1,0,1,2,0,1,2,0},{0,1,0,1,2,0,2,1,0}, {0,1,0,2,0,1,0,1,2},{0,1,0,2,0,1,0,2,1},{0,1,0,2,0,1,1,2,0},{0,1,0,2,0,1,2,1,0}, {0,1,0,2,0,2,0,1,1},{0,1,0,2,0,2,1,1,0},{0,1,0,2,1,0,1,2,0},{0,1,0,2,2,0,1,1,0}, {0,1,1,0,0,0,0,1,1},{0,1,1,0,0,0,1,0,1},{0,1,1,0,0,0,1,1,0},{0,1,1,0,0,0,1,2,2}, {0,1,1,0,0,0,2,1,2},{0,1,1,0,0,0,2,2,1},{0,1,1,0,0,1,0,1,0},{0,1,1,0,0,1,0,2,2}, {0,1,1,0,0,1,1,0,0},{0,1,1,0,0,1,2,2,0},{0,1,1,0,0,2,0,1,2},{0,1,1,0,0,2,0,2,1}, {0,1,1,0,0,2,1,2,0},{0,1,1,0,0,2,2,1,0},{0,1,1,0,1,0,0,0,1},{0,1,1,0,1,0,0,2,2}, {0,1,1,0,1,1,0,0,0},{0,1,1,0,1,2,0,0,2},{0,1,1,0,1,2,0,2,0},{0,1,1,0,2,0,0,1,2}, {0,1,1,0,2,0,0,2,1},{0,1,1,0,2,1,0,0,2},{0,1,1,0,2,1,0,2,0},{0,1,1,0,2,2,0,0,1}, {0,1,1,0,2,2,0,1,0},{0,1,1,0,2,2,0,2,2},{0,1,1,1,0,0,0,1,0},{0,1,1,1,0,0,1,0,0}, {0,1,1,1,0,0,2,2,0},{0,1,1,1,0,1,0,0,0},{0,1,1,1,0,2,0,2,0},{0,1,1,1,0,2,2,0,0}, {0,1,1,1,1,0,0,0,0},{0,1,1,1,2,2,0,0,0},{0,1,1,2,0,0,1,2,0},{0,1,1,2,0,0,2,1,0}, {0,1,1,2,0,1,0,2,0},{0,1,1,2,0,1,2,0,0},{0,1,1,2,0,2,0,1,0},{0,1,1,2,0,2,1,0,0}, {0,1,1,2,0,2,2,2,0},{0,1,1,2,1,2,0,0,0},{0,1,1,2,2,1,0,0,0},{0,1,2,0,0,0,1,1,2}, {0,1,2,0,0,0,1,2,1},{0,1,2,0,0,0,2,1,1},{0,1,2,0,0,1,0,1,2},{0,1,2,0,0,1,0,2,1}, {0,1,2,0,0,1,1,2,0},{0,1,2,0,0,1,2,1,0},{0,1,2,0,0,2,0,1,1},{0,1,2,0,0,2,1,1,0}, {0,1,2,0,1,0,0,2,1},{0,1,2,0,1,1,0,0,2},{0,1,2,0,1,1,0,2,0},{0,1,2,0,1,2,0,0,1}, {0,1,2,0,1,2,0,2,2},{0,1,2,0,2,0,0,1,1},{0,1,2,0,2,1,0,0,1},{0,1,2,0,2,1,0,1,0}, {0,1,2,0,2,1,0,2,2},{0,1,2,0,2,2,0,1,2},{0,1,2,0,2,2,0,2,1},{0,1,2,1,0,0,1,2,0}, {0,1,2,1,0,0,2,1,0},{0,1,2,1,0,1,0,2,0},{0,1,2,1,0,1,2,0,0},{0,1,2,1,0,2,0,1,0}, {0,1,2,1,0,2,1,0,0},{0,1,2,1,0,2,2,2,0},{0,1,2,1,1,2,0,0,0},{0,1,2,1,2,1,0,0,0}, {0,1,2,2,0,0,1,1,0},{0,1,2,2,0,1,0,1,0},{0,1,2,2,0,1,1,0,0},{0,1,2,2,0,1,2,2,0}, {0,1,2,2,0,2,1,2,0},{0,1,2,2,0,2,2,1,0},{0,1,2,2,1,1,0,0,0},{0,2,0,0,1,1,0,1,2}, {0,2,0,0,1,1,0,2,1},{0,2,0,0,1,2,0,1,1},{0,2,0,0,2,1,0,1,1},{0,2,0,1,0,1,0,1,2}, {0,2,0,1,0,1,0,2,1},{0,2,0,1,0,1,1,2,0},{0,2,0,1,0,1,2,1,0},{0,2,0,1,0,2,0,1,1}, 
{0,2,0,1,0,2,1,1,0},{0,2,0,1,1,0,1,2,0},{0,2,0,1,1,0,2,1,0},{0,2,0,1,2,0,1,1,0}, {0,2,0,2,0,1,0,1,1},{0,2,0,2,0,1,1,1,0},{0,2,0,2,1,0,1,1,0},{0,2,1,0,0,0,1,1,2}, {0,2,1,0,0,0,1,2,1},{0,2,1,0,0,0,2,1,1},{0,2,1,0,0,1,0,1,2},{0,2,1,0,0,1,1,2,0}, {0,2,1,0,0,1,2,1,0},{0,2,1,0,0,2,0,1,1},{0,2,1,0,0,2,1,1,0},{0,2,1,0,1,0,0,1,2}, {0,2,1,0,1,0,0,2,1},{0,2,1,0,1,1,0,0,2},{0,2,1,0,1,1,0,2,0},{0,2,1,0,1,2,0,0,1}, {0,2,1,0,1,2,0,1,0},{0,2,1,0,1,2,0,2,2},{0,2,1,0,2,0,0,1,1},{0,2,1,0,2,1,0,1,0}, {0,2,1,0,2,1,0,2,2},{0,2,1,0,2,2,0,1,2},{0,2,1,0,2,2,0,2,1},{0,2,1,1,0,0,1,2,0}, {0,2,1,1,0,0,2,1,0},{0,2,1,1,0,1,0,2,0},{0,2,1,1,0,1,2,0,0},{0,2,1,1,0,2,0,1,0}, {0,2,1,1,0,2,1,0,0},{0,2,1,1,0,2,2,2,0},{0,2,1,1,1,2,0,0,0},{0,2,1,1,2,1,0,0,0}, {0,2,1,2,0,0,1,1,0},{0,2,1,2,0,1,0,1,0},{0,2,1,2,0,1,1,0,0},{0,2,1,2,0,1,2,2,0}, {0,2,1,2,0,2,1,2,0},{0,2,1,2,0,2,2,1,0},{0,2,1,2,1,1,0,0,0},{0,2,2,0,0,1,0,1,1}, {0,2,2,0,0,1,1,1,0},{0,2,2,0,1,0,0,1,1},{0,2,2,0,1,1,0,0,1},{0,2,2,0,1,1,0,1,0}, {0,2,2,0,1,1,0,2,2},{0,2,2,0,1,2,0,1,2},{0,2,2,0,1,2,0,2,1},{0,2,2,0,2,1,0,1,2}, {0,2,2,0,2,1,0,2,1},{0,2,2,0,2,2,0,1,1},{0,2,2,1,0,0,1,1,0},{0,2,2,1,0,1,0,1,0}, {0,2,2,1,0,1,1,0,0},{0,2,2,1,0,1,2,2,0},{0,2,2,1,0,2,1,2,0},{0,2,2,1,0,2,2,1,0}, {0,2,2,2,0,1,1,2,0},{0,2,2,2,0,1,2,1,0},{0,2,2,2,0,2,1,1,0},{1,0,0,0,0,1,0,1,1}, {1,0,0,0,0,1,1,0,1},{1,0,0,0,1,0,1,1,0},{1,0,0,1,0,0,0,1,1},{1,0,0,1,0,1,0,0,1}, {1,0,0,1,0,1,0,1,0},{1,0,0,1,0,1,0,2,2},{1,0,0,1,0,1,2,0,2},{1,0,0,1,0,2,0,1,2}, {1,0,0,1,0,2,0,2,1},{1,0,0,1,0,2,2,0,1},{1,0,0,1,1,0,0,1,0},{1,0,0,1,1,0,2,2,0}, {1,0,0,1,2,0,2,1,0},{1,0,0,2,0,1,0,1,2},{1,0,0,2,0,1,0,2,1},{1,0,0,2,0,1,1,0,2}, {1,0,0,2,0,1,2,0,1},{1,0,0,2,0,2,0,1,1},{1,0,0,2,0,2,1,0,1},{1,0,0,2,1,0,1,2,0}, {1,0,0,2,1,0,2,1,0},{1,0,0,2,2,0,1,1,0},{1,0,1,0,0,0,0,1,1},{1,0,1,0,0,0,1,0,1}, {1,0,1,0,0,0,1,1,0},{1,0,1,0,0,0,1,2,2},{1,0,1,0,0,0,2,1,2},{1,0,1,0,0,0,2,2,1}, {1,0,1,0,0,1,1,0,0},{1,0,1,0,0,1,2,0,2},{1,0,1,0,0,2,1,0,2},{1,0,1,0,0,2,2,0,1}, {1,0,1,0,1,1,0,0,0},{1,0,1,1,0,0,0,0,1},{1,0,1,1,0,0,2,0,2},{1,0,1,1,0,1,0,0,0}, {1,0,1,1,0,2,0,0,2},{1,0,1,1,0,2,2,0,0},{1,0,1,1,1,0,0,0,0},{1,0,1,1,2,2,0,0,0}, {1,0,1,2,0,0,1,0,2},{1,0,1,2,0,0,2,0,1},{1,0,1,2,0,1,0,0,2},{1,0,1,2,0,1,2,0,0}, {1,0,1,2,0,2,0,0,1},{1,0,1,2,0,2,1,0,0},{1,0,1,2,0,2,2,0,2},{1,0,1,2,1,2,0,0,0}, {1,0,1,2,2,1,0,0,0},{1,0,2,0,0,0,1,1,2},{1,0,2,0,0,0,1,2,1},{1,0,2,0,0,0,2,1,1}, {1,0,2,0,0,1,1,0,2},{1,0,2,0,0,1,2,0,1},{1,0,2,0,0,2,1,0,1},{1,0,2,1,0,0,2,0,1}, {1,0,2,1,0,1,0,0,2},{1,0,2,1,0,1,2,0,0},{1,0,2,1,0,2,0,0,1},{1,0,2,1,0,2,2,0,2}, {1,0,2,1,1,2,0,0,0},{1,0,2,1,2,1,0,0,0},{1,0,2,2,0,0,1,0,1},{1,0,2,2,0,1,0,0,1}, {1,0,2,2,0,1,1,0,0},{1,0,2,2,0,1,2,0,2},{1,0,2,2,0,2,1,0,2},{1,0,2,2,0,2,2,0,1}, {1,0,2,2,1,1,0,0,0},{1,1,0,0,0,0,0,1,1},{1,1,0,0,0,0,1,0,1},{1,1,0,0,0,0,1,1,0}, {1,1,0,0,0,0,1,2,2},{1,1,0,0,0,0,2,1,2},{1,1,0,0,0,0,2,2,1},{1,1,0,0,0,1,0,0,1}, {1,1,0,0,0,1,0,1,0},{1,1,0,0,0,1,0,2,2},{1,1,0,0,0,2,0,1,2},{1,1,0,0,0,2,0,2,1}, {1,1,0,0,1,0,1,0,0},{1,1,0,0,1,0,2,2,0},{1,1,0,0,1,1,0,0,0},{1,1,0,0,2,0,1,2,0}, {1,1,0,0,2,0,2,1,0},{1,1,0,1,0,0,0,0,1},{1,1,0,1,0,0,0,1,0},{1,1,0,1,0,0,0,2,2}, {1,1,0,1,0,0,2,2,0},{1,1,0,1,0,1,0,0,0},{1,1,0,1,0,2,0,0,2},{1,1,0,1,0,2,0,2,0}, {1,1,0,1,1,0,0,0,0},{1,1,0,1,2,0,0,2,0},{1,1,0,1,2,0,2,0,0},{1,1,0,1,2,2,0,0,0}, {1,1,0,2,0,0,0,1,2},{1,1,0,2,0,0,0,2,1},{1,1,0,2,0,0,1,2,0},{1,1,0,2,0,0,2,1,0}, {1,1,0,2,0,1,0,0,2},{1,1,0,2,0,1,0,2,0},{1,1,0,2,0,2,0,0,1},{1,1,0,2,0,2,0,1,0}, {1,1,0,2,0,2,0,2,2},{1,1,0,2,1,0,0,2,0},{1,1,0,2,1,0,2,0,0},{1,1,0,2,1,2,0,0,0}, 
{1,1,0,2,2,0,0,1,0},{1,1,0,2,2,0,1,0,0},{1,1,0,2,2,0,2,2,0},{1,1,0,2,2,1,0,0,0}, {1,1,2,0,0,0,0,1,2},{1,1,2,0,0,0,0,2,1},{1,1,2,0,0,0,1,0,2},{1,1,2,0,0,0,1,2,0}, {1,1,2,0,0,0,2,0,1},{1,1,2,0,0,0,2,1,0},{1,1,2,0,0,0,2,2,2},{1,1,2,0,1,2,0,0,0}, {1,1,2,0,2,1,0,0,0},{1,1,2,1,0,2,0,0,0},{1,1,2,1,2,0,0,0,0},{1,1,2,2,0,1,0,0,0}, {1,1,2,2,1,0,0,0,0},{1,1,2,2,2,2,0,0,0},{1,2,0,0,0,0,1,1,2},{1,2,0,0,0,0,1,2,1}, {1,2,0,0,0,0,2,1,1},{1,2,0,0,0,1,0,1,2},{1,2,0,0,0,1,0,2,1},{1,2,0,0,0,2,0,1,1}, {1,2,0,0,1,0,1,2,0},{1,2,0,0,1,0,2,1,0},{1,2,0,0,2,0,1,1,0},{1,2,0,1,0,0,0,1,2}, {1,2,0,1,0,0,0,2,1},{1,2,0,1,0,0,2,1,0},{1,2,0,1,0,1,0,0,2},{1,2,0,1,0,1,0,2,0}, {1,2,0,1,0,2,0,0,1},{1,2,0,1,0,2,0,1,0},{1,2,0,1,0,2,0,2,2},{1,2,0,1,1,0,0,2,0}, {1,2,0,1,1,0,2,0,0},{1,2,0,1,1,2,0,0,0},{1,2,0,1,2,0,0,1,0},{1,2,0,1,2,0,2,2,0}, {1,2,0,1,2,1,0,0,0},{1,2,0,2,0,0,0,1,1},{1,2,0,2,0,0,1,1,0},{1,2,0,2,0,1,0,0,1}, {1,2,0,2,0,1,0,1,0},{1,2,0,2,0,1,0,2,2},{1,2,0,2,0,2,0,1,2},{1,2,0,2,0,2,0,2,1}, {1,2,0,2,1,0,0,1,0},{1,2,0,2,1,0,1,0,0},{1,2,0,2,1,0,2,2,0},{1,2,0,2,1,1,0,0,0}, {1,2,0,2,2,0,1,2,0},{1,2,0,2,2,0,2,1,0},{1,2,1,0,0,0,0,1,2},{1,2,1,0,0,0,0,2,1}, {1,2,1,0,0,0,1,0,2},{1,2,1,0,0,0,1,2,0},{1,2,1,0,0,0,2,0,1},{1,2,1,0,0,0,2,1,0}, {1,2,1,0,0,0,2,2,2},{1,2,1,0,1,2,0,0,0},{1,2,1,0,2,1,0,0,0},{1,2,1,1,0,2,0,0,0}, {1,2,1,1,2,0,0,0,0},{1,2,1,2,0,1,0,0,0},{1,2,1,2,1,0,0,0,0},{1,2,1,2,2,2,0,0,0}, {1,2,2,0,0,0,0,1,1},{1,2,2,0,0,0,1,0,1},{1,2,2,0,0,0,1,1,0},{1,2,2,0,0,0,1,2,2}, {1,2,2,0,0,0,2,1,2},{1,2,2,0,0,0,2,2,1},{1,2,2,0,1,1,0,0,0},{1,2,2,1,0,1,0,0,0}, {1,2,2,1,1,0,0,0,0},{1,2,2,1,2,2,0,0,0},{1,2,2,2,1,2,0,0,0},{1,2,2,2,2,1,0,0,0}, {2,0,0,1,0,1,0,1,2},{2,0,0,1,0,1,0,2,1},{2,0,0,1,0,1,1,0,2},{2,0,0,1,0,1,2,0,1}, {2,0,0,1,0,2,0,1,1},{2,0,0,1,0,2,1,0,1},{2,0,0,1,1,0,1,2,0},{2,0,0,1,1,0,2,1,0}, {2,0,0,1,2,0,1,1,0},{2,0,0,2,0,1,0,1,1},{2,0,0,2,0,1,1,0,1},{2,0,0,2,1,0,1,1,0}, {2,0,1,0,0,0,1,1,2},{2,0,1,0,0,0,1,2,1},{2,0,1,0,0,0,2,1,1},{2,0,1,0,0,1,1,0,2}, {2,0,1,0,0,2,1,0,1},{2,0,1,1,0,0,1,0,2},{2,0,1,1,0,0,2,0,1},{2,0,1,1,0,1,0,0,2}, {2,0,1,1,0,1,2,0,0},{2,0,1,1,0,2,0,0,1},{2,0,1,1,0,2,1,0,0},{2,0,1,1,0,2,2,0,2}, {2,0,1,1,1,2,0,0,0},{2,0,1,1,2,1,0,0,0},{2,0,1,2,0,0,1,0,1},{2,0,1,2,0,1,1,0,0}, {2,0,1,2,0,1,2,0,2},{2,0,1,2,0,2,1,0,2},{2,0,1,2,0,2,2,0,1},{2,0,1,2,1,1,0,0,0}, {2,0,2,0,0,1,1,0,1},{2,0,2,1,0,0,1,0,1},{2,0,2,1,0,1,0,0,1},{2,0,2,1,0,1,1,0,0}, {2,0,2,1,0,1,2,0,2},{2,0,2,1,0,2,1,0,2},{2,0,2,1,0,2,2,0,1},{2,0,2,2,0,1,1,0,2}, {2,0,2,2,0,1,2,0,1},{2,0,2,2,0,2,1,0,1},{2,1,0,0,0,0,1,1,2},{2,1,0,0,0,0,1,2,1}, {2,1,0,0,0,0,2,1,1},{2,1,0,0,0,1,0,1,2},{2,1,0,0,0,1,0,2,1},{2,1,0,0,0,2,0,1,1}, {2,1,0,0,1,0,1,2,0},{2,1,0,0,2,0,1,1,0},{2,1,0,1,0,0,0,1,2},{2,1,0,1,0,0,0,2,1}, {2,1,0,1,0,0,1,2,0},{2,1,0,1,0,0,2,1,0},{2,1,0,1,0,1,0,0,2},{2,1,0,1,0,1,0,2,0}, {2,1,0,1,0,2,0,0,1},{2,1,0,1,0,2,0,1,0},{2,1,0,1,0,2,0,2,2},{2,1,0,1,1,0,0,2,0}, {2,1,0,1,1,0,2,0,0},{2,1,0,1,1,2,0,0,0},{2,1,0,1,2,0,0,1,0},{2,1,0,1,2,0,1,0,0}, {2,1,0,1,2,0,2,2,0},{2,1,0,1,2,1,0,0,0},{2,1,0,2,0,0,0,1,1},{2,1,0,2,0,0,1,1,0}, {2,1,0,2,0,1,0,0,1},{2,1,0,2,0,1,0,1,0},{2,1,0,2,0,1,0,2,2},{2,1,0,2,0,2,0,1,2}, {2,1,0,2,0,2,0,2,1},{2,1,0,2,1,0,1,0,0},{2,1,0,2,1,0,2,2,0},{2,1,0,2,1,1,0,0,0}, {2,1,0,2,2,0,1,2,0},{2,1,0,2,2,0,2,1,0},{2,1,1,0,0,0,0,1,2},{2,1,1,0,0,0,0,2,1}, {2,1,1,0,0,0,1,0,2},{2,1,1,0,0,0,1,2,0},{2,1,1,0,0,0,2,0,1},{2,1,1,0,0,0,2,1,0}, {2,1,1,0,0,0,2,2,2},{2,1,1,0,1,2,0,0,0},{2,1,1,0,2,1,0,0,0},{2,1,1,1,0,2,0,0,0}, {2,1,1,1,2,0,0,0,0},{2,1,1,2,0,1,0,0,0},{2,1,1,2,1,0,0,0,0},{2,1,1,2,2,2,0,0,0}, 
{2,1,2,0,0,0,0,1,1},{2,1,2,0,0,0,1,0,1},{2,1,2,0,0,0,1,1,0},{2,1,2,0,0,0,1,2,2}, {2,1,2,0,0,0,2,1,2},{2,1,2,0,0,0,2,2,1},{2,1,2,0,1,1,0,0,0},{2,1,2,1,0,1,0,0,0}, {2,1,2,1,1,0,0,0,0},{2,1,2,1,2,2,0,0,0},{2,1,2,2,1,2,0,0,0},{2,1,2,2,2,1,0,0,0}, {2,2,0,0,0,1,0,1,1},{2,2,0,0,1,0,1,1,0},{2,2,0,1,0,0,0,1,1},{2,2,0,1,0,0,1,1,0}, {2,2,0,1,0,1,0,0,1},{2,2,0,1,0,1,0,1,0},{2,2,0,1,0,1,0,2,2},{2,2,0,1,0,2,0,1,2}, {2,2,0,1,0,2,0,2,1},{2,2,0,1,1,0,0,1,0},{2,2,0,1,1,0,1,0,0},{2,2,0,1,1,0,2,2,0}, {2,2,0,1,2,0,1,2,0},{2,2,0,1,2,0,2,1,0},{2,2,0,2,0,1,0,1,2},{2,2,0,2,0,1,0,2,1}, {2,2,0,2,0,2,0,1,1},{2,2,0,2,1,0,1,2,0},{2,2,0,2,1,0,2,1,0},{2,2,0,2,2,0,1,1,0}, {2,2,1,0,0,0,0,1,1},{2,2,1,0,0,0,1,0,1},{2,2,1,0,0,0,1,1,0},{2,2,1,0,0,0,1,2,2}, {2,2,1,0,0,0,2,1,2},{2,2,1,0,0,0,2,2,1},{2,2,1,0,1,1,0,0,0},{2,2,1,1,0,1,0,0,0}, {2,2,1,1,1,0,0,0,0},{2,2,1,1,2,2,0,0,0},{2,2,1,2,1,2,0,0,0},{2,2,1,2,2,1,0,0,0}, {2,2,2,0,0,0,1,1,2},{2,2,2,0,0,0,1,2,1},{2,2,2,0,0,0,2,1,1},{2,2,2,1,1,2,0,0,0}, {2,2,2,1,2,1,0,0,0},{2,2,2,2,1,1,0,0,0},{0,0,1,0,0,1,1,2,1},{0,0,1,0,0,1,2,1,1}, {0,0,1,0,0,2,1,1,1},{0,0,1,0,1,0,1,1,2},{0,0,1,0,1,0,1,2,1},{0,0,1,0,1,1,1,0,2}, {0,0,1,0,1,1,1,2,0},{0,0,1,0,1,1,2,0,1},{0,0,1,0,1,2,1,0,1},{0,0,1,0,1,2,1,1,0}, {0,0,1,0,1,2,1,2,2},{0,0,1,0,2,0,1,1,1},{0,0,1,0,2,1,1,0,1},{0,0,1,0,2,1,2,2,1}, {0,0,1,1,0,1,0,2,1},{0,0,1,1,1,0,1,0,2},{0,0,1,1,1,0,1,2,0},{0,0,1,1,1,1,0,0,2}, {0,0,1,1,1,1,0,2,0},{0,0,1,1,1,1,2,0,0},{0,0,1,1,1,2,1,0,0},{0,0,1,1,2,1,0,0,1}, {0,0,1,2,0,0,1,1,1},{0,0,1,2,0,1,0,1,1},{0,0,1,2,0,1,2,2,1},{0,0,1,2,1,0,1,0,1}, {0,0,1,2,1,0,1,1,0},{0,0,1,2,1,0,1,2,2},{0,0,1,2,1,1,0,0,1},{0,0,1,2,1,1,1,0,0}, {0,0,1,2,1,2,1,0,2},{0,0,1,2,1,2,1,2,0},{0,0,1,2,2,1,0,2,1},{0,0,1,2,2,1,2,0,1}, {0,0,2,0,0,1,1,1,1},{0,0,2,0,1,0,1,1,1},{0,0,2,0,2,2,1,1,1},{0,0,2,1,0,0,1,1,1}, {0,0,2,1,1,1,0,0,1},{0,0,2,1,1,1,0,1,0},{0,0,2,1,1,1,0,2,2},{0,0,2,1,1,1,1,0,0}, {0,0,2,1,1,1,2,0,2},{0,0,2,1,1,1,2,2,0},{0,0,2,2,0,2,1,1,1},{0,0,2,2,2,0,1,1,1}, {0,1,0,0,0,2,1,1,1},{0,1,0,0,1,0,1,1,2},{0,1,0,0,1,0,2,1,1},{0,1,0,0,1,1,2,1,0}, {0,1,0,0,1,2,1,1,0},{0,1,0,0,1,2,2,1,2},{0,1,0,0,2,0,1,1,1},{0,1,0,1,1,0,0,1,2}, {0,1,0,1,1,1,0,0,2},{0,1,0,1,1,1,0,2,0},{0,1,0,1,1,1,2,0,0},{0,1,0,1,1,2,0,1,0}, {0,1,0,2,0,0,1,1,1},{0,1,0,2,1,0,0,1,1},{0,1,0,2,1,0,2,1,2},{0,1,0,2,1,1,0,1,0}, {0,1,0,2,1,2,0,1,2},{0,1,0,2,1,2,2,1,0},{0,1,1,0,0,1,2,0,1},{0,1,1,0,1,0,1,0,2}, {0,1,1,0,1,0,1,2,0},{0,1,1,0,1,0,2,1,0},{0,1,1,0,1,2,1,0,0},{0,1,1,2,0,1,0,0,1}, {0,1,1,2,1,0,0,1,0},{0,1,1,2,1,0,1,0,0},{0,1,2,0,1,0,1,1,0},{0,1,2,0,1,0,2,1,2}, {0,1,2,0,1,2,2,1,0},{0,1,2,1,1,0,0,1,0},{0,1,2,2,1,0,0,1,2},{0,1,2,2,1,0,2,1,0}, {0,1,2,2,1,2,0,1,0},{0,2,0,0,0,1,1,1,1},{0,2,0,0,1,0,1,1,1},{0,2,0,0,2,2,1,1,1}, {0,2,0,1,0,0,1,1,1},{0,2,0,1,1,1,0,0,1},{0,2,0,1,1,1,0,1,0},{0,2,0,1,1,1,0,2,2}, {0,2,0,1,1,1,1,0,0},{0,2,0,1,1,1,2,0,2},{0,2,0,1,1,1,2,2,0},{0,2,0,2,0,2,1,1,1}, {0,2,0,2,2,0,1,1,1},{0,2,1,0,0,1,1,0,1},{0,2,1,0,0,1,2,2,1},{0,2,1,0,1,0,1,0,1}, {0,2,1,0,1,0,1,1,0},{0,2,1,0,1,0,1,2,2},{0,2,1,0,1,1,1,0,0},{0,2,1,0,1,2,1,0,2}, {0,2,1,0,1,2,1,2,0},{0,2,1,0,2,1,2,0,1},{0,2,1,1,0,1,0,0,1},{0,2,1,1,1,0,1,0,0}, {0,2,1,2,0,1,0,2,1},{0,2,1,2,0,1,2,0,1},{0,2,1,2,1,0,1,0,2},{0,2,1,2,1,0,1,2,0}, {0,2,1,2,1,2,1,0,0},{0,2,1,2,2,1,0,0,1},{0,2,2,0,0,2,1,1,1},{0,2,2,0,2,0,1,1,1}, {0,2,2,1,1,1,0,0,2},{0,2,2,1,1,1,0,2,0},{0,2,2,1,1,1,2,0,0},{0,2,2,2,0,0,1,1,1}, {1,0,0,0,0,2,1,1,1},{1,0,0,0,1,0,1,2,1},{1,0,0,0,1,0,2,1,1},{1,0,0,0,1,1,0,2,1}, {1,0,0,0,1,1,2,0,1},{1,0,0,0,1,2,0,1,1},{1,0,0,0,1,2,1,0,1},{1,0,0,0,1,2,2,2,1}, 
{1,0,0,0,2,0,1,1,1},{1,0,0,1,0,0,1,1,2},{1,0,0,1,0,0,1,2,1},{1,0,0,1,0,1,1,2,0}, {1,0,0,1,0,2,1,1,0},{1,0,0,1,0,2,1,2,2},{1,0,0,1,1,0,0,2,1},{1,0,0,1,1,0,1,0,2}, {1,0,0,1,1,0,2,0,1},{1,0,0,1,1,1,0,0,2},{1,0,0,1,1,1,0,2,0},{1,0,0,1,1,1,2,0,0}, {1,0,0,1,1,2,0,0,1},{1,0,0,1,1,2,1,0,0},{1,0,0,1,2,0,1,0,1},{1,0,0,1,2,0,1,2,2}, {1,0,0,1,2,1,1,0,0},{1,0,0,1,2,2,1,0,2},{1,0,0,1,2,2,1,2,0},{1,0,0,2,0,0,1,1,1}, {1,0,0,2,1,0,0,1,1},{1,0,0,2,1,0,1,0,1},{1,0,0,2,1,0,2,2,1},{1,0,0,2,1,1,0,0,1}, {1,0,0,2,1,2,0,2,1},{1,0,0,2,1,2,2,0,1},{1,0,1,0,0,1,0,2,1},{1,0,1,0,1,0,0,2,1}, {1,0,1,0,1,0,1,0,2},{1,0,1,0,1,0,1,2,0},{1,0,1,0,1,0,2,0,1},{1,0,1,0,1,2,0,0,1}, {1,0,1,0,1,2,1,0,0},{1,0,1,0,2,1,0,0,1},{1,0,1,1,0,0,1,2,0},{1,0,1,1,2,0,1,0,0}, {1,0,1,2,1,0,0,0,1},{1,0,1,2,1,0,1,0,0},{1,0,2,0,1,0,0,1,1},{1,0,2,0,1,0,1,0,1}, {1,0,2,0,1,0,2,2,1},{1,0,2,0,1,1,0,0,1},{1,0,2,0,1,2,0,2,1},{1,0,2,0,1,2,2,0,1}, {1,0,2,1,0,0,1,1,0},{1,0,2,1,0,0,1,2,2},{1,0,2,1,0,2,1,2,0},{1,0,2,1,1,0,0,0,1}, {1,0,2,1,1,0,1,0,0},{1,0,2,1,2,0,1,0,2},{1,0,2,1,2,0,1,2,0},{1,0,2,1,2,2,1,0,0}, {1,0,2,2,1,0,0,2,1},{1,0,2,2,1,0,2,0,1},{1,0,2,2,1,2,0,0,1},{1,1,0,0,1,0,0,1,2}, {1,1,0,0,1,0,0,2,1},{1,1,0,0,1,0,2,0,1},{1,1,0,0,1,2,0,0,1},{1,1,0,0,1,2,0,1,0}, {1,1,0,1,0,0,1,0,2},{1,1,0,1,0,2,1,0,0},{1,1,0,2,1,0,0,0,1},{1,1,1,0,0,1,0,0,2}, {1,1,1,0,0,1,0,2,0},{1,1,1,0,0,1,2,0,0},{1,1,1,0,0,2,0,0,1},{1,1,1,0,0,2,0,1,0}, {1,1,1,0,0,2,0,2,2},{1,1,1,0,0,2,1,0,0},{1,1,1,0,0,2,2,0,2},{1,1,1,0,0,2,2,2,0}, {1,1,1,0,1,0,0,0,2},{1,1,1,0,1,0,0,2,0},{1,1,1,0,1,0,2,0,0},{1,1,1,0,2,0,0,0,1}, {1,1,1,0,2,0,0,1,0},{1,1,1,0,2,0,0,2,2},{1,1,1,0,2,0,1,0,0},{1,1,1,0,2,0,2,0,2}, {1,1,1,0,2,0,2,2,0},{1,1,1,0,2,2,0,0,2},{1,1,1,0,2,2,0,2,0},{1,1,1,0,2,2,2,0,0}, {1,1,1,1,0,0,0,0,2},{1,1,1,1,0,0,0,2,0},{1,1,1,1,0,0,2,0,0},{1,1,1,2,0,0,0,0,1}, {1,1,1,2,0,0,0,1,0},{1,1,1,2,0,0,0,2,2},{1,1,1,2,0,0,1,0,0},{1,1,1,2,0,0,2,0,2}, {1,1,1,2,0,0,2,2,0},{1,1,1,2,0,2,0,0,2},{1,1,1,2,0,2,0,2,0},{1,1,1,2,0,2,2,0,0}, {1,1,1,2,2,0,0,0,2},{1,1,1,2,2,0,0,2,0},{1,1,1,2,2,0,2,0,0},{1,1,2,0,1,0,0,0,1}, {1,1,2,0,1,0,0,1,0},{1,1,2,1,0,0,1,0,0},{1,2,0,0,1,0,0,1,1},{1,2,0,0,1,0,1,0,1}, {1,2,0,0,1,0,2,2,1},{1,2,0,0,1,1,0,0,1},{1,2,0,0,1,2,0,2,1},{1,2,0,0,1,2,2,0,1}, {1,2,0,1,0,0,1,0,1},{1,2,0,1,0,0,1,2,2},{1,2,0,1,0,1,1,0,0},{1,2,0,1,0,2,1,0,2}, {1,2,0,1,0,2,1,2,0},{1,2,0,1,1,0,0,0,1},{1,2,0,1,2,0,1,0,2},{1,2,0,1,2,2,1,0,0}, {1,2,0,2,1,0,0,2,1},{1,2,0,2,1,0,2,0,1},{1,2,0,2,1,2,0,0,1},{1,2,1,0,0,1,0,0,1}, {1,2,1,0,1,0,0,0,1},{1,2,1,0,1,0,1,0,0},{1,2,1,1,0,0,1,0,0},{1,2,2,0,1,0,0,2,1}, {1,2,2,0,1,0,2,0,1},{1,2,2,0,1,2,0,0,1},{1,2,2,1,0,0,1,0,2},{1,2,2,1,0,0,1,2,0}, {1,2,2,1,0,2,1,0,0},{1,2,2,1,2,0,1,0,0},{1,2,2,2,1,0,0,0,1},{2,0,0,0,0,1,1,1,1}, {2,0,0,0,1,0,1,1,1},{2,0,0,0,2,2,1,1,1},{2,0,0,1,0,0,1,1,1},{2,0,0,1,1,1,0,0,1}, {2,0,0,1,1,1,0,1,0},{2,0,0,1,1,1,0,2,2},{2,0,0,1,1,1,1,0,0},{2,0,0,1,1,1,2,0,2}, {2,0,0,1,1,1,2,2,0},{2,0,0,2,0,2,1,1,1},{2,0,0,2,2,0,1,1,1},{2,0,1,0,0,1,0,1,1}, {2,0,1,0,0,1,2,2,1},{2,0,1,0,1,0,1,0,1},{2,0,1,0,1,0,1,1,0},{2,0,1,0,1,0,1,2,2}, {2,0,1,0,1,1,0,0,1},{2,0,1,0,1,1,1,0,0},{2,0,1,0,1,2,1,0,2},{2,0,1,0,1,2,1,2,0}, {2,0,1,0,2,1,0,2,1},{2,0,1,0,2,1,2,0,1},{2,0,1,1,1,0,1,0,0},{2,0,1,2,0,1,0,2,1}, {2,0,1,2,1,0,1,0,2},{2,0,1,2,1,0,1,2,0},{2,0,1,2,1,2,1,0,0},{2,0,1,2,2,1,0,0,1}, {2,0,2,0,0,2,1,1,1},{2,0,2,0,2,0,1,1,1},{2,0,2,1,1,1,0,0,2},{2,0,2,1,1,1,0,2,0}, {2,0,2,1,1,1,2,0,0},{2,0,2,2,0,0,1,1,1},{2,1,0,0,1,0,0,1,1},{2,1,0,0,1,0,2,1,2}, {2,1,0,0,1,1,0,1,0},{2,1,0,0,1,2,0,1,2},{2,1,0,0,1,2,2,1,0},{2,1,0,2,1,0,0,1,2}, 
{2,1,0,2,1,2,0,1,0},{2,1,1,0,0,1,0,0,1},{2,1,1,0,1,0,0,1,0},{2,1,1,0,1,0,1,0,0}, {2,1,2,0,1,0,0,1,2},{2,1,2,0,1,0,2,1,0},{2,1,2,0,1,2,0,1,0},{2,1,2,2,1,0,0,1,0}, {2,2,0,0,0,2,1,1,1},{2,2,0,0,2,0,1,1,1},{2,2,0,1,1,1,0,0,2},{2,2,0,1,1,1,0,2,0}, {2,2,0,1,1,1,2,0,0},{2,2,0,2,0,0,1,1,1},{2,2,1,0,0,1,0,2,1},{2,2,1,0,0,1,2,0,1}, {2,2,1,0,1,0,1,0,2},{2,2,1,0,1,0,1,2,0},{2,2,1,0,1,2,1,0,0},{2,2,1,0,2,1,0,0,1}, {2,2,1,2,0,1,0,0,1},{2,2,1,2,1,0,1,0,0},{0,0,1,1,0,0,0,1,1},{0,0,1,1,1,0,0,0,1}, {0,0,1,1,1,0,0,1,0},{0,1,0,0,0,1,1,0,1},{0,1,0,0,1,0,1,0,1},{0,1,0,0,1,1,1,0,0}, {0,1,0,1,0,0,1,0,1},{0,1,0,1,1,0,0,0,1},{0,1,1,1,0,0,0,0,1},{1,0,0,0,0,1,1,1,0}, {1,0,0,0,1,1,0,1,0},{1,0,0,0,1,1,1,0,0},{1,0,1,0,0,1,0,1,0},{1,0,1,0,1,0,0,1,0}, {1,0,1,1,0,0,0,1,0},{1,1,0,0,0,1,1,0,0} }; private double categoricalAtt[][]; private static double weights[] = { -0.00000000000000063401, 0.00000000000000055700, 0.00000000000000012769, -0.52573653474162341000, 0.43427498705107342000, 0.09146154769055023200, 0.00000000000000138130, -0.00000000000000118053, -0.00000000000000050631, 0.52573653474162607000, -0.43427498705107603000, -0.09146154769055094000, -0.00000000000000057743, 0.00000000000000037314, -0.00000000000000023441, 0.52573653474162907000, -0.43427498705107787000, -0.09146154769055155100, -0.00000000000000405476, 0.00000000000000339568, 0.00000000000000053496, -0.52573653474162763000, 0.43427498705107587000, 0.09146154769055155100, -0.00000000000000116499, 0.00000000000000111960, 0.00000000000000004464, 0.59181480684449950000, -0.48617039139374285000, -0.10564441545075645000, 0.33659693927260309000, -0.28023189914604213000, -0.05636504012656110000, -0.00000000000000339401, 0.00000000000000312093, 0.00000000000000057542, 0.33659693927260292000, -0.28023189914604213000, -0.05636504012656087800, 0.00000000000000099480, -0.00000000000000067295, -0.00000000000000003901, -0.33659693927260537000, 0.28023189914604435000, 0.05636504012656118300, -0.00000000000000284785, 0.00000000000000269180, 0.00000000000000026089, -0.33659693927260426000, 0.28023189914604330000, 0.05636504012656121800, -0.59181480684449039000, 0.48617039139373414000, 0.10564441545075609000, 0.00000000000000098567, -0.00000000000000095474, -0.00000000000000021207, -0.33659693927260698000, 0.28023189914604579000, 0.05636504012656142600, -0.59181480684449372000, 0.48617039139373774000, 0.10564441545075645000, 0.33659693927260514000, -0.28023189914604435000, -0.05636504012656100300, -0.00000000000000010012, 0.00000000000000001702, 0.00000000000000012437, -0.33659693927260204000, 0.28023189914604152000, 0.05636504012656010100, 0.59181480684449428000, -0.48617039139373813000, -0.10564441545075638000, 0.33659693927260081000, -0.28023189914603991000, -0.05636504012656074600, 0.00000000000000216976, -0.00000000000000195478, -0.00000000000000023527, 0.39961448116107012000, -0.35734834346184241000, -0.04226613769922773400, -0.33634249144114892000, 0.28239332896420155000, 0.05394916247694748300, 0.39961448116106396000, -0.35734834346183769000, -0.04226613769922723400, -0.33634249144114703000, 0.28239332896420027000, 0.05394916247694724100, -0.21667948075941171000, 0.12935693076722185000, 0.08732254999219028800, -0.33634249144114398000, 0.28239332896419722000, 0.05394916247694688700, 0.39961448116106157000, -0.35734834346183453000, -0.04226613769922710200, -0.33634249144114919000, 0.28239332896420105000, 0.05394916247694810100, 0.39961448116107307000, -0.35734834346184485000, -0.04226613769922824700, -0.54188833749531484000, 0.49456532031183192000, 
0.04732301718348254400, 0.00000000000000042643, -0.00000000000000052416, -0.00000000000000028161, 0.54188833749532672000, -0.49456532031184147000, -0.04732301718348516700, 0.00000000000000208148, -0.00000000000000170526, -0.00000000000000039120, -0.00000000000001165642, 0.00000000000000998830, 0.00000000000000133016, -0.00000000000000389738, 0.00000000000000286692, 0.00000000000000081238, 0.54188833749532805000, -0.49456532031184208000, -0.04732301718348581200, -0.00000000000000308117, 0.00000000000000212213, 0.00000000000000117840, -0.54188833749532439000, 0.49456532031183975000, 0.04732301718348420900, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.20000000000000001000, 0.33333333333333331000, 0.33333333333333331000, 0.33333333333333331000, 0.00000000000000093850, -0.00000000000000054323, -0.00000000000000011761, -0.03290466729806285100, 0.00000000000000063771, 0.00000000000000000000, 0.00000000000000000000, 0.00000000000000000000, 0.00000000000000000000}; // ********************************************************************** // MAIN // ********************************************************************** public static void main(String[] args) throws Exception { double xData[][]; // Input Attributes for Trainer int yData[]; // Output Attributes for Trainer int i, j; // array indicies int nWeights = 0; // Number of weights obtained from network String trainLogName = "BinaryClassificationNetworkEx2.log"; int[][] z; // ********************************************************************** // PREPROCESS TRAINING PATTERNS // ********************************************************************** long t0 = System.currentTimeMillis(); xData = new double[nObs][nInputs]; yData = new int[nObs]; /* Perform Binary Filtering. */ for (i=0;i<data.length;i++) { for (j=0;j<data[0].length;j++) { data[i][j]++; } } int xx[] = new int[nObs]; UnsupervisedNominalFilter filter = new UnsupervisedNominalFilter(3); for (i=0; i<9; i++) { // Copy each variable to a temp var for (j=0; j<nObs; j++) { xx[j] = data[j][i]; } // Perform binary filter on temp var z = filter.encode(xx); // Copy binary encoded var to xData for (j=0; j<nObs; j++) { for (int k=0; k<3; k++) { xData[j][k+(i*3)] = (double) z[j][k]; } } } for (i=0; i < nObs; i++) { yData[i] = (i >= 626 ? 
0 : 1); } // ********************************************************************** // CREATE FEEDFORWARD NETWORK // ********************************************************************** FeedForwardNetwork network = new FeedForwardNetwork(); network.getInputLayer().createInputs(nInputs); network.createHiddenLayer().createPerceptrons(nPerceptrons1); network.createHiddenLayer().createPerceptrons(nPerceptrons2); network.getOutputLayer().createPerceptrons(nOutputs); network.linkAll(); network.setWeights(weights); Perceptron perceptrons[] = network.getPerceptrons(); for (i=0; i < perceptrons.length-1; i++) { perceptrons[i].setActivation(hiddenLayerActivation); } // ********************************************************************** // SET OUTPUT LAYER ACTIVATION FUNCTION TO LOGISTIC FOR BINARY CLASSIFICATION // ********************************************************************** perceptrons[perceptrons.length-1].setActivation(outputLayerActivation); BinaryClassification classification = new BinaryClassification(network); QuasiNewtonTrainer stageITrainer = new QuasiNewtonTrainer(); QuasiNewtonTrainer stageIITrainer = new QuasiNewtonTrainer(); stageITrainer.setError(classification.getError()); stageIITrainer.setError(classification.getError()); stageITrainer.setMaximumTrainingIterations(8000); stageITrainer.setMaximumStepsize(10.0); stageIITrainer.setMaximumStepsize(10.0); stageITrainer.setRelativeTolerance(10e-20); stageIITrainer.setRelativeTolerance(10e-20); stageIITrainer.setMaximumTrainingIterations(8000); EpochTrainer trainer = new EpochTrainer(stageITrainer, stageIITrainer); // Set Training Parameters trainer.setNumberOfEpochs(20); trainer.setEpochSize(nObs); // Set random number seeds to produce repeatable output trainer.setRandom(new Random(5555)); trainer.setRandomSamples(new Random(5555), new Random(5555)); // If tracing is requested setup training logger if (trace) { try { Handler handler = new FileHandler(trainLogName); Logger logger = Logger.getLogger("com.imsl.datamining.neural"); logger.setLevel(Level.FINEST); logger.addHandler(handler); handler.setFormatter(QuasiNewtonTrainer.getFormatter()); System.out.println("--> Training Log Created in "+ trainLogName); } catch (Exception e) { System.out.println("--> Cannot Create Training Log."); } } classification.train(trainer, xData, yData); System.out.println("trainer.getErrorValue = "+trainer.getErrorValue()); System.out.println("StageITrainer.getErrorValue = "+stageITrainer.getErrorValue()); System.out.println("StageIITrainer.getErrorValue = "+stageIITrainer.getErrorValue()); // ********************************************************************** // DISPLAY TRAINING STATISTICS // ********************************************************************** double stats[] = classification.computeStatistics(xData, yData); System.out.println("***********************************************"); System.out.println("--> Cross-entropy error: "+(float)stats[0]); System.out.println("--> Classification error rate: "+(float)stats[1]); System.out.println("***********************************************"); System.out.println(""); // ********************************************************************** // OBTAIN AND DISPLAY NETWORK WEIGHTS AND GRADIENTS // ********************************************************************** double weight[] = network.getWeights(); double gradient[] = trainer.getErrorGradient(); double wg[][] = new double[weight.length][2]; for(i = 0; i < weight.length; i++) { wg[i][0] = weight[i]; wg[i][1] = gradient[i]; } 
PrintMatrixFormat pmf = new PrintMatrixFormat(); pmf.setNumberFormat(new java.text.DecimalFormat("0.000000")); pmf.setColumnLabels(new String[]{"Weights", "Gradients"}); new PrintMatrix().print(pmf,wg); // ********************************************************************** // forecast the network // ********************************************************************** double report[][] = new double[nObs][2]; for ( i = 0; i < 50; i++) { report[i][0] = yData[i]; report[i][1] = classification.predictedClass(xData[i]); } pmf = new PrintMatrixFormat(); pmf.setColumnLabels(new String[]{"Expected","Predicted"}); new PrintMatrix("Forecast").print(pmf, report); long t1 = System.currentTimeMillis(); double small = 1.e-7; double time = t1-t0; //Math.max(small, (double)(t1-t0)/(double)iters); time = time/1000; System.out.println("****************Time: "+time); System.out.println("trainer.getErrorValue = "+trainer.getErrorValue()); System.out.println("StageITrainer.getErrorValue = "+stageITrainer.getErrorValue()); System.out.println("StageIITrainer.getErrorValue = "+stageIITrainer.getErrorValue()); } }
--> Training Log Created in BinaryClassificationNetworkEx2.log trainer.getErrorValue = 1.4572899893203097 StageITrainer.getErrorValue = 482.27809835973795 StageIITrainer.getErrorValue = 1.4572899893203097 *********************************************** --> Cross-entropy error: 1.4572899 --> Classification error rate: 0.0020876827 *********************************************** Weights Gradients 0 2.944218 -0.000103 1 11.572133 -0.002104 2 -10.464978 0.031618 3 18.197968 -0.099090 4 26.552980 -0.007697 5 -12.948492 -0.004065 6 -6.920502 0.026458 7 12.449166 0.002808 8 -22.044311 -0.101337 9 -28.603049 -0.000001 10 11.236107 0.000069 11 2.983088 -0.000000 12 -10.165526 0.000501 13 1.947292 -0.000411 14 -19.153976 -0.000000 15 -8.400962 -0.004047 16 -12.026586 -0.000877 17 -6.175538 0.034892 18 16.752667 -0.302597 19 27.202764 -0.007699 20 7.846400 -0.000000 21 9.415102 0.000000 22 -0.717963 -0.000000 23 -22.044410 0.000000 24 -36.633994 0.000001 25 2.960843 -0.000053 26 9.591344 0.025231 27 -0.050062 0.000035 28 4.260128 0.101759 29 -11.478470 0.000000 30 -10.507361 0.000000 31 17.400312 0.024341 32 -4.365829 0.030967 33 20.318348 -0.002424 34 -40.598205 -0.007753 35 11.547888 -0.004180 36 -13.955145 0.000013 37 -12.967388 0.003914 38 -24.426023 -0.198309 39 28.236830 0.000055 40 1.006560 0.000081 41 4.508082 -0.000000 42 7.094010 0.000046 43 2.986456 -0.000105 44 -9.215039 -0.000001 45 -18.006465 -0.000063 46 6.899860 0.026472 47 -9.835696 0.002747 48 17.886021 -0.214726 49 1.824013 0.000000 50 12.255996 -0.003981 51 -2.444224 -0.002118 52 4.933440 0.032021 53 -22.233104 0.013710 54 -11.061314 -0.007698 55 6.440296 -0.000056 56 3.000394 0.000000 57 -4.155388 0.000159 58 2.669732 0.000177 59 -11.757717 0.000000 60 27.524414 -0.003981 61 55.921417 -0.000001 62 -1.707040 0.004514 63 20.846132 -0.135316 64 -7.685032 -0.000001 65 -27.369955 -0.000119 66 -40.908826 0.026472 67 -4.823267 0.030411 68 -23.581984 -0.065522 69 3.181360 -0.007697 70 1.828543 -0.000000 71 -6.090504 -0.002116 72 -0.988539 0.000002 73 0.790178 -0.000000 74 -17.522973 -0.000000 75 -25.682496 -0.003981 76 -11.943730 0.025216 77 -6.535236 0.031614 78 19.237103 0.115771 79 -23.303947 -0.007697 80 20.785871 -0.000061 81 -0.398901 -0.000875 82 4.220983 0.002898 83 -21.805541 -0.316308 84 5.385592 0.000000 85 7.992170 -0.000058 86 20.561026 0.000014 87 -5.836442 0.000415 88 -0.392213 -0.000302 89 -3.883298 -0.000001 90 -6.908613 -0.004065 91 10.546036 0.001241 92 -39.677236 -0.000000 93 18.095781 0.056472 94 9.597760 0.000000 95 3.294999 -0.000034 96 -21.179800 -0.002104 97 31.118283 0.034927 98 -22.059079 -0.358786 99 8.161085 -0.007698 100 5.313321 -0.000000 101 19.102103 0.025217 102 0.740958 -0.000000 103 2.578075 0.101476 104 -38.607568 0.000000 105 -8.339988 -0.000035 106 1.639758 0.000013 107 -3.726085 0.034889 108 15.554956 -0.422305 109 42.980892 -0.007752 110 2.181836 -0.004065 111 4.443285 0.001241 112 2.587092 0.000036 113 -22.489842 0.119719 114 -36.432494 -0.000000 115 8.075659 -0.000000 116 0.979978 0.023101 117 -7.488272 0.000002 118 5.319442 0.101748 119 -26.819508 0.000054 120 3.541003 -0.004128 121 4.189919 -0.000875 122 -9.620799 0.002783 123 19.398187 -0.202984 124 -3.194663 0.000055 125 -5.228130 0.000083 126 -3.757340 0.025216 127 9.792260 0.032081 128 -21.755460 0.002231 129 -7.715387 -0.007753 130 2.769206 -0.000055 131 7.901853 0.000014 132 -10.435488 0.000062 133 1.168304 -0.000086 134 -9.954674 -0.000001 135 -51.555283 -0.071057 136 4.958954 -0.000000 137 -7.408263 0.000000 138 39.517621 -0.070072 139 
13.036423 -0.000000 140 24.059711 0.000000 141 -20.165068 -0.061182 142 2.978425 0.000000 143 -3.346216 -0.000001 144 91.294581 -0.053340 145 4.700837 0.000000 146 33.962649 -0.000005 147 58.702284 -0.401485 148 3.416411 0.000000 149 4.415371 -0.002499 150 171.784942 -0.005808 151 -45.805688 -0.010427 152 12.976783 -0.010230 153 1.348388 -0.004100 154 7.967453 0.024354 155 -8.634125 0.034927 156 -1.937680 -0.200838 157 -21.314065 -0.007698 158 -58.810144 -0.445640 159 13.151796 0.000000 160 -0.728858 -0.002499 161 -56.918496 -0.010427 Forecast Expected Predicted 0 1 1 1 1 1 2 1 1 3 1 1 4 1 1 5 1 1 6 1 1 7 1 1 8 1 1 9 1 1 10 1 1 11 1 1 12 1 1 13 1 1 14 1 1 15 1 1 16 1 1 17 1 1 18 1 1 19 1 1 20 1 1 21 1 1 22 1 1 23 1 1 24 1 1 25 1 1 26 1 1 27 1 1 28 1 1 29 1 1 30 1 1 31 1 1 32 1 1 33 1 1 34 1 1 35 1 1 36 1 1 37 1 1 38 1 1 39 1 1 40 1 1 41 1 1 42 1 1 43 1 1 44 1 1 45 1 1 46 1 1 47 1 1 48 1 1 49 1 1 50 0 0 51 0 0 52 0 0 53 0 0 54 0 0 55 0 0 56 0 0 57 0 0 58 0 0 59 0 0 60 0 0 61 0 0 62 0 0 63 0 0 64 0 0 65 0 0 66 0 0 67 0 0 68 0 0 69 0 0 70 0 0 71 0 0 72 0 0 73 0 0 74 0 0 75 0 0 76 0 0 77 0 0 78 0 0 79 0 0 80 0 0 81 0 0 82 0 0 83 0 0 84 0 0 85 0 0 86 0 0 87 0 0 88 0 0 89 0 0 90 0 0 91 0 0 92 0 0 93 0 0 94 0 0 95 0 0 96 0 0 97 0 0 98 0 0 99 0 0 100 0 0 101 0 0 102 0 0 103 0 0 104 0 0 105 0 0 106 0 0 107 0 0 108 0 0 109 0 0 110 0 0 111 0 0 112 0 0 113 0 0 114 0 0 115 0 0 116 0 0 117 0 0 118 0 0 119 0 0 120 0 0 121 0 0 122 0 0 123 0 0 124 0 0 125 0 0 126 0 0 127 0 0 128 0 0 129 0 0 130 0 0 131 0 0 132 0 0 133 0 0 134 0 0 135 0 0 136 0 0 137 0 0 138 0 0 139 0 0 140 0 0 141 0 0 142 0 0 143 0 0 144 0 0 145 0 0 146 0 0 147 0 0 148 0 0 149 0 0 150 0 0 151 0 0 152 0 0 153 0 0 154 0 0 155 0 0 156 0 0 157 0 0 158 0 0 159 0 0 160 0 0 161 0 0 162 0 0 163 0 0 164 0 0 165 0 0 166 0 0 167 0 0 168 0 0 169 0 0 170 0 0 171 0 0 172 0 0 173 0 0 174 0 0 175 0 0 176 0 0 177 0 0 178 0 0 179 0 0 180 0 0 181 0 0 182 0 0 183 0 0 184 0 0 185 0 0 186 0 0 187 0 0 188 0 0 189 0 0 190 0 0 191 0 0 192 0 0 193 0 0 194 0 0 195 0 0 196 0 0 197 0 0 198 0 0 199 0 0 200 0 0 201 0 0 202 0 0 203 0 0 204 0 0 205 0 0 206 0 0 207 0 0 208 0 0 209 0 0 210 0 0 211 0 0 212 0 0 213 0 0 214 0 0 215 0 0 216 0 0 217 0 0 218 0 0 219 0 0 220 0 0 221 0 0 222 0 0 223 0 0 224 0 0 225 0 0 226 0 0 227 0 0 228 0 0 229 0 0 230 0 0 231 0 0 232 0 0 233 0 0 234 0 0 235 0 0 236 0 0 237 0 0 238 0 0 239 0 0 240 0 0 241 0 0 242 0 0 243 0 0 244 0 0 245 0 0 246 0 0 247 0 0 248 0 0 249 0 0 250 0 0 251 0 0 252 0 0 253 0 0 254 0 0 255 0 0 256 0 0 257 0 0 258 0 0 259 0 0 260 0 0 261 0 0 262 0 0 263 0 0 264 0 0 265 0 0 266 0 0 267 0 0 268 0 0 269 0 0 270 0 0 271 0 0 272 0 0 273 0 0 274 0 0 275 0 0 276 0 0 277 0 0 278 0 0 279 0 0 280 0 0 281 0 0 282 0 0 283 0 0 284 0 0 285 0 0 286 0 0 287 0 0 288 0 0 289 0 0 290 0 0 291 0 0 292 0 0 293 0 0 294 0 0 295 0 0 296 0 0 297 0 0 298 0 0 299 0 0 300 0 0 301 0 0 302 0 0 303 0 0 304 0 0 305 0 0 306 0 0 307 0 0 308 0 0 309 0 0 310 0 0 311 0 0 312 0 0 313 0 0 314 0 0 315 0 0 316 0 0 317 0 0 318 0 0 319 0 0 320 0 0 321 0 0 322 0 0 323 0 0 324 0 0 325 0 0 326 0 0 327 0 0 328 0 0 329 0 0 330 0 0 331 0 0 332 0 0 333 0 0 334 0 0 335 0 0 336 0 0 337 0 0 338 0 0 339 0 0 340 0 0 341 0 0 342 0 0 343 0 0 344 0 0 345 0 0 346 0 0 347 0 0 348 0 0 349 0 0 350 0 0 351 0 0 352 0 0 353 0 0 354 0 0 355 0 0 356 0 0 357 0 0 358 0 0 359 0 0 360 0 0 361 0 0 362 0 0 363 0 0 364 0 0 365 0 0 366 0 0 367 0 0 368 0 0 369 0 0 370 0 0 371 0 0 372 0 0 373 0 0 374 0 0 375 0 0 376 0 0 377 0 0 378 0 0 379 0 0 380 0 0 381 0 0 382 0 0 383 0 0 384 0 0 385 0 0 386 0 0 
387 0 0 388 0 0 389 0 0 390 0 0 391 0 0 392 0 0 393 0 0 394 0 0 395 0 0 396 0 0 397 0 0 398 0 0 399 0 0 400 0 0 401 0 0 402 0 0 403 0 0 404 0 0 405 0 0 406 0 0 407 0 0 408 0 0 409 0 0 410 0 0 411 0 0 412 0 0 413 0 0 414 0 0 415 0 0 416 0 0 417 0 0 418 0 0 419 0 0 420 0 0 421 0 0 422 0 0 423 0 0 424 0 0 425 0 0 426 0 0 427 0 0 428 0 0 429 0 0 430 0 0 431 0 0 432 0 0 433 0 0 434 0 0 435 0 0 436 0 0 437 0 0 438 0 0 439 0 0 440 0 0 441 0 0 442 0 0 443 0 0 444 0 0 445 0 0 446 0 0 447 0 0 448 0 0 449 0 0 450 0 0 451 0 0 452 0 0 453 0 0 454 0 0 455 0 0 456 0 0 457 0 0 458 0 0 459 0 0 460 0 0 461 0 0 462 0 0 463 0 0 464 0 0 465 0 0 466 0 0 467 0 0 468 0 0 469 0 0 470 0 0 471 0 0 472 0 0 473 0 0 474 0 0 475 0 0 476 0 0 477 0 0 478 0 0 479 0 0 480 0 0 481 0 0 482 0 0 483 0 0 484 0 0 485 0 0 486 0 0 487 0 0 488 0 0 489 0 0 490 0 0 491 0 0 492 0 0 493 0 0 494 0 0 495 0 0 496 0 0 497 0 0 498 0 0 499 0 0 500 0 0 501 0 0 502 0 0 503 0 0 504 0 0 505 0 0 506 0 0 507 0 0 508 0 0 509 0 0 510 0 0 511 0 0 512 0 0 513 0 0 514 0 0 515 0 0 516 0 0 517 0 0 518 0 0 519 0 0 520 0 0 521 0 0 522 0 0 523 0 0 524 0 0 525 0 0 526 0 0 527 0 0 528 0 0 529 0 0 530 0 0 531 0 0 532 0 0 533 0 0 534 0 0 535 0 0 536 0 0 537 0 0 538 0 0 539 0 0 540 0 0 541 0 0 542 0 0 543 0 0 544 0 0 545 0 0 546 0 0 547 0 0 548 0 0 549 0 0 550 0 0 551 0 0 552 0 0 553 0 0 554 0 0 555 0 0 556 0 0 557 0 0 558 0 0 559 0 0 560 0 0 561 0 0 562 0 0 563 0 0 564 0 0 565 0 0 566 0 0 567 0 0 568 0 0 569 0 0 570 0 0 571 0 0 572 0 0 573 0 0 574 0 0 575 0 0 576 0 0 577 0 0 578 0 0 579 0 0 580 0 0 581 0 0 582 0 0 583 0 0 584 0 0 585 0 0 586 0 0 587 0 0 588 0 0 589 0 0 590 0 0 591 0 0 592 0 0 593 0 0 594 0 0 595 0 0 596 0 0 597 0 0 598 0 0 599 0 0 600 0 0 601 0 0 602 0 0 603 0 0 604 0 0 605 0 0 606 0 0 607 0 0 608 0 0 609 0 0 610 0 0 611 0 0 612 0 0 613 0 0 614 0 0 615 0 0 616 0 0 617 0 0 618 0 0 619 0 0 620 0 0 621 0 0 622 0 0 623 0 0 624 0 0 625 0 0 626 0 0 627 0 0 628 0 0 629 0 0 630 0 0 631 0 0 632 0 0 633 0 0 634 0 0 635 0 0 636 0 0 637 0 0 638 0 0 639 0 0 640 0 0 641 0 0 642 0 0 643 0 0 644 0 0 645 0 0 646 0 0 647 0 0 648 0 0 649 0 0 650 0 0 651 0 0 652 0 0 653 0 0 654 0 0 655 0 0 656 0 0 657 0 0 658 0 0 659 0 0 660 0 0 661 0 0 662 0 0 663 0 0 664 0 0 665 0 0 666 0 0 667 0 0 668 0 0 669 0 0 670 0 0 671 0 0 672 0 0 673 0 0 674 0 0 675 0 0 676 0 0 677 0 0 678 0 0 679 0 0 680 0 0 681 0 0 682 0 0 683 0 0 684 0 0 685 0 0 686 0 0 687 0 0 688 0 0 689 0 0 690 0 0 691 0 0 692 0 0 693 0 0 694 0 0 695 0 0 696 0 0 697 0 0 698 0 0 699 0 0 700 0 0 701 0 0 702 0 0 703 0 0 704 0 0 705 0 0 706 0 0 707 0 0 708 0 0 709 0 0 710 0 0 711 0 0 712 0 0 713 0 0 714 0 0 715 0 0 716 0 0 717 0 0 718 0 0 719 0 0 720 0 0 721 0 0 722 0 0 723 0 0 724 0 0 725 0 0 726 0 0 727 0 0 728 0 0 729 0 0 730 0 0 731 0 0 732 0 0 733 0 0 734 0 0 735 0 0 736 0 0 737 0 0 738 0 0 739 0 0 740 0 0 741 0 0 742 0 0 743 0 0 744 0 0 745 0 0 746 0 0 747 0 0 748 0 0 749 0 0 750 0 0 751 0 0 752 0 0 753 0 0 754 0 0 755 0 0 756 0 0 757 0 0 758 0 0 759 0 0 760 0 0 761 0 0 762 0 0 763 0 0 764 0 0 765 0 0 766 0 0 767 0 0 768 0 0 769 0 0 770 0 0 771 0 0 772 0 0 773 0 0 774 0 0 775 0 0 776 0 0 777 0 0 778 0 0 779 0 0 780 0 0 781 0 0 782 0 0 783 0 0 784 0 0 785 0 0 786 0 0 787 0 0 788 0 0 789 0 0 790 0 0 791 0 0 792 0 0 793 0 0 794 0 0 795 0 0 796 0 0 797 0 0 798 0 0 799 0 0 800 0 0 801 0 0 802 0 0 803 0 0 804 0 0 805 0 0 806 0 0 807 0 0 808 0 0 809 0 0 810 0 0 811 0 0 812 0 0 813 0 0 814 0 0 815 0 0 816 0 0 817 0 0 818 0 0 819 0 0 820 0 0 821 0 0 822 0 0 823 0 0 824 0 0 825 0 0 826 0 0 827 0 0 828 0 0 829 0 0 830 0 0 
831 0 0 832 0 0 833 0 0 834 0 0 835 0 0 836 0 0 837 0 0 838 0 0 839 0 0 840 0 0 841 0 0 842 0 0 843 0 0 844 0 0 845 0 0 846 0 0 847 0 0 848 0 0 849 0 0 850 0 0 851 0 0 852 0 0 853 0 0 854 0 0 855 0 0 856 0 0 857 0 0 858 0 0 859 0 0 860 0 0 861 0 0 862 0 0 863 0 0 864 0 0 865 0 0 866 0 0 867 0 0 868 0 0 869 0 0 870 0 0 871 0 0 872 0 0 873 0 0 874 0 0 875 0 0 876 0 0 877 0 0 878 0 0 879 0 0 880 0 0 881 0 0 882 0 0 883 0 0 884 0 0 885 0 0 886 0 0 887 0 0 888 0 0 889 0 0 890 0 0 891 0 0 892 0 0 893 0 0 894 0 0 895 0 0 896 0 0 897 0 0 898 0 0 899 0 0 900 0 0 901 0 0 902 0 0 903 0 0 904 0 0 905 0 0 906 0 0 907 0 0 908 0 0 909 0 0 910 0 0 911 0 0 912 0 0 913 0 0 914 0 0 915 0 0 916 0 0 917 0 0 918 0 0 919 0 0 920 0 0 921 0 0 922 0 0 923 0 0 924 0 0 925 0 0 926 0 0 927 0 0 928 0 0 929 0 0 930 0 0 931 0 0 932 0 0 933 0 0 934 0 0 935 0 0 936 0 0 937 0 0 938 0 0 939 0 0 940 0 0 941 0 0 942 0 0 943 0 0 944 0 0 945 0 0 946 0 0 947 0 0 948 0 0 949 0 0 950 0 0 951 0 0 952 0 0 953 0 0 954 0 0 955 0 0 956 0 0 957 0 0 ****************Time: 10.157 trainer.getErrorValue = 1.4572899893203097 StageITrainer.getErrorValue = 482.27809835973795 StageIITrainer.getErrorValue = 1.4572899893203097Link to Java source.