GridSearch.Create<TInput, TOutput, TRange, TModel, TLearner> Method (TRange, CreateLearnerFromParameter<TLearner, TRange>, ComputeLoss<TOutput, TModel>, LearnNewModel<TLearner, TInput, TOutput, TModel>, TInput[], TOutput[])
Namespace: Accord.MachineLearning.Performance
public static GridSearch<TModel, TRange, TLearner, TInput, TOutput> Create<TInput, TOutput, TRange, TModel, TLearner>(
	TRange ranges,
	CreateLearnerFromParameter<TLearner, TRange> learner,
	ComputeLoss<TOutput, TModel> loss,
	LearnNewModel<TLearner, TInput, TOutput, TModel> fit,
	TInput[] x,
	TOutput[] y
)
where TModel : class, ITransform<TInput, TOutput>
where TLearner : ISupervisedLearning<TModel, TInput, TOutput>
// Ensure results are reproducible
Accord.Math.Random.Generator.Seed = 0;

// This sample code shows how to use Grid-Search in combination with
// Cross-Validation to assess the performance of Support Vector Machines.

// Consider the example binary data. We will be trying to learn a XOR
// problem and see how well SVMs perform on this data.

double[][] inputs =
{
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
};

int[] xor = // result of xor for the sample input data
{
    -1,  1,  1, -1,
    -1,  1,  1, -1,
    -1,  1,  1, -1,
    -1,  1,  1, -1,
};

// Create a new Grid-Search with Cross-Validation algorithm. Even though the
// generic, strongly-typed approach used across the framework is most of the
// time easier to handle, meta-algorithms such as grid-search can be a bit hard
// to set up. For this reason, the framework offers a specialized method for it:
var gridsearch = GridSearch<double[], int>.Create(

    // Here we can specify the range of the parameters to be included in the search
    ranges: new
    {
        Kernel = GridSearch.Values<IKernel>(new Linear(), new ChiSquare(),
            new Gaussian(), new Sigmoid()),
        Complexity = GridSearch.Values(0.00000001, 5.20, 0.30, 0.50),
        Tolerance = GridSearch.Range(1e-10, 1.0, stepSize: 0.05)
    },

    // Indicate how learning algorithms for the models should be created
    learner: (p) => new SequentialMinimalOptimization<IKernel>
    {
        Complexity = p.Complexity,
        Kernel = p.Kernel.Value,
        Tolerance = p.Tolerance
    },

    // Define how the model should be learned, if needed
    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

    // Define how the performance of the models should be measured
    loss: (actual, expected, m) => new ZeroOneLoss(expected).Loss(actual)
);

// If needed, control the degree of CPU parallelization
gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

// Search for the best model parameters
var result = gridsearch.Learn(inputs, xor);

// Get the best SVM:
SupportVectorMachine<IKernel> svm = result.BestModel;

// Estimate its error:
double bestError = result.BestModelError;

// Get the best values for the parameters:
double bestC = result.BestParameters.Complexity;
double bestTolerance = result.BestParameters.Tolerance;
IKernel bestKernel = result.BestParameters.Kernel.Value;
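
Once the search finishes, the best model can be used directly for prediction. The following is a minimal follow-up sketch, not part of the original example: it assumes the inputs, xor, svm and result variables from the code above, a using System.Linq directive, and that Decide returns boolean labels for this binary machine (as with other Accord.NET binary classifiers). The recomputed zero-one loss on the training data should agree with result.BestModelError.

// Minimal follow-up sketch (assumes the variables from the example above
// and a `using System.Linq;` directive):

// Use the best machine found by the search to classify the training points.
// For a binary SVM, Decide produces one boolean decision per input vector.
bool[] predicted = svm.Decide(inputs);

// Map the boolean decisions back to the -1/+1 convention used by the xor labels
int[] answers = predicted.Select(p => p ? 1 : -1).ToArray();

// Recompute the zero-one loss on the training data; this should match
// the error reported in result.BestModelError
double error = new ZeroOneLoss(xor).Loss(answers);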