--- a
+++ b/combinedDeepLearningActiveContour/functions/mrTrain.m
@@ -0,0 +1,47 @@
+function [softmaxModel] = mrTrain(inputSize, outSize, lambda, inputData, labels, options)
+%mrTrain Train a softmax model with the given parameters on the given
+% data. Returns softmaxModel, a struct containing the trained parameters
+% for the model.
+%
+% inputSize: the size of an input vector x^(i)
+% outSize: the number of classes (the size of an output vector)
+% lambda: weight decay parameter
+% inputData: an inputSize by M matrix containing the input data, such that
+%            inputData(:, c) is the cth input
+% labels: an M by 1 vector containing the class labels for the
+%            corresponding inputs. labels(c) is the class label for
+%            the cth input
+% options (optional): struct of training options
+%   options.maxIter: number of iterations to train for
+
+if ~exist('options', 'var')
+    options = struct;
+end
+
+if ~isfield(options, 'maxIter')
+    options.maxIter = 400;
+end
+
+% initialize parameters
+theta = 0.005 * randn(outSize * inputSize, 1);
+
+% Use minFunc to minimize the function
+addpath minFunc/
+options.Method = 'lbfgs'; % Here, we use L-BFGS to optimize our cost
+                          % function. Generally, for minFunc to work, you
+                          % need a function pointer with two outputs: the
+                          % function value and the gradient. In our problem,
+                          % softmaxCost.m satisfies this.
+options.display = 'on';
+
+[softmaxOptTheta, cost] = minFunc( @(p) mrCost(p, ...
+                                   outSize, inputSize, lambda, ...
+                                   inputData, labels), ...
+                              theta, options);
+
+% Fold softmaxOptTheta into a nicer format
+softmaxModel.optTheta = reshape(softmaxOptTheta, outSize, inputSize);
+softmaxModel.inputSize = inputSize;
+softmaxModel.numClasses = outSize;
+
+end
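
For context, a minimal usage sketch (not part of the diff). The sizes, lambda value, and random data below are illustrative placeholders, not values from this repo; mrCost.m and the minFunc/ directory from the same repository are assumed to be on the MATLAB path.

% Hypothetical example call; all numeric values are placeholders.
inputSize  = 64;                          % e.g. 8x8 patches, flattened
numClasses = 2;                           % e.g. foreground / background
lambda     = 1e-4;                        % weight decay
inputData  = randn(inputSize, 1000);      % inputSize x M matrix of examples
labels     = randi(numClasses, 1000, 1);  % M x 1 vector of class labels in 1..numClasses

options.maxIter = 100;
softmaxModel = mrTrain(inputSize, numClasses, lambda, inputData, labels, options);

% softmaxModel.optTheta is then a numClasses x inputSize matrix of softmax weights.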