core/vnl/vnl_least_squares_cost_function.cxx
// This is core/vnl/vnl_least_squares_cost_function.cxx
#ifdef VCL_NEEDS_PRAGMA_INTERFACE
#pragma implementation
#endif
//
// vnl_least_squares_cost_function
// Author: Andrew W. Fitzgibbon, Oxford RRG
// Created: 20 Aug 99
//
//-----------------------------------------------------------------------------

#include "vnl_least_squares_cost_function.h"

vnl_least_squares_cost_function::vnl_least_squares_cost_function(vnl_least_squares_function* func):
  vnl_cost_function(func->get_number_of_unknowns()),
  storage_(func->get_number_of_residuals()),
  jacobian_(func->get_number_of_residuals(), func->get_number_of_unknowns()),
  f_(func)
{
}

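// The scalar cost: the wrapped function fills the residual vector
// storage_, and the cost is its squared magnitude, i.e. the sum of
// squared residuals.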
double vnl_least_squares_cost_function::f(const vnl_vector<double>& x)
{
  f_->f(x, storage_);
  return storage_.squared_magnitude();
}

void vnl_least_squares_cost_function::gradf(const vnl_vector<double>& x, vnl_vector<double>& gradient)
{
  // residuals = a, b, c, ...
  // params = x, y, z, ...
  // f = a^2 + b^2 + c^2 + ...
  // df/dx = 2a*da/dx + 2b*db/dx + ...
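  // i.e. gradient = 2 * J^T * r, where r is the residual vector
  // (storage_) and J the Jacobian (jacobian_).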

  if (f_->has_gradient()) {
    f_->f(x,storage_);
    f_->gradf(x, jacobian_);
    for (unsigned int c=0; c<jacobian_.columns(); ++c) {
      gradient[c] = 0.0;
      for (unsigned int r=0; r<jacobian_.rows(); ++r)
        gradient[c] += storage_[r] * jacobian_(r,c);
      gradient[c] *= 2;
    }
  }
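  // If the wrapped function provides no analytic Jacobian, the gradient
  // vector is left unmodified here.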
}
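
//-----------------------------------------------------------------------------
// Usage sketch (not part of this file): a minimal example of wrapping a
// least-squares residual function in vnl_least_squares_cost_function and
// evaluating the summed cost and its gradient.  The residual class below
// (example_residuals) is hypothetical, chosen only to illustrate the
// interface, and the includes assume the usual <vnl/...> install layout.

#include <iostream>
#include <vnl/vnl_vector.h>
#include <vnl/vnl_matrix.h>
#include <vnl/vnl_least_squares_function.h>
#include <vnl/vnl_least_squares_cost_function.h>

// Two residuals in two unknowns: r0 = x0 - 1, r1 = 2*x1, with an
// analytic Jacobian so has_gradient() returns true.
struct example_residuals : public vnl_least_squares_function
{
  example_residuals() : vnl_least_squares_function(2, 2, use_gradient) {}

  void f(vnl_vector<double> const& x, vnl_vector<double>& fx)
  {
    fx[0] = x[0] - 1.0;
    fx[1] = 2.0 * x[1];
  }

  void gradf(vnl_vector<double> const& /*x*/, vnl_matrix<double>& J)
  {
    J(0,0) = 1.0;  J(0,1) = 0.0;   // dr0/dx0, dr0/dx1
    J(1,0) = 0.0;  J(1,1) = 2.0;   // dr1/dx0, dr1/dx1
  }
};

int main()
{
  example_residuals residuals;
  vnl_least_squares_cost_function cost(&residuals);

  vnl_vector<double> x(2);
  x[0] = 3.0;  x[1] = 1.0;

  // cost = (x0-1)^2 + (2*x1)^2 = 4 + 4 = 8
  std::cout << "cost = " << cost.f(x) << std::endl;

  // gradient = 2 * J^T * r = [2*(x0-1), 8*x1] = [4, 8]
  vnl_vector<double> g(2);
  cost.gradf(x, g);
  std::cout << "gradient = " << g << std::endl;

  return 0;
}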