Actual source code: brgn.c
petsc-3.15.0 2021-03-30
#include <../src/tao/leastsquares/impls/brgn/brgn.h>

#define BRGN_REGULARIZATION_USER    0
#define BRGN_REGULARIZATION_L2PROX  1
#define BRGN_REGULARIZATION_L2PURE  2
#define BRGN_REGULARIZATION_L1DICT  3
#define BRGN_REGULARIZATION_LM      4
#define BRGN_REGULARIZATION_TYPES   5

static const char *BRGN_REGULARIZATION_TABLE[64] = {"user","l2prox","l2pure","l1dict","lm"};
static PetscErrorCode GNHessianProd(Mat H,Vec in,Vec out)
{
  TAO_BRGN       *gn;
  PetscErrorCode ierr;

  MatShellGetContext(H,&gn);
  MatMult(gn->subsolver->ls_jac,in,gn->r_work);
  MatMultTranspose(gn->subsolver->ls_jac,gn->r_work,out);
  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    MatMult(gn->Hreg,in,gn->x_work);
    VecAXPY(out,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_L2PURE:
    VecAXPY(out,gn->lambda,in);
    break;
  case BRGN_REGULARIZATION_L2PROX:
    VecAXPY(out,gn->lambda,in);
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* out = out + lambda*D'*(diag.*(D*in)) */
    if (gn->D) {
      MatMult(gn->D,in,gn->y); /* y = D*in */
    } else {
      VecCopy(in,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->diag,gn->y); /* y_work = diag.*(D*in), where diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3 */
    if (gn->D) {
      MatMultTranspose(gn->D,gn->y_work,gn->x_work); /* x_work = D'*(diag.*(D*in)) */
    } else {
      VecCopy(gn->y_work,gn->x_work);
    }
    VecAXPY(out,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_LM:
    VecPointwiseMult(gn->x_work,gn->damping,in);
    VecAXPY(out,1,gn->x_work);
    break;
  }
  return(0);
}
static PetscErrorCode ComputeDamping(TAO_BRGN *gn)
{
  const PetscScalar *diag_ary;
  PetscScalar       *damping_ary;
  PetscInt          i,n;
  PetscErrorCode    ierr;

  /* update damping */
  VecGetArray(gn->damping,&damping_ary);
  VecGetArrayRead(gn->diag,&diag_ary);
  VecGetLocalSize(gn->damping,&n);
  for (i=0; i<n; i++) {
    damping_ary[i] = PetscClipInterval(diag_ary[i],PETSC_SQRT_MACHINE_EPSILON,PetscSqrtReal(PETSC_MAX_REAL));
  }
  VecScale(gn->damping,gn->lambda);
  VecRestoreArray(gn->damping,&damping_ary);
  VecRestoreArrayRead(gn->diag,&diag_ary);
  return(0);
}
PetscErrorCode TaoBRGNGetDampingVector(Tao tao,Vec *d)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (gn->reg_type != BRGN_REGULARIZATION_LM) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_SUP,"Damping vector is only available if regularization type is lm.");
  *d = gn->damping;
  return(0);
}
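
/* A minimal, illustrative usage sketch for TaoBRGNGetDampingVector() (not taken from the PETSc examples):
   after a solve with -tao_brgn_regularization_type lm the Levenberg-Marquardt damping can be inspected, e.g.

     Vec damp;
     TaoSolve(tao);
     TaoBRGNGetDampingVector(tao,&damp);        // borrowed reference owned by BRGN; do not destroy
     VecView(damp,PETSC_VIEWER_STDOUT_WORLD);
*/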
static PetscErrorCode GNObjectiveGradientEval(Tao tao,Vec X,PetscReal *fcn,Vec G,void *ptr)
{
  TAO_BRGN       *gn = (TAO_BRGN *)ptr;
  PetscInt       K;   /* dimension of D*X */
  PetscScalar    yESum;
  PetscErrorCode ierr;
  PetscReal      f_reg;

  /* compute objective *fcn */
  /* compute first term 0.5*||ls_res||_2^2 */
  TaoComputeResidual(tao,X,tao->ls_res);
  VecDot(tao->ls_res,tao->ls_res,fcn);
  *fcn *= 0.5;
  /* compute gradient G */
  TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
  MatMultTranspose(tao->ls_jac,tao->ls_res,G);
  /* add the regularization contribution */
  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    (*gn->regularizerobjandgrad)(tao,X,&f_reg,gn->x_work,gn->reg_obj_ctx);
    *fcn += gn->lambda*f_reg;
    VecAXPY(G,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_L2PURE:
    /* compute f = f + lambda*0.5*xk'*xk */
    VecDot(X,X,&f_reg);
    *fcn += gn->lambda*0.5*f_reg;
    /* compute G = G + lambda*xk */
    VecAXPY(G,gn->lambda,X);
    break;
  case BRGN_REGULARIZATION_L2PROX:
    /* compute f = f + lambda*0.5*(xk - xkm1)'*(xk - xkm1) */
    VecAXPBYPCZ(gn->x_work,1.0,-1.0,0.0,X,gn->x_old);
    VecDot(gn->x_work,gn->x_work,&f_reg);
    *fcn += gn->lambda*0.5*f_reg;
    /* compute G = G + lambda*(xk - xkm1) */
    VecAXPBYPCZ(G,gn->lambda,-gn->lambda,1.0,X,gn->x_old);
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* compute f = f + lambda*sum(sqrt(y.^2+epsilon^2) - epsilon), where y = D*x */
    if (gn->D) {
      MatMult(gn->D,X,gn->y); /* y = D*x */
    } else {
      VecCopy(X,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->y,gn->y);
    VecShift(gn->y_work,gn->epsilon*gn->epsilon);
    VecSqrtAbs(gn->y_work); /* gn->y_work = sqrt(y.^2+epsilon^2) */
    VecSum(gn->y_work,&yESum);
    VecGetSize(gn->y,&K);
    *fcn += gn->lambda*(yESum - K*gn->epsilon);
    /* compute G = G + lambda*D'*(y./sqrt(y.^2+epsilon^2)), where y = D*x */
    VecPointwiseDivide(gn->y_work,gn->y,gn->y_work); /* reuse y_work = y./sqrt(y.^2+epsilon^2) */
    if (gn->D) {
      MatMultTranspose(gn->D,gn->y_work,gn->x_work);
    } else {
      VecCopy(gn->y_work,gn->x_work);
    }
    VecAXPY(G,gn->lambda,gn->x_work);
    break;
  }
  return(0);
}
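
/* Notes on the smoothed L1 ("l1dict") regularizer used above and in GNComputeHessian() below.
   This is an editorial summary of the formulas already present in the code comments, not additional code:

     g(x)      = sum_i (sqrt(y_i^2 + epsilon^2) - epsilon),   with y = D*x
     grad g(x) = D' * (y ./ sqrt(y.^2 + epsilon^2))
     Hess g(x) = D' * diag(epsilon^2 ./ sqrt(y.^2 + epsilon^2).^3) * D

   so for small epsilon g(x) approaches ||D*x||_1, and gn->diag stores the middle diagonal factor that
   GNHessianProd() applies when forming Hessian-vector products. */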
static PetscErrorCode GNComputeHessian(Tao tao,Vec X,Mat H,Mat Hpre,void *ptr)
{
  TAO_BRGN    *gn = (TAO_BRGN *)ptr;
  PetscInt    i,n,cstart,cend;
  PetscReal   *cnorms;
  PetscScalar *diag_ary;

  TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
  if (gn->mat_explicit) {
    MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_REUSE_MATRIX, PETSC_DEFAULT, &gn->H);
  }

  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    (*gn->regularizerhessian)(tao,X,gn->Hreg,gn->reg_hess_ctx);
    if (gn->mat_explicit) {
      MatAXPY(gn->H, 1.0, gn->Hreg, DIFFERENT_NONZERO_PATTERN);
    }
    break;
  case BRGN_REGULARIZATION_L2PURE:
    if (gn->mat_explicit) {
      MatShift(gn->H, gn->lambda);
    }
    break;
  case BRGN_REGULARIZATION_L2PROX:
    if (gn->mat_explicit) {
      MatShift(gn->H, gn->lambda);
    }
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* calculate and store the diagonal matrix as a vector: diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3, where y = D*x */
    if (gn->D) {
      MatMult(gn->D,X,gn->y); /* y = D*x */
    } else {
      VecCopy(X,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->y,gn->y);
    VecShift(gn->y_work,gn->epsilon*gn->epsilon);
    VecCopy(gn->y_work,gn->diag);                   /* gn->diag = y.^2+epsilon^2 */
    VecSqrtAbs(gn->y_work);                         /* gn->y_work = sqrt(y.^2+epsilon^2) */
    VecPointwiseMult(gn->diag,gn->y_work,gn->diag); /* gn->diag = sqrt(y.^2+epsilon^2).^3 */
    VecReciprocal(gn->diag);
    VecScale(gn->diag,gn->epsilon*gn->epsilon);
    if (gn->mat_explicit) {
      MatDiagonalSet(gn->H, gn->diag, ADD_VALUES);
    }
    break;
  case BRGN_REGULARIZATION_LM:
    /* compute diagonal of J^T J */
    MatGetSize(gn->parent->ls_jac,NULL,&n);
    PetscMalloc1(n,&cnorms);
    MatGetColumnNorms(gn->parent->ls_jac,NORM_2,cnorms);
    MatGetOwnershipRangeColumn(gn->parent->ls_jac,&cstart,&cend);
    VecGetArray(gn->diag,&diag_ary);
    for (i = 0; i < cend-cstart; i++) {
      diag_ary[i] = cnorms[cstart+i] * cnorms[cstart+i];
    }
    VecRestoreArray(gn->diag,&diag_ary);
    PetscFree(cnorms);
    ComputeDamping(gn);
    if (gn->mat_explicit) {
      MatDiagonalSet(gn->H, gn->damping, ADD_VALUES);
    }
    break;
  }
  return(0);
}
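
/* Editorial note on the "lm" branch above (not part of the original source): with the damping vector
   d = lambda * clip(diag(J'*J)), the operator applied in GNHessianProd() becomes J'*J + diag(d), i.e. the
   classical scaled Levenberg-Marquardt system; lambda itself is adapted in GNHookFunction() depending on
   whether the previous step was downhill or uphill. */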
static PetscErrorCode GNHookFunction(Tao tao,PetscInt iter, void *ctx)
{
  TAO_BRGN       *gn = (TAO_BRGN *)ctx;
  PetscErrorCode ierr;

  /* Update basic tao information from the subsolver */
  gn->parent->nfuncs      = tao->nfuncs;
  gn->parent->ngrads      = tao->ngrads;
  gn->parent->nfuncgrads  = tao->nfuncgrads;
  gn->parent->nhess       = tao->nhess;
  gn->parent->niter       = tao->niter;
  gn->parent->ksp_its     = tao->ksp_its;
  gn->parent->ksp_tot_its = tao->ksp_tot_its;
  gn->parent->fc          = tao->fc;
  TaoGetConvergedReason(tao,&gn->parent->reason);
  /* Update the solution vectors */
  if (iter == 0) {
    VecSet(gn->x_old,0.0);
  } else {
    VecCopy(tao->solution,gn->x_old);
    VecCopy(tao->solution,gn->parent->solution);
  }
  /* Update the gradient */
  VecCopy(tao->gradient,gn->parent->gradient);

  /* Update damping parameter for LM */
  if (gn->reg_type == BRGN_REGULARIZATION_LM) {
    if (iter > 0) {
      if (gn->fc_old > tao->fc) {
        gn->lambda = gn->lambda * gn->downhill_lambda_change;
      } else {
        /* uphill step */
        gn->lambda = gn->lambda * gn->uphill_lambda_change;
      }
    }
    gn->fc_old = tao->fc;
  }

  /* Call general purpose update function */
  if (gn->parent->ops->update) {
    (*gn->parent->ops->update)(gn->parent,gn->parent->niter,gn->parent->user_update);
  }
  return(0);
}
static PetscErrorCode TaoSolve_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  TaoSolve(gn->subsolver);
  /* Update basic tao information from the subsolver */
  tao->nfuncs      = gn->subsolver->nfuncs;
  tao->ngrads      = gn->subsolver->ngrads;
  tao->nfuncgrads  = gn->subsolver->nfuncgrads;
  tao->nhess       = gn->subsolver->nhess;
  tao->niter       = gn->subsolver->niter;
  tao->ksp_its     = gn->subsolver->ksp_its;
  tao->ksp_tot_its = gn->subsolver->ksp_tot_its;
  TaoGetConvergedReason(gn->subsolver,&tao->reason);
  /* Update vectors */
  VecCopy(gn->subsolver->solution,tao->solution);
  VecCopy(gn->subsolver->gradient,tao->gradient);
  return(0);
}
static PetscErrorCode TaoSetFromOptions_BRGN(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  TaoLineSearch  ls;
  PetscErrorCode ierr;

  PetscOptionsHead(PetscOptionsObject,"least-squares problems with regularizer: ||f(x)||^2 + lambda*g(x), g(x) = ||xk-xkm1||^2 or ||Dx||_1 or a user-defined function");
  PetscOptionsBool("-tao_brgn_mat_explicit","switches the Hessian construction to be an explicit matrix rather than MATSHELL","",gn->mat_explicit,&gn->mat_explicit,NULL);
  PetscOptionsReal("-tao_brgn_regularizer_weight","regularizer weight (default 1e-4)","",gn->lambda,&gn->lambda,NULL);
  PetscOptionsReal("-tao_brgn_l1_smooth_epsilon","L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)","",gn->epsilon,&gn->epsilon,NULL);
  PetscOptionsReal("-tao_brgn_lm_downhill_lambda_change","Factor to decrease trust region by on downhill steps","",gn->downhill_lambda_change,&gn->downhill_lambda_change,NULL);
  PetscOptionsReal("-tao_brgn_lm_uphill_lambda_change","Factor to increase trust region by on uphill steps","",gn->uphill_lambda_change,&gn->uphill_lambda_change,NULL);
  PetscOptionsEList("-tao_brgn_regularization_type","regularization type","",BRGN_REGULARIZATION_TABLE,BRGN_REGULARIZATION_TYPES,BRGN_REGULARIZATION_TABLE[gn->reg_type],&gn->reg_type,NULL);
  PetscOptionsTail();
  /* set the unit-step line search as the default when using the lm regularizer */
  if (gn->reg_type == BRGN_REGULARIZATION_LM) {
    TaoGetLineSearch(gn->subsolver,&ls);
    TaoLineSearchSetType(ls,TAOLINESEARCHUNIT);
  }
  TaoSetFromOptions(gn->subsolver);
  return(0);
}
static PetscErrorCode TaoView_BRGN(Tao tao,PetscViewer viewer)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  PetscViewerASCIIPushTab(viewer);
  TaoView(gn->subsolver,viewer);
  PetscViewerASCIIPopTab(viewer);
  return(0);
}
static PetscErrorCode TaoSetUp_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;
  PetscBool      is_bnls,is_bntr,is_bntl;
  PetscInt       i,n,N,K; /* dict has size K*N */

  if (!tao->ls_res) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ORDER,"TaoSetResidualRoutine() must be called before setup!");
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNLS,&is_bnls);
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTR,&is_bntr);
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTL,&is_bntl);
  if ((is_bnls || is_bntr || is_bntl) && !tao->ls_jac) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ORDER,"TaoSetJacobianResidualRoutine() must be called before setup!");
  if (!tao->gradient) {
    VecDuplicate(tao->solution,&tao->gradient);
  }
  if (!gn->x_work) {
    VecDuplicate(tao->solution,&gn->x_work);
  }
  if (!gn->r_work) {
    VecDuplicate(tao->ls_res,&gn->r_work);
  }
  if (!gn->x_old) {
    VecDuplicate(tao->solution,&gn->x_old);
    VecSet(gn->x_old,0.0);
  }
  if (BRGN_REGULARIZATION_L1DICT == gn->reg_type) {
    if (gn->D) {
      MatGetSize(gn->D,&K,&N); /* Shell matrices must still have their sizes defined. K = N for an identity matrix; K = N-1 or N for a gradient matrix */
    } else {
      VecGetSize(tao->solution,&K); /* If the user does not set up a dictionary matrix, the identity matrix is used, so K = N */
    }
    if (!gn->y) {
      VecCreate(PETSC_COMM_SELF,&gn->y);
      VecSetSizes(gn->y,PETSC_DECIDE,K);
      VecSetFromOptions(gn->y);
      VecSet(gn->y,0.0);
    }
    if (!gn->y_work) {
      VecDuplicate(gn->y,&gn->y_work);
    }
    if (!gn->diag) {
      VecDuplicate(gn->y,&gn->diag);
      VecSet(gn->diag,0.0);
    }
  }
  if (BRGN_REGULARIZATION_LM == gn->reg_type) {
    if (!gn->diag) {
      MatCreateVecs(tao->ls_jac,&gn->diag,NULL);
    }
    if (!gn->damping) {
      MatCreateVecs(tao->ls_jac,&gn->damping,NULL);
    }
  }
  if (!tao->setupcalled) {
    /* Hessian setup */
    if (gn->mat_explicit) {
      TaoComputeResidualJacobian(tao,tao->solution,tao->ls_jac,tao->ls_jac_pre);
      MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &gn->H);
    } else {
      VecGetLocalSize(tao->solution,&n);
      VecGetSize(tao->solution,&N);
      MatCreate(PetscObjectComm((PetscObject)tao),&gn->H);
      MatSetSizes(gn->H,n,n,N,N);
      MatSetType(gn->H,MATSHELL);
      MatSetOption(gn->H, MAT_SYMMETRIC, PETSC_TRUE);
      MatShellSetOperation(gn->H,MATOP_MULT,(void (*)(void))GNHessianProd);
      MatShellSetContext(gn->H,(void*)gn);
    }
    MatSetUp(gn->H);
    /* Subsolver setup, including the initial vector and dictionary D */
    TaoSetUpdate(gn->subsolver,GNHookFunction,(void*)gn);
    TaoSetInitialVector(gn->subsolver,tao->solution);
    if (tao->bounded) {
      TaoSetVariableBounds(gn->subsolver,tao->XL,tao->XU);
    }
    TaoSetResidualRoutine(gn->subsolver,tao->ls_res,tao->ops->computeresidual,tao->user_lsresP);
    TaoSetJacobianResidualRoutine(gn->subsolver,tao->ls_jac,tao->ls_jac,tao->ops->computeresidualjacobian,tao->user_lsjacP);
    TaoSetObjectiveAndGradientRoutine(gn->subsolver,GNObjectiveGradientEval,(void*)gn);
    TaoSetHessianRoutine(gn->subsolver,gn->H,gn->H,GNComputeHessian,(void*)gn);
    /* Propagate some options down */
    TaoSetTolerances(gn->subsolver,tao->gatol,tao->grtol,tao->gttol);
    TaoSetMaximumIterations(gn->subsolver,tao->max_it);
    TaoSetMaximumFunctionEvaluations(gn->subsolver,tao->max_funcs);
    for (i=0; i<tao->numbermonitors; ++i) {
      TaoSetMonitor(gn->subsolver,tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i]);
      PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
    }
    TaoSetUp(gn->subsolver);
  }
  return(0);
}
static PetscErrorCode TaoDestroy_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  if (tao->setupcalled) {
    VecDestroy(&tao->gradient);
    VecDestroy(&gn->x_work);
    VecDestroy(&gn->r_work);
    VecDestroy(&gn->x_old);
    VecDestroy(&gn->diag);
    VecDestroy(&gn->y);
    VecDestroy(&gn->y_work);
  }
  VecDestroy(&gn->damping);
  VecDestroy(&gn->diag);
  MatDestroy(&gn->H);
  MatDestroy(&gn->D);
  MatDestroy(&gn->Hreg);
  TaoDestroy(&gn->subsolver);
  gn->parent = NULL;
  PetscFree(tao->data);
  return(0);
}
/*MC
  TAOBRGN - Bounded Regularized Gauss-Newton method for solving nonlinear least-squares
            problems with bound constraints. The algorithm is a thin wrapper around a bound-constrained
            Newton subsolver (TAOBNLS by default) that constructs the Gauss-Newton problem from the
            user-provided least-squares residual and Jacobian. It offers a pure L2-norm regularizer
            ("l2pure"), an L2-norm proximal-point regularizer ("l2prox"), and an L1-norm dictionary
            regularizer ("l1dict"), where the L1-norm ||x||_1 is approximated by
            sum_i(sqrt(x_i^2+epsilon^2)-epsilon) with a small positive number epsilon.
            Also offered is the "lm" regularizer, which uses a scaled diagonal of J^T J;
            with the "lm" regularizer, BRGN is a Levenberg-Marquardt optimizer.
            The user can also provide their own regularization function.

  Options Database Keys:
+ -tao_brgn_regularization_type - regularization type ("user", "l2prox", "l2pure", "l1dict", "lm") (default "l2prox")
. -tao_brgn_regularizer_weight - regularizer weight (default 1e-4)
- -tao_brgn_l1_smooth_epsilon - L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)

  Level: beginner
M*/
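
/* A minimal, illustrative usage sketch for TAOBRGN. The callbacks EvaluateResidual()/EvaluateJacobian(),
   the user context, and the pre-created objects x, r, J are hypothetical names, not part of this file:

     Tao tao;
     TaoCreate(PETSC_COMM_WORLD,&tao);
     TaoSetType(tao,TAOBRGN);
     TaoSetInitialVector(tao,x);                                    // initial guess
     TaoSetResidualRoutine(tao,r,EvaluateResidual,&user);           // least-squares residual r(x)
     TaoSetJacobianResidualRoutine(tao,J,J,EvaluateJacobian,&user); // residual Jacobian J(x)
     TaoBRGNSetRegularizerWeight(tao,1e-3);                         // lambda
     TaoSetFromOptions(tao);                                        // e.g. -tao_brgn_regularization_type l1dict
     TaoSolve(tao);
     TaoDestroy(&tao);
*/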
PETSC_EXTERN PetscErrorCode TaoCreate_BRGN(Tao tao)
{
  TAO_BRGN *gn;

  PetscNewLog(tao,&gn);

  tao->ops->destroy        = TaoDestroy_BRGN;
  tao->ops->setup          = TaoSetUp_BRGN;
  tao->ops->setfromoptions = TaoSetFromOptions_BRGN;
  tao->ops->view           = TaoView_BRGN;
  tao->ops->solve          = TaoSolve_BRGN;

  tao->data                  = (void*)gn;
  gn->reg_type               = BRGN_REGULARIZATION_L2PROX;
  gn->lambda                 = 1e-4;
  gn->epsilon                = 1e-6;
  gn->downhill_lambda_change = 1./5.;
  gn->uphill_lambda_change   = 1.5;
  gn->parent                 = tao;

  TaoCreate(PetscObjectComm((PetscObject)tao),&gn->subsolver);
  TaoSetType(gn->subsolver,TAOBNLS);
  TaoSetOptionsPrefix(gn->subsolver,"tao_brgn_subsolver_");
  return(0);
}
/*@
  TaoBRGNGetSubsolver - Get the pointer to the subsolver inside BRGN

  Collective on Tao

  Input Parameter:
. tao - the Tao solver context

  Output Parameter:
. subsolver - the Tao sub-solver context

  Level: advanced
@*/
PetscErrorCode TaoBRGNGetSubsolver(Tao tao,Tao *subsolver)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  *subsolver = gn->subsolver;
  return(0);
}
/*@
  TaoBRGNSetRegularizerWeight - Set the regularizer weight for the Gauss-Newton least-squares algorithm

  Collective on Tao

  Input Parameters:
+ tao - the Tao solver context
- lambda - regularizer weight

  Level: beginner
@*/
PetscErrorCode TaoBRGNSetRegularizerWeight(Tao tao,PetscReal lambda)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  /* Initialize lambda here */
  gn->lambda = lambda;
  return(0);
}
/*@
  TaoBRGNSetL1SmoothEpsilon - Set the L1-norm smooth approximation parameter for the L1-regularized least-squares algorithm

  Collective on Tao

  Input Parameters:
+ tao - the Tao solver context
- epsilon - L1-norm smooth approximation parameter

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetL1SmoothEpsilon(Tao tao,PetscReal epsilon)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  /* Initialize epsilon here */
  gn->epsilon = epsilon;
  return(0);
}
/*@
  TaoBRGNSetDictionaryMatrix - Bind the user-supplied dictionary matrix to gn->D for the "l1dict" regularizer,
  e.g. for compressed sensing formulated as a least-squares problem

  Input Parameters:
+ tao - the Tao context
- dict - the user-specified dictionary matrix; passing NULL selects the identity matrix (the default)

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetDictionaryMatrix(Tao tao,Mat dict)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (dict) {
    PetscObjectReference((PetscObject)dict);
  }
  MatDestroy(&gn->D);
  gn->D = dict;
  return(0);
}
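
/* An illustrative sketch (not part of this file) of building a simple first-difference ("gradient")
   dictionary with K = N-1 rows, as mentioned in TaoSetUp_BRGN(), so that the l1dict regularizer
   penalizes ||D*x||_1; x and tao are assumed to exist already:

     Mat      D;
     PetscInt i,N;
     VecGetSize(x,&N);
     MatCreate(PETSC_COMM_WORLD,&D);
     MatSetSizes(D,PETSC_DECIDE,PETSC_DECIDE,N-1,N);
     MatSetFromOptions(D);
     MatSetUp(D);
     for (i=0; i<N-1; i++) {                      // row i encodes x[i+1] - x[i]
       MatSetValue(D,i,i,-1.0,INSERT_VALUES);
       MatSetValue(D,i,i+1,1.0,INSERT_VALUES);
     }
     MatAssemblyBegin(D,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(D,MAT_FINAL_ASSEMBLY);
     TaoBRGNSetDictionaryMatrix(tao,D);
     MatDestroy(&D);                              // BRGN keeps its own reference
*/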
/*@C
  TaoBRGNSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined callback that evaluates the
  value and gradient of the regularizer, used when the regularization type is "user".

  Input Parameters:
+ tao - the Tao context
. func - function pointer for the regularizer value and gradient evaluation
- ctx - user context for the regularizer

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(Tao tao,PetscErrorCode (*func)(Tao,Vec,PetscReal *,Vec,void*),void *ctx)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (ctx) {
    gn->reg_obj_ctx = ctx;
  }
  if (func) {
    gn->regularizerobjandgrad = func;
  }
  return(0);
}
/*@C
  TaoBRGNSetRegularizerHessianRoutine - Sets the user-defined callback that evaluates the
  Hessian of the regularizer, used when the regularization type is "user".

  Input Parameters:
+ tao - the Tao context
. Hreg - user-created matrix for the Hessian of the regularization term
. func - function pointer for the regularizer Hessian evaluation
- ctx - user context for the regularizer Hessian

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(Tao tao,Mat Hreg,PetscErrorCode (*func)(Tao,Vec,Mat,void*),void *ctx)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (!Hreg) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONG,"NULL Hessian detected! User must provide a valid Hessian for the regularizer.");
  if (ctx) {
    gn->reg_hess_ctx = ctx;
  }
  if (func) {
    gn->regularizerhessian = func;
  }
  PetscObjectReference((PetscObject)Hreg);
  MatDestroy(&gn->Hreg);
  gn->Hreg = Hreg;
  return(0);
}
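
/* An illustrative sketch (hypothetical callbacks, not part of this file) of a user-defined regularizer
   equivalent to the built-in "l2pure" choice, g(x) = 0.5*x'*x, grad g = x, Hess g = I, selected with
   -tao_brgn_regularization_type user. Hreg is assumed to be a user-created matrix whose nonzero pattern
   includes the diagonal:

     static PetscErrorCode MyRegObjGrad(Tao tao,Vec X,PetscReal *f,Vec G,void *ctx)
     {
       PetscScalar xtx;
       VecDot(X,X,&xtx);                 // x'*x
       *f = 0.5*PetscRealPart(xtx);
       VecCopy(X,G);                     // gradient = x
       return(0);
     }

     static PetscErrorCode MyRegHessian(Tao tao,Vec X,Mat Hreg,void *ctx)
     {
       MatZeroEntries(Hreg);
       MatShift(Hreg,1.0);               // Hreg = identity
       return(0);
     }

     TaoBRGNSetRegularizerObjectiveAndGradientRoutine(tao,MyRegObjGrad,NULL);
     TaoBRGNSetRegularizerHessianRoutine(tao,Hreg,MyRegHessian,NULL);
*/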