Actual source code: ex5adj.c

petsc-3.14.2 2020-12-03
  1: static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

  3: /*
  4:   See ex5.c for details on the equation.
  5:   This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  6:   It computes the sensitivity of a component of the final solution, located at the center of the 2D domain, with respect to the initial conditions.
  7:   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.

  9:   Runtime options:
 10:     -forwardonly  - run the forward simulation without adjoint
 11:     -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
 12:     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as SELL)
 13: */
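/*
  Example usage (the options are illustrative and taken from the test cases at the end of this
  file; adjust the executable path and MPI launcher to your build):

      mpiexec -n 2 ./ex5adj -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20

  This runs ten implicit (Crank-Nicolson) time steps forward while checkpointing the trajectory,
  then propagates the sensitivities backward with TSAdjointSolve(). Add -forwardonly to skip the
  adjoint run, or -implicitform 0 to use the explicit RHSFunction/RHSJacobian path with TSRK.
*/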

 15: #include <petscsys.h>
 16: #include <petscdm.h>
 17: #include <petscdmda.h>
 18: #include <petscts.h>

 20: typedef struct {
 21:   PetscScalar u,v;
 22: } Field;

 24: typedef struct {
 25:   PetscReal D1,D2,gamma,kappa;
 26:   PetscBool aijpc;
 27: } AppCtx;

 29: /*
 30:    User-defined routines
 31: */
 32: PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
 33: PetscErrorCode InitialConditions(DM,Vec);
 34: PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
 35: PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*);
 36: PetscErrorCode IJacobian(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);

 38: PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
 39: {
 40:    PetscInt i,j,Mx,My,xs,ys,xm,ym;
 42:    Field **l;

 45:    DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
 46:    /* locate the global i index for x and j index for y */
 47:    i = (PetscInt)(x*(Mx-1));
 48:    j = (PetscInt)(y*(My-1));
 49:    DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

 51:    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
 52:      /* the i,j vertex is on this process */
 53:      DMDAVecGetArray(da,lambda,&l);
 54:      l[j][i].u = 1.0;
 55:      l[j][i].v = 1.0;
 56:      DMDAVecRestoreArray(da,lambda,&l);
 57:    }
 58:    return(0);
 59: }
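/*
  The routine above seeds the adjoint variable with dJ/dU at the final time: both components are
  set to 1.0 at the grid point closest to the fractional coordinates (x,y), so with (0.5,0.5) the
  resulting lambda at t=0 contains the sensitivity of u+v at the center of the domain with respect
  to every entry of the initial condition.
*/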

 61: int main(int argc,char **argv)
 62: {
 63:   TS             ts;                  /* ODE integrator */
 64:   Vec            x;                   /* solution */
 65:   PetscErrorCode ierr;
 66:   DM             da;
 67:   AppCtx         appctx;
 68:   Vec            lambda[1];
 69:   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_TRUE;

 71:   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
 72:   PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);
 73:   PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);
 74:   appctx.aijpc = PETSC_FALSE;
 75:   PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL);

 77:   appctx.D1    = 8.0e-5;
 78:   appctx.D2    = 4.0e-5;
 79:   appctx.gamma = .024;
 80:   appctx.kappa = .06;

 82:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 83:      Create distributed array (DMDA) to manage parallel grid and vectors
 84:   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 85:   DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
 86:   DMSetFromOptions(da);
 87:   DMSetUp(da);
 88:   DMDASetFieldName(da,0,"u");
 89:   DMDASetFieldName(da,1,"v");

 91:   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 92:      Extract global vectors from DMDA; then duplicate for remaining
 93:      vectors that are the same types
 94:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 95:   DMCreateGlobalVector(da,&x);

 97:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 98:      Create timestepping solver context
 99:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100:   TSCreate(PETSC_COMM_WORLD,&ts);
101:   TSSetDM(ts,da);
102:   TSSetProblemType(ts,TS_NONLINEAR);
 103:   TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT); /* fewer Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
104:   if (!implicitform) {
105:     TSSetType(ts,TSRK);
106:     TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
107:     TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);
108:   } else {
109:     TSSetType(ts,TSCN);
110:     TSSetIFunction(ts,NULL,IFunction,&appctx);
111:     if (appctx.aijpc) {
112:       Mat                    A,B;

114:       DMSetMatType(da,MATSELL);
115:       DMCreateMatrix(da,&A);
116:       MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B);
117:       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
118:       TSSetIJacobian(ts,A,B,IJacobian,&appctx);
119:       MatDestroy(&A);
120:       MatDestroy(&B);
121:     } else {
122:       TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx);
123:     }
124:   }

126:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
127:      Set initial conditions
128:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
129:   InitialConditions(da,x);
130:   TSSetSolution(ts,x);

132:   /*
133:     Have the TS save its trajectory so that TSAdjointSolve() may be used
134:   */
135:   if (!forwardonly) { TSSetSaveTrajectory(ts); }
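/*
  The saved trajectory is what TSAdjointSolve() replays backward. The default TSTrajectory type
  checkpoints to binary files on disk; the test cases below also exercise run-time alternatives
  such as -ts_trajectory_type memory and the -ts_trajectory_dirname/-ts_trajectory_file_template
  options that control where the on-disk checkpoints are written.
*/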

137:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138:      Set solver options
139:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
140:   TSSetMaxTime(ts,200.0);
141:   TSSetTimeStep(ts,0.5);
142:   TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP);
143:   TSSetFromOptions(ts);

145:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146:      Solve ODE system
147:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
148:   TSSolve(ts,x);
149:   if (!forwardonly) {
150:     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
151:        Start the Adjoint model
152:        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
153:     VecDuplicate(x,&lambda[0]);
154:     /*   Reset initial conditions for the adjoint integration */
155:     InitializeLambda(da,lambda[0],0.5,0.5);
156:     TSSetCostGradients(ts,1,lambda,NULL);
157:     TSAdjointSolve(ts);
158:     VecDestroy(&lambda[0]);
159:   }
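/*
  After TSAdjointSolve() the sensitivities are available in lambda[0]; they can be visualized with
  the -ts_adjoint_monitor_draw_sensi option used in the tests below, or dumped by adding a call
  such as the following (illustrative, not part of the original example) before VecDestroy():

      VecView(lambda[0],PETSC_VIEWER_STDOUT_WORLD);
*/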
160:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161:      Free work space.  All PETSc objects should be destroyed when they
162:      are no longer needed.
163:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
164:   VecDestroy(&x);
165:   TSDestroy(&ts);
166:   DMDestroy(&da);
 167:   ierr = PetscFinalize();
168:   return ierr;
169: }

171: /* ------------------------------------------------------------------- */
172: /*
173:    RHSFunction - Evaluates nonlinear function, F(x).

175:    Input Parameters:
176: .  ts - the TS context
 177: .  U - input vector
178: .  ptr - optional user-defined context, as set by TSSetRHSFunction()

180:    Output Parameter:
181: .  F - function vector
182:  */
183: PetscErrorCode RHSFunction(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
184: {
185:   AppCtx         *appctx = (AppCtx*)ptr;
186:   DM             da;
188:   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
189:   PetscReal      hx,hy,sx,sy;
190:   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
191:   Field          **u,**f;
192:   Vec            localU;

195:   TSGetDM(ts,&da);
196:   DMGetLocalVector(da,&localU);
197:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
198:   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
199:   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

201:   /*
 202:      Scatter ghost points to local vector, using the 2-step process
203:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
204:      By placing code between these two statements, computations can be
205:      done while messages are in transition.
206:   */
207:   DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
208:   DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

210:   /*
211:      Get pointers to vector data
212:   */
213:   DMDAVecGetArrayRead(da,localU,&u);
214:   DMDAVecGetArray(da,F,&f);

216:   /*
217:      Get local grid boundaries
218:   */
219:   DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

221:   /*
222:      Compute function over the locally owned part of the grid
223:   */
224:   for (j=ys; j<ys+ym; j++) {
225:     for (i=xs; i<xs+xm; i++) {
226:       uc        = u[j][i].u;
227:       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
228:       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
229:       vc        = u[j][i].v;
230:       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
231:       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
232:       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
233:       f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
234:     }
235:   }
236:   PetscLogFlops(16.0*xm*ym);

238:   /*
239:      Restore vectors
240:   */
241:   DMDAVecRestoreArrayRead(da,localU,&u);
242:   DMDAVecRestoreArray(da,F,&f);
243:   DMRestoreLocalVector(da,&localU);
244:   return(0);
245: }
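/*
  For reference (see ex5.c), the loop above is a second-order centered-difference discretization,
  on a periodic grid, of the reaction-diffusion system

      u_t = D1 (u_xx + u_yy) - u v^2 + gamma (1 - u)
      v_t = D2 (v_xx + v_yy) + u v^2 - (gamma + kappa) v

  so that f[j][i] holds the right-hand side evaluated at grid point (i,j).
*/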

247: /* ------------------------------------------------------------------- */
248: PetscErrorCode InitialConditions(DM da,Vec U)
249: {
251:   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
252:   Field          **u;
253:   PetscReal      hx,hy,x,y;

256:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

258:   hx = 2.5/(PetscReal)Mx;
259:   hy = 2.5/(PetscReal)My;

261:   /*
262:      Get pointers to vector data
263:   */
264:   DMDAVecGetArray(da,U,&u);

266:   /*
267:      Get local grid boundaries
268:   */
269:   DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

271:   /*
272:      Compute function over the locally owned part of the grid
273:   */
274:   for (j=ys; j<ys+ym; j++) {
275:     y = j*hy;
276:     for (i=xs; i<xs+xm; i++) {
277:       x = i*hx;
278:       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
279:       else u[j][i].v = 0.0;

281:       u[j][i].u = 1.0 - 2.0*u[j][i].v;
282:     }
283:   }

285:   /*
286:      Restore vectors
287:   */
288:   DMDAVecRestoreArray(da,U,&u);
289:   return(0);
290: }

292: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
293: {
294:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
295:   DM             da;
297:   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
298:   PetscReal      hx,hy,sx,sy;
299:   PetscScalar    uc,vc;
300:   Field          **u;
301:   Vec            localU;
302:   MatStencil     stencil[6],rowstencil;
303:   PetscScalar    entries[6];

306:   TSGetDM(ts,&da);
307:   DMGetLocalVector(da,&localU);
308:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

310:   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
311:   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

313:   /*
 314:      Scatter ghost points to local vector, using the 2-step process
315:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
316:      By placing code between these two statements, computations can be
317:      done while messages are in transition.
318:   */
319:   DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
320:   DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

322:   /*
323:      Get pointers to vector data
324:   */
325:   DMDAVecGetArrayRead(da,localU,&u);

327:   /*
328:      Get local grid boundaries
329:   */
330:   DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

332:   stencil[0].k = 0;
333:   stencil[1].k = 0;
334:   stencil[2].k = 0;
335:   stencil[3].k = 0;
336:   stencil[4].k = 0;
337:   stencil[5].k = 0;
338:   rowstencil.k = 0;
339:   /*
340:      Compute function over the locally owned part of the grid
341:   */
342:   for (j=ys; j<ys+ym; j++) {

344:     stencil[0].j = j-1;
345:     stencil[1].j = j+1;
346:     stencil[2].j = j;
347:     stencil[3].j = j;
348:     stencil[4].j = j;
349:     stencil[5].j = j;
350:     rowstencil.k = 0; rowstencil.j = j;
351:     for (i=xs; i<xs+xm; i++) {
352:       uc = u[j][i].u;
353:       vc = u[j][i].v;

355:       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
356:       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

358:       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
359:       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
360:        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

362:       stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
363:       stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
364:       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
365:       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
366:       stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
367:       stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
368:       rowstencil.i = i; rowstencil.c = 0;

370:       MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
371:       if (appctx->aijpc) {
372:         MatSetValuesStencil(BB,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
373:       }
374:       stencil[0].c = 1; entries[0] = appctx->D2*sy;
375:       stencil[1].c = 1; entries[1] = appctx->D2*sy;
376:       stencil[2].c = 1; entries[2] = appctx->D2*sx;
377:       stencil[3].c = 1; entries[3] = appctx->D2*sx;
378:       stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
379:       stencil[5].c = 0; entries[5] = vc*vc;
380:       rowstencil.c = 1;

382:       MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
383:       if (appctx->aijpc) {
384:         MatSetValuesStencil(BB,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
385:       }
386:       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
387:     }
388:   }

390:   /*
391:      Restore vectors
392:   */
393:   PetscLogFlops(19.0*xm*ym);
394:   DMDAVecRestoreArrayRead(da,localU,&u);
395:   DMRestoreLocalVector(da,&localU);
396:   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
397:   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
398:   MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
399:   if (appctx->aijpc) {
400:     MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
401:     MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
402:     MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
403:   }

405:   return(0);
406: }
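/*
  The stencil entries above add the diffusion stencil to the pointwise Jacobian of the reaction
  terms; for the right-hand side evaluated in RHSFunction, that 2x2 block at each grid point is

      d(f.u)/du = -v^2 - gamma          d(f.u)/dv = -2 u v
      d(f.v)/du =  v^2                  d(f.v)/dv =  2 u v - gamma - kappa

  which is what is loaded into entries[4] (diagonal) and entries[5] (cross term) for each row.
*/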

408: /*
409:    IFunction - Evaluates implicit nonlinear function, xdot - F(x).

411:    Input Parameters:
412: .  ts - the TS context
413: .  U - input vector
414: .  Udot - input vector
 415: .  ptr - optional user-defined context, as set by TSSetIFunction()

417:    Output Parameter:
418: .  F - function vector
419:  */
420: PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
421: {
422:   AppCtx         *appctx = (AppCtx*)ptr;
423:   DM             da;
425:   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
426:   PetscReal      hx,hy,sx,sy;
427:   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
428:   Field          **u,**f,**udot;
429:   Vec            localU;

432:   TSGetDM(ts,&da);
433:   DMGetLocalVector(da,&localU);
434:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
435:   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
436:   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

438:   /*
 439:      Scatter ghost points to local vector, using the 2-step process
440:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
441:      By placing code between these two statements, computations can be
442:      done while messages are in transition.
443:   */
444:   DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
445:   DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

447:   /*
448:      Get pointers to vector data
449:   */
450:   DMDAVecGetArrayRead(da,localU,&u);
451:   DMDAVecGetArray(da,F,&f);
452:   DMDAVecGetArrayRead(da,Udot,&udot);

454:   /*
455:      Get local grid boundaries
456:   */
457:   DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

459:   /*
460:      Compute function over the locally owned part of the grid
461:   */
462:   for (j=ys; j<ys+ym; j++) {
463:     for (i=xs; i<xs+xm; i++) {
464:       uc        = u[j][i].u;
465:       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
466:       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
467:       vc        = u[j][i].v;
468:       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
469:       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
470:       f[j][i].u = udot[j][i].u - ( appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc));
471:       f[j][i].v = udot[j][i].v - ( appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc);
472:     }
473:   }
474:   PetscLogFlops(16.0*xm*ym);

476:   /*
477:      Restore vectors
478:   */
479:   DMDAVecRestoreArrayRead(da,localU,&u);
480:   DMDAVecRestoreArray(da,F,&f);
481:   DMDAVecRestoreArrayRead(da,Udot,&udot);
482:   DMRestoreLocalVector(da,&localU);
483:   return(0);
484: }

486: PetscErrorCode IJacobian(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat BB,void *ctx)
487: {
488:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
489:   DM             da;
491:   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
492:   PetscReal      hx,hy,sx,sy;
493:   PetscScalar    uc,vc;
494:   Field          **u;
495:   Vec            localU;
496:   MatStencil     stencil[6],rowstencil;
497:   PetscScalar    entries[6];

500:   TSGetDM(ts,&da);
501:   DMGetLocalVector(da,&localU);
502:   DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

504:   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
505:   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

507:   /*
 508:      Scatter ghost points to local vector, using the 2-step process
509:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
510:      By placing code between these two statements, computations can be
511:      done while messages are in transition.
512:   */
513:   DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
514:   DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

516:   /*
517:      Get pointers to vector data
518:   */
519:   DMDAVecGetArrayRead(da,localU,&u);

521:   /*
522:      Get local grid boundaries
523:   */
524:   DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

526:   stencil[0].k = 0;
527:   stencil[1].k = 0;
528:   stencil[2].k = 0;
529:   stencil[3].k = 0;
530:   stencil[4].k = 0;
531:   stencil[5].k = 0;
532:   rowstencil.k = 0;
533:   /*
534:      Compute function over the locally owned part of the grid
535:   */
536:   for (j=ys; j<ys+ym; j++) {

538:     stencil[0].j = j-1;
539:     stencil[1].j = j+1;
540:     stencil[2].j = j;
541:     stencil[3].j = j;
542:     stencil[4].j = j;
543:     stencil[5].j = j;
544:     rowstencil.k = 0; rowstencil.j = j;
545:     for (i=xs; i<xs+xm; i++) {
546:       uc = u[j][i].u;
547:       vc = u[j][i].v;

549:       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
550:       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

552:       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
553:       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
554:        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

556:       stencil[0].i = i; stencil[0].c = 0; entries[0] = -appctx->D1*sy;
557:       stencil[1].i = i; stencil[1].c = 0; entries[1] = -appctx->D1*sy;
558:       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = -appctx->D1*sx;
559:       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = -appctx->D1*sx;
560:       stencil[4].i = i; stencil[4].c = 0; entries[4] = 2.0*appctx->D1*(sx + sy) + vc*vc + appctx->gamma + a;
561:       stencil[5].i = i; stencil[5].c = 1; entries[5] = 2.0*uc*vc;
562:       rowstencil.i = i; rowstencil.c = 0;

564:       MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
565:       if (appctx->aijpc) {
566:         MatSetValuesStencil(BB,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
567:       }
568:       stencil[0].c = 1; entries[0] = -appctx->D2*sy;
569:       stencil[1].c = 1; entries[1] = -appctx->D2*sy;
570:       stencil[2].c = 1; entries[2] = -appctx->D2*sx;
571:       stencil[3].c = 1; entries[3] = -appctx->D2*sx;
572:       stencil[4].c = 1; entries[4] = 2.0*appctx->D2*(sx + sy) - 2.0*uc*vc + appctx->gamma + appctx->kappa + a;
573:       stencil[5].c = 0; entries[5] = -vc*vc;
574:       rowstencil.c = 1;

576:       MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
577:       if (appctx->aijpc) {
578:         MatSetValuesStencil(BB,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
579:       }
580:       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
581:     }
582:   }

584:   /*
585:      Restore vectors
586:   */
587:   PetscLogFlops(19.0*xm*ym);
588:   DMDAVecRestoreArrayRead(da,localU,&u);
589:   DMRestoreLocalVector(da,&localU);
590:   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
591:   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
592:   MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
593:   if (appctx->aijpc) {
594:     MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
595:     MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
596:     MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
597:   }
598:   return(0);
599: }
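/*
  Because IFunction computes F(t,U,Udot) = Udot - RHS(t,U), the matrix assembled here follows the
  TSSetIJacobian() convention J = dF/dU + a*dF/dUdot = a*I - dRHS/dU: every entry is the negative
  of the corresponding RHSJacobian entry, with the shift a added to the diagonal.
*/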


602: /*TEST

604:    build:
605:       requires: !complex !single

607:    test:
608:       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
609:       output_file: output/ex5adj_1.out

611:    test:
612:       suffix: 2
613:       nsize: 2
614:       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp

616:    test:
617:       suffix: 3
618:       nsize: 2
619:       args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi

621:    test:
622:       suffix: 4
623:       nsize: 2
624:       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
625:       output_file: output/ex5adj_2.out

627:    test:
628:       suffix: 5
629:       nsize: 2
630:       args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
631:       output_file: output/ex5adj_1.out

633:    test:
634:       suffix: knl
635:       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
636:       output_file: output/ex5adj_3.out
637:       requires: knl

639:    test:
640:       suffix: sell
641:       nsize: 4
642:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
643:       output_file: output/ex5adj_sell_1.out

645:    test:
646:       suffix: aijsell
647:       nsize: 4
648:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
649:       output_file: output/ex5adj_sell_1.out

651:    test:
652:       suffix: sell2
653:       nsize: 4
654:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
655:       output_file: output/ex5adj_sell_2.out

657:    test:
658:       suffix: aijsell2
659:       nsize: 4
660:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
661:       output_file: output/ex5adj_sell_2.out

663:    test:
664:       suffix: sell3
665:       nsize: 4
666:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
667:       output_file: output/ex5adj_sell_3.out

669:    test:
670:       suffix: sell4
671:       nsize: 4
672:       args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
673:       output_file: output/ex5adj_sell_4.out

675:    test:
676:       suffix: sell5
677:       nsize: 4
678:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
679:       output_file: output/ex5adj_sell_5.out

681:    test:
682:       suffix: aijsell5
683:       nsize: 4
684:       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
685:       output_file: output/ex5adj_sell_5.out

687:    test:
688:       suffix: sell6
689:       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
690:       output_file: output/ex5adj_sell_6.out

692: TEST*/