Actual source code: ex9bus.c
1: static char help[] = "Power grid stability analysis of WECC 9 bus system.\n\
2: This example is based on the 9-bus (node) example given in the book Power\n\
3: Systems Dynamics and Stability (Chapter 7) by P. Sauer and M. A. Pai.\n\
4: The power grid in this example consists of 9 buses (nodes), 3 generators,\n\
5: 3 loads, and 9 transmission lines. The network equations are written\n\
6: in current balance form using rectangular coordinates.\n\n";
8: /*
9: The equations for the stability analysis are described by the DAE
11: \dot{x} = f(x,y,t)
12: 0 = g(x,y,t)
14: where the generators are described by differential equations, while the algebraic
15: constraints define the network equations.
17: The generators are modeled by 4th order differential equations describing the electrical
18: and mechanical dynamics. Each generator also has an exciter system modeled by 3rd order
19: differential equations describing the exciter, voltage regulator, and feedback stabilizer
20: mechanism.
22: The network equations are described by nodal current balance equations.
23: I(x,y) - Y*V = 0
25: where:
26: I(x,y) is the current injected from generators and loads.
27: Y is the admittance matrix, and
28: V is the voltage vector
29: */
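/*
   Unknown ordering: each generator contributes 9 unknowns stored consecutively as
   [Eqp, Edp, delta, w, Id, Iq, Efd, RF, VR]. Of these, Eqp, Edp, delta, w, Efd, RF,
   and VR are differential variables, while the stator currents Id, Iq and the bus
   voltages (Vr, Vi interleaved for each of the 9 buses) are algebraic. This ordering
   is used below when building the is_diff/is_alg index sets and when assembling the
   residuals and Jacobians.
*/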
31: /*
32: Include "petscts.h" so that we can use TS solvers. Note that this
33: file automatically includes:
34: petscsys.h - base PETSc routines petscvec.h - vectors
35: petscmat.h - matrices
36: petscis.h - index sets petscksp.h - Krylov subspace methods
37: petscviewer.h - viewers petscpc.h - preconditioners
38: petscksp.h - linear solvers
39: */
41: #include <petscts.h>
42: #include <petscdm.h>
43: #include <petscdmda.h>
44: #include <petscdmcomposite.h>
46: #define freq 60
47: #define w_s (2 * PETSC_PI * freq)
49: /* Sizes and indices */
50: const PetscInt nbus = 9; /* Number of network buses */
51: const PetscInt ngen = 3; /* Number of generators */
52: const PetscInt nload = 3; /* Number of loads */
53: const PetscInt gbus[3] = {0, 1, 2}; /* Buses at which generators are incident */
54: const PetscInt lbus[3] = {4, 5, 7}; /* Buses at which loads are incident */
56: /* Generator real and reactive powers (found via loadflow) */
57: const PetscScalar PG[3] = {0.716786142395021, 1.630000000000000, 0.850000000000000};
58: const PetscScalar QG[3] = {0.270702180178785, 0.066120127797275, -0.108402221791588};
59: /* Generator constants */
60: const PetscScalar H[3] = {23.64, 6.4, 3.01}; /* Inertia constant */
61: const PetscScalar Rs[3] = {0.0, 0.0, 0.0}; /* Stator Resistance */
62: const PetscScalar Xd[3] = {0.146, 0.8958, 1.3125}; /* d-axis reactance */
63: const PetscScalar Xdp[3] = {0.0608, 0.1198, 0.1813}; /* d-axis transient reactance */
64: const PetscScalar Xq[3] = {0.4360, 0.8645, 1.2578}; /* q-axis reactance Xq(1) set to 0.4360, value given in text 0.0969 */
65: const PetscScalar Xqp[3] = {0.0969, 0.1969, 0.25}; /* q-axis transient reactance */
66: const PetscScalar Td0p[3] = {8.96, 6.0, 5.89}; /* d-axis open circuit time constant */
67: const PetscScalar Tq0p[3] = {0.31, 0.535, 0.6}; /* q-axis open circuit time constant */
68: PetscScalar M[3]; /* M = 2*H/w_s */
69: PetscScalar D[3]; /* D = 0.1*M */
71: PetscScalar TM[3]; /* Mechanical Torque */
72: /* Exciter system constants */
73: const PetscScalar KA[3] = {20.0, 20.0, 20.0}; /* Voltage regulator gain constant */
74: const PetscScalar TA[3] = {0.2, 0.2, 0.2}; /* Voltage regulator time constant */
75: const PetscScalar KE[3] = {1.0, 1.0, 1.0}; /* Exciter gain constant */
76: const PetscScalar TE[3] = {0.314, 0.314, 0.314}; /* Exciter time constant */
77: const PetscScalar KF[3] = {0.063, 0.063, 0.063}; /* Feedback stabilizer gain constant */
78: const PetscScalar TF[3] = {0.35, 0.35, 0.35}; /* Feedback stabilizer time constant */
79: const PetscScalar k1[3] = {0.0039, 0.0039, 0.0039};
80: const PetscScalar k2[3] = {1.555, 1.555, 1.555}; /* k1 and k2 for calculating the saturation function SE = k1*exp(k2*Efd) */
81: const PetscScalar VRMIN[3] = {-4.0, -4.0, -4.0};
82: const PetscScalar VRMAX[3] = {7.0, 7.0, 7.0};
83: PetscInt VRatmin[3];
84: PetscInt VRatmax[3];
86: PetscScalar Vref[3];
87: /* Load constants
88: We use a composite load model that describes the real and reactive powers at each time instant as follows
89: P(t) = \sum_{k=0}^{ld_nsegsp-1} ld_alphap[k] * P_D0 * (V_m(t)/V_m0)^{ld_betap[k]}
90: Q(t) = \sum_{k=0}^{ld_nsegsq-1} ld_alphaq[k] * Q_D0 * (V_m(t)/V_m0)^{ld_betaq[k]}
91: where
92: ld_nsegsp,ld_nsegsq - Number of individual load models for the real and reactive power loads
93: ld_alphap,ld_alphaq - Percentage contributions (weights) of the loads
94: P_D0 - Real power load
95: Q_D0 - Reactive power load
96: V_m(t) - Voltage magnitude at time t
97: V_m0 - Voltage magnitude at t = 0
98: ld_betap, ld_betaq - Exponents describing the load model for the real and reactive parts
100: Note: Currently all loads have the same characteristics.
101: */
102: const PetscScalar PD0[3] = {1.25, 0.9, 1.0};
103: const PetscScalar QD0[3] = {0.5, 0.3, 0.35};
104: const PetscInt ld_nsegsp[3] = {3, 3, 3};
105: const PetscScalar ld_alphap[3] = {1.0, 0.0, 0.0};
106: const PetscScalar ld_betap[3] = {2.0, 1.0, 0.0};
107: const PetscInt ld_nsegsq[3] = {3, 3, 3};
108: const PetscScalar ld_alphaq[3] = {1.0, 0.0, 0.0};
109: const PetscScalar ld_betaq[3] = {2.0, 1.0, 0.0};
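/*
   With the default weights above (ld_alphap = ld_alphaq = {1.0, 0.0, 0.0} and the first
   exponent equal to 2), the composite model reduces to a constant-impedance load at every
   load bus: P(t) = P_D0*(V_m(t)/V_m0)^2 and Q(t) = Q_D0*(V_m(t)/V_m0)^2.
*/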
111: typedef struct {
112: DM dmgen, dmnet; /* DMs to manage generator and network subsystem */
113: DM dmpgrid; /* Composite DM to manage the entire power grid */
114: Mat Ybus; /* Network admittance matrix */
115: Vec V0; /* Initial voltage vector (Power flow solution) */
116: PetscReal tfaulton, tfaultoff; /* Fault on and off times */
117: PetscInt faultbus; /* Fault bus */
118: PetscScalar Rfault;
119: PetscReal t0, tmax;
120: PetscInt neqs_gen, neqs_net, neqs_pgrid;
121: Mat Sol; /* Matrix to save solution at each time step */
122: PetscInt stepnum;
123: PetscReal t;
124: SNES snes_alg;
125: IS is_diff; /* indices for differential equations */
126: IS is_alg; /* indices for algebraic equations */
127: PetscBool setisdiff; /* TS computes truncation error based only on the differential variables */
128: PetscBool semiexplicit; /* If this flag is set, a semi-explicit method using TSRK is used */
129: } Userctx;
131: /*
132: The first two events are for the fault-on and fault-off times, respectively. The remaining events
133: check the min/max limits on the state variable VR. A non-windup limiter is used for
134: the VR limits.
135: */
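/*
   The event functions are laid out as fvalue[0] = t - tfaulton, fvalue[1] = t - tfaultoff, and,
   for each generator i, fvalue[2+2*i] and fvalue[2+2*i+1] for the VR upper and lower limits.
   While VR is not at a limit, the event value is the distance to the limit; once VR is pinned at
   a limit (VRatmax/VRatmin set), the same event entry switches to the dVR/dt expression so the
   variable is released as soon as the regulator would drive it back inside the limits.
*/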
136: PetscErrorCode EventFunction(TS ts, PetscReal t, Vec X, PetscScalar *fvalue, void *ctx)
137: {
138: Userctx *user = (Userctx *)ctx;
139: Vec Xgen, Xnet;
140: PetscInt i, idx = 0;
141: const PetscScalar *xgen, *xnet;
142: PetscScalar Efd, RF, VR, Vr, Vi, Vm;
144: PetscFunctionBegin;
146: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
147: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
149: PetscCall(VecGetArrayRead(Xgen, &xgen));
150: PetscCall(VecGetArrayRead(Xnet, &xnet));
152: /* Event for fault-on time */
153: fvalue[0] = t - user->tfaulton;
154: /* Event for fault-off time */
155: fvalue[1] = t - user->tfaultoff;
157: for (i = 0; i < ngen; i++) {
158: Efd = xgen[idx + 6];
159: RF = xgen[idx + 7];
160: VR = xgen[idx + 8];
162: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
163: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
164: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
166: if (!VRatmax[i]) {
167: fvalue[2 + 2 * i] = VRMAX[i] - VR;
168: } else {
169: fvalue[2 + 2 * i] = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
170: }
171: if (!VRatmin[i]) {
172: fvalue[2 + 2 * i + 1] = VRMIN[i] - VR;
173: } else {
174: fvalue[2 + 2 * i + 1] = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
175: }
176: idx = idx + 9;
177: }
178: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
179: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
181: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
183: PetscFunctionReturn(PETSC_SUCCESS);
184: }
186: PetscErrorCode PostEventFunction(TS ts, PetscInt nevents, PetscInt event_list[], PetscReal t, Vec X, PetscBool forwardsolve, void *ctx)
187: {
188: Userctx *user = (Userctx *)ctx;
189: Vec Xgen, Xnet;
190: PetscScalar *xgen, *xnet;
191: PetscInt row_loc, col_loc;
192: PetscScalar val;
193: PetscInt i, j, idx = 0, event_num;
194: PetscScalar fvalue;
195: PetscScalar Efd, RF, VR;
196: PetscScalar Vr, Vi, Vm;
198: PetscFunctionBegin;
200: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
201: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
203: PetscCall(VecGetArray(Xgen, &xgen));
204: PetscCall(VecGetArray(Xnet, &xnet));
206: for (i = 0; i < nevents; i++) {
207: if (event_list[i] == 0) {
208: /* Apply disturbance - resistive fault at user->faultbus */
209: /* This is done by adding a shunt conductance 1/Rfault to the
210: diagonal block of the faulted bus in the Ybus matrix */
211: row_loc = 2 * user->faultbus;
212: col_loc = 2 * user->faultbus + 1; /* Location for G */
213: val = 1 / user->Rfault;
214: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
215: row_loc = 2 * user->faultbus + 1;
216: col_loc = 2 * user->faultbus; /* Location for G */
217: val = 1 / user->Rfault;
218: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
220: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
221: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
223: /* Solve the algebraic equations */
224: PetscCall(SNESSolve(user->snes_alg, NULL, X));
225: } else if (event_list[i] == 1) {
226: /* Remove the fault */
227: row_loc = 2 * user->faultbus;
228: col_loc = 2 * user->faultbus + 1;
229: val = -1 / user->Rfault;
230: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
231: row_loc = 2 * user->faultbus + 1;
232: col_loc = 2 * user->faultbus;
233: val = -1 / user->Rfault;
234: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
236: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
237: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
239: /* Solve the algebraic equations */
240: PetscCall(SNESSolve(user->snes_alg, NULL, X));
242: /* Check the VR derivatives and reset flags if needed */
243: for (j = 0; j < ngen; j++) {
244: Efd = xgen[idx + 6];
245: RF = xgen[idx + 7];
246: VR = xgen[idx + 8];
248: Vr = xnet[2 * gbus[j]]; /* Real part of generator terminal voltage */
249: Vi = xnet[2 * gbus[j] + 1]; /* Imaginary part of the generator terminal voltage */
250: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
252: if (VRatmax[j]) {
253: fvalue = (VR - KA[j] * RF + KA[j] * KF[j] * Efd / TF[j] - KA[j] * (Vref[j] - Vm)) / TA[j];
254: if (fvalue < 0) {
255: VRatmax[j] = 0;
256: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went negative on fault clearing at time %g\n", j, (double)t));
257: }
258: }
259: if (VRatmin[j]) {
260: fvalue = (VR - KA[j] * RF + KA[j] * KF[j] * Efd / TF[j] - KA[j] * (Vref[j] - Vm)) / TA[j];
262: if (fvalue > 0) {
263: VRatmin[j] = 0;
264: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went positive on fault clearing at time %g\n", j, (double)t));
265: }
266: }
267: idx = idx + 9;
268: }
269: } else {
270: idx = (event_list[i] - 2) / 2;
271: event_num = (event_list[i] - 2) % 2;
272: if (event_num == 0) { /* Max VR */
273: if (!VRatmax[idx]) {
274: VRatmax[idx] = 1;
275: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit upper limit at time %g\n", idx, (double)t));
276: } else {
277: VRatmax[idx] = 0;
278: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is negative at time %g\n", idx, (double)t));
279: }
280: } else {
281: if (!VRatmin[idx]) {
282: VRatmin[idx] = 1;
283: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit lower limit at time %g\n", idx, (double)t));
284: } else {
285: VRatmin[idx] = 0;
286: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is positive at time %g\n", idx, (double)t));
287: }
288: }
289: }
290: }
292: PetscCall(VecRestoreArray(Xgen, &xgen));
293: PetscCall(VecRestoreArray(Xnet, &xnet));
295: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
297: PetscFunctionReturn(PETSC_SUCCESS);
298: }
300: /* Converts from machine frame (dq) to network (phase a real,imag) reference frame */
301: PetscErrorCode dq2ri(PetscScalar Fd, PetscScalar Fq, PetscScalar delta, PetscScalar *Fr, PetscScalar *Fi)
302: {
303: PetscFunctionBegin;
304: *Fr = Fd * PetscSinScalar(delta) + Fq * PetscCosScalar(delta);
305: *Fi = -Fd * PetscCosScalar(delta) + Fq * PetscSinScalar(delta);
306: PetscFunctionReturn(PETSC_SUCCESS);
307: }
309: /* Converts from network frame (phase a real,imag) to machine (dq) reference frame */
310: PetscErrorCode ri2dq(PetscScalar Fr, PetscScalar Fi, PetscScalar delta, PetscScalar *Fd, PetscScalar *Fq)
311: {
312: PetscFunctionBegin;
313: *Fd = Fr * PetscSinScalar(delta) - Fi * PetscCosScalar(delta);
314: *Fq = Fr * PetscCosScalar(delta) + Fi * PetscSinScalar(delta);
315: PetscFunctionReturn(PETSC_SUCCESS);
316: }
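/*
   The two routines above apply the orthogonal rotation
     [Fr]   [ sin(delta)  cos(delta)] [Fd]
     [Fi] = [-cos(delta)  sin(delta)] [Fq]
   and its transpose (inverse), so dq2ri followed by ri2dq recovers the original dq quantities.
*/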
318: /* Saves the solution at each time to a matrix */
319: PetscErrorCode SaveSolution(TS ts)
320: {
321: Userctx *user;
322: Vec X;
323: const PetscScalar *x;
324: PetscScalar *mat;
325: PetscInt idx;
326: PetscReal t;
328: PetscFunctionBegin;
329: PetscCall(TSGetApplicationContext(ts, &user));
330: PetscCall(TSGetTime(ts, &t));
331: PetscCall(TSGetSolution(ts, &X));
332: idx = user->stepnum * (user->neqs_pgrid + 1);
333: PetscCall(MatDenseGetArray(user->Sol, &mat));
334: PetscCall(VecGetArrayRead(X, &x));
335: mat[idx] = t;
336: PetscCall(PetscArraycpy(mat + idx + 1, x, user->neqs_pgrid));
337: PetscCall(MatDenseRestoreArray(user->Sol, &mat));
338: PetscCall(VecRestoreArrayRead(X, &x));
339: user->stepnum++;
340: PetscFunctionReturn(PETSC_SUCCESS);
341: }
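/*
   SetInitialGuess below computes a consistent (steady-state) initial condition from the power
   flow solution V0: for each generator the terminal current, machine angle delta, dq currents
   and voltages, transient EMFs, exciter states, and the reference voltage Vref are chosen so
   that all differential equations start at equilibrium (zero residual) at t = 0.
*/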
343: PetscErrorCode SetInitialGuess(Vec X, Userctx *user)
344: {
345: Vec Xgen, Xnet;
346: PetscScalar *xgen;
347: const PetscScalar *xnet;
348: PetscInt i, idx = 0;
349: PetscScalar Vr, Vi, IGr, IGi, Vm, Vm2;
350: PetscScalar Eqp, Edp, delta;
351: PetscScalar Efd, RF, VR; /* Exciter variables */
352: PetscScalar Id, Iq; /* Generator dq axis currents */
353: PetscScalar theta, Vd, Vq, SE;
355: PetscFunctionBegin;
356: M[0] = 2 * H[0] / w_s;
357: M[1] = 2 * H[1] / w_s;
358: M[2] = 2 * H[2] / w_s;
359: D[0] = 0.1 * M[0];
360: D[1] = 0.1 * M[1];
361: D[2] = 0.1 * M[2];
363: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
365: /* Network subsystem initialization */
366: PetscCall(VecCopy(user->V0, Xnet));
368: /* Generator subsystem initialization */
369: PetscCall(VecGetArrayWrite(Xgen, &xgen));
370: PetscCall(VecGetArrayRead(Xnet, &xnet));
372: for (i = 0; i < ngen; i++) {
373: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
374: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
375: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
376: Vm2 = Vm * Vm;
377: IGr = (Vr * PG[i] + Vi * QG[i]) / Vm2;
378: IGi = (Vi * PG[i] - Vr * QG[i]) / Vm2;
380: delta = PetscAtan2Real(Vi + Xq[i] * IGr, Vr - Xq[i] * IGi); /* Machine angle */
382: theta = PETSC_PI / 2.0 - delta;
384: Id = IGr * PetscCosScalar(theta) - IGi * PetscSinScalar(theta); /* d-axis stator current */
385: Iq = IGr * PetscSinScalar(theta) + IGi * PetscCosScalar(theta); /* q-axis stator current */
387: Vd = Vr * PetscCosScalar(theta) - Vi * PetscSinScalar(theta);
388: Vq = Vr * PetscSinScalar(theta) + Vi * PetscCosScalar(theta);
390: Edp = Vd + Rs[i] * Id - Xqp[i] * Iq; /* d-axis transient EMF */
391: Eqp = Vq + Rs[i] * Iq + Xdp[i] * Id; /* q-axis transient EMF */
393: TM[i] = PG[i];
395: /* The generator variables are ordered as [Eqp,Edp,delta,w,Id,Iq,Efd,RF,VR] */
396: xgen[idx] = Eqp;
397: xgen[idx + 1] = Edp;
398: xgen[idx + 2] = delta;
399: xgen[idx + 3] = w_s;
401: idx = idx + 4;
403: xgen[idx] = Id;
404: xgen[idx + 1] = Iq;
406: idx = idx + 2;
408: /* Exciter */
409: Efd = Eqp + (Xd[i] - Xdp[i]) * Id;
410: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
411: VR = KE[i] * Efd + SE;
412: RF = KF[i] * Efd / TF[i];
414: xgen[idx] = Efd;
415: xgen[idx + 1] = RF;
416: xgen[idx + 2] = VR;
418: Vref[i] = Vm + (VR / KA[i]);
420: VRatmin[i] = VRatmax[i] = 0;
422: idx = idx + 3;
423: }
424: PetscCall(VecRestoreArrayWrite(Xgen, &xgen));
425: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
427: /* PetscCall(VecView(Xgen,0)); */
428: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, X, Xgen, Xnet));
429: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
430: PetscFunctionReturn(PETSC_SUCCESS);
431: }
433: /* Computes F = [f(x,y);g(x,y)] */
434: PetscErrorCode ResidualFunction(Vec X, Vec F, Userctx *user)
435: {
436: Vec Xgen, Xnet, Fgen, Fnet;
437: const PetscScalar *xgen, *xnet;
438: PetscScalar *fgen, *fnet;
439: PetscInt i, idx = 0;
440: PetscScalar Vr, Vi, Vm, Vm2;
441: PetscScalar Eqp, Edp, delta, w; /* Generator variables */
442: PetscScalar Efd, RF, VR; /* Exciter variables */
443: PetscScalar Id, Iq; /* Generator dq axis currents */
444: PetscScalar Vd, Vq, SE;
445: PetscScalar IGr, IGi, IDr, IDi;
446: PetscScalar Zdq_inv[4], det;
447: PetscScalar PD, QD, Vm0, *v0;
448: PetscInt k;
450: PetscFunctionBegin;
451: PetscCall(VecZeroEntries(F));
452: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
453: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Fgen, &Fnet));
454: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
455: PetscCall(DMCompositeScatter(user->dmpgrid, F, Fgen, Fnet));
457: /* Network current balance residual Y*V - IG + IL = 0. Only Y*V is added here.
458: The generator current injection IG and the load current injection IL are added below.
459: */
460: /* Note that the values in Ybus are stored assuming the imaginary current balance
461: equation is ordered first followed by real current balance equation for each bus.
462: Thus imaginary current contribution goes in location 2*i, and
463: real current contribution in 2*i+1
464: */
465: PetscCall(MatMult(user->Ybus, Xnet, Fnet));
467: PetscCall(VecGetArrayRead(Xgen, &xgen));
468: PetscCall(VecGetArrayRead(Xnet, &xnet));
469: PetscCall(VecGetArrayWrite(Fgen, &fgen));
470: PetscCall(VecGetArrayWrite(Fnet, &fnet));
472: /* Generator subsystem */
473: for (i = 0; i < ngen; i++) {
474: Eqp = xgen[idx];
475: Edp = xgen[idx + 1];
476: delta = xgen[idx + 2];
477: w = xgen[idx + 3];
478: Id = xgen[idx + 4];
479: Iq = xgen[idx + 5];
480: Efd = xgen[idx + 6];
481: RF = xgen[idx + 7];
482: VR = xgen[idx + 8];
484: /* Generator differential equations */
485: fgen[idx] = (-Eqp - (Xd[i] - Xdp[i]) * Id + Efd) / Td0p[i];
486: fgen[idx + 1] = (-Edp + (Xq[i] - Xqp[i]) * Iq) / Tq0p[i];
487: fgen[idx + 2] = w - w_s;
488: fgen[idx + 3] = (TM[i] - Edp * Id - Eqp * Iq - (Xqp[i] - Xdp[i]) * Id * Iq - D[i] * (w - w_s)) / M[i];
490: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
491: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
493: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
494: /* Algebraic equations for stator currents */
495: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
497: Zdq_inv[0] = Rs[i] / det;
498: Zdq_inv[1] = Xqp[i] / det;
499: Zdq_inv[2] = -Xdp[i] / det;
500: Zdq_inv[3] = Rs[i] / det;
502: fgen[idx + 4] = Zdq_inv[0] * (-Edp + Vd) + Zdq_inv[1] * (-Eqp + Vq) + Id;
503: fgen[idx + 5] = Zdq_inv[2] * (-Edp + Vd) + Zdq_inv[3] * (-Eqp + Vq) + Iq;
505: /* Add generator current injection to network */
506: PetscCall(dq2ri(Id, Iq, delta, &IGr, &IGi));
508: fnet[2 * gbus[i]] -= IGi;
509: fnet[2 * gbus[i] + 1] -= IGr;
511: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
513: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
515: /* Exciter differential equations */
516: fgen[idx + 6] = (-KE[i] * Efd - SE + VR) / TE[i];
517: fgen[idx + 7] = (-RF + KF[i] * Efd / TF[i]) / TF[i];
518: if (VRatmax[i]) fgen[idx + 8] = VR - VRMAX[i];
519: else if (VRatmin[i]) fgen[idx + 8] = VRMIN[i] - VR;
520: else fgen[idx + 8] = (-VR + KA[i] * RF - KA[i] * KF[i] * Efd / TF[i] + KA[i] * (Vref[i] - Vm)) / TA[i];
522: idx = idx + 9;
523: }
525: PetscCall(VecGetArray(user->V0, &v0));
526: for (i = 0; i < nload; i++) {
527: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
528: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
529: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
530: Vm2 = Vm * Vm;
531: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
532: PD = QD = 0.0;
533: for (k = 0; k < ld_nsegsp[i]; k++) PD += ld_alphap[k] * PD0[i] * PetscPowScalar((Vm / Vm0), ld_betap[k]);
534: for (k = 0; k < ld_nsegsq[i]; k++) QD += ld_alphaq[k] * QD0[i] * PetscPowScalar((Vm / Vm0), ld_betaq[k]);
536: /* Load currents */
537: IDr = (PD * Vr + QD * Vi) / Vm2;
538: IDi = (-QD * Vr + PD * Vi) / Vm2;
540: fnet[2 * lbus[i]] += IDi;
541: fnet[2 * lbus[i] + 1] += IDr;
542: }
543: PetscCall(VecRestoreArray(user->V0, &v0));
545: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
546: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
547: PetscCall(VecRestoreArrayWrite(Fgen, &fgen));
548: PetscCall(VecRestoreArrayWrite(Fnet, &fnet));
550: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, F, Fgen, Fnet));
551: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
552: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Fgen, &Fnet));
553: PetscFunctionReturn(PETSC_SUCCESS);
554: }
556: /* f(x,y)
557: g(x,y)
558: */
559: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec X, Vec F, void *ctx)
560: {
561: Userctx *user = (Userctx *)ctx;
563: PetscFunctionBegin;
564: user->t = t;
565: PetscCall(ResidualFunction(X, F, user));
566: PetscFunctionReturn(PETSC_SUCCESS);
567: }
569: /* f(x,y) - \dot{x}
570: g(x,y) = 0
571: */
572: PetscErrorCode IFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx)
573: {
574: PetscScalar *f;
575: const PetscScalar *xdot;
576: PetscInt i;
578: PetscFunctionBegin;
579: PetscCall(RHSFunction(ts, t, X, F, ctx));
580: PetscCall(VecScale(F, -1.0));
581: PetscCall(VecGetArray(F, &f));
582: PetscCall(VecGetArrayRead(Xdot, &xdot));
583: for (i = 0; i < ngen; i++) {
584: f[9 * i] += xdot[9 * i];
585: f[9 * i + 1] += xdot[9 * i + 1];
586: f[9 * i + 2] += xdot[9 * i + 2];
587: f[9 * i + 3] += xdot[9 * i + 3];
588: f[9 * i + 6] += xdot[9 * i + 6];
589: f[9 * i + 7] += xdot[9 * i + 7];
590: f[9 * i + 8] += xdot[9 * i + 8];
591: }
592: PetscCall(VecRestoreArray(F, &f));
593: PetscCall(VecRestoreArrayRead(Xdot, &xdot));
594: PetscFunctionReturn(PETSC_SUCCESS);
595: }
597: /* This function is used for solving the algebraic system only during fault on and
598: off times. It computes the entire F and then zeros out the part corresponding to
599: differential equations
600: F = [0;g(y)];
601: */
602: PetscErrorCode AlgFunction(SNES snes, Vec X, Vec F, void *ctx)
603: {
604: Userctx *user = (Userctx *)ctx;
605: PetscScalar *f;
606: PetscInt i;
608: PetscFunctionBegin;
609: PetscCall(ResidualFunction(X, F, user));
610: PetscCall(VecGetArray(F, &f));
611: for (i = 0; i < ngen; i++) {
612: f[9 * i] = 0;
613: f[9 * i + 1] = 0;
614: f[9 * i + 2] = 0;
615: f[9 * i + 3] = 0;
616: f[9 * i + 6] = 0;
617: f[9 * i + 7] = 0;
618: f[9 * i + 8] = 0;
619: }
620: PetscCall(VecRestoreArray(F, &f));
621: PetscFunctionReturn(PETSC_SUCCESS);
622: }
624: PetscErrorCode PostStage(TS ts, PetscReal t, PetscInt i, Vec *X)
625: {
626: Userctx *user;
628: PetscFunctionBegin;
629: PetscCall(TSGetApplicationContext(ts, &user));
630: PetscCall(SNESSolve(user->snes_alg, NULL, X[i]));
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: PetscErrorCode PostEvaluate(TS ts)
635: {
636: Userctx *user;
637: Vec X;
639: PetscFunctionBegin;
640: PetscCall(TSGetApplicationContext(ts, &user));
641: PetscCall(TSGetSolution(ts, &X));
642: PetscCall(SNESSolve(user->snes_alg, NULL, X));
643: PetscFunctionReturn(PETSC_SUCCESS);
644: }
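/*
   PostStage and PostEvaluate support the semi-explicit (-dae_semiexplicit) path: after each
   Runge-Kutta stage and after each step's solution is formed, the algebraic system is re-solved
   with snes_alg so that the algebraic variables (Id, Iq and the bus voltages) remain consistent
   with the updated differential variables.
*/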
646: PetscErrorCode PreallocateJacobian(Mat J, Userctx *user)
647: {
648: PetscInt *d_nnz;
649: PetscInt i, idx = 0, start = 0;
650: PetscInt ncols;
652: PetscFunctionBegin;
653: PetscCall(PetscMalloc1(user->neqs_pgrid, &d_nnz));
654: for (i = 0; i < user->neqs_pgrid; i++) d_nnz[i] = 0;
655: /* Generator subsystem */
656: for (i = 0; i < ngen; i++) {
657: d_nnz[idx] += 3;
658: d_nnz[idx + 1] += 2;
659: d_nnz[idx + 2] += 2;
660: d_nnz[idx + 3] += 5;
661: d_nnz[idx + 4] += 6;
662: d_nnz[idx + 5] += 6;
664: d_nnz[user->neqs_gen + 2 * gbus[i]] += 3;
665: d_nnz[user->neqs_gen + 2 * gbus[i] + 1] += 3;
667: d_nnz[idx + 6] += 2;
668: d_nnz[idx + 7] += 2;
669: d_nnz[idx + 8] += 5;
671: idx = idx + 9;
672: }
673: start = user->neqs_gen;
675: for (i = 0; i < nbus; i++) {
676: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
677: d_nnz[start + 2 * i] += ncols;
678: d_nnz[start + 2 * i + 1] += ncols;
679: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
680: }
681: PetscCall(MatSeqAIJSetPreallocation(J, 0, d_nnz));
682: PetscCall(PetscFree(d_nnz));
683: PetscFunctionReturn(PETSC_SUCCESS);
684: }
686: /*
687: J = [df_dx, df_dy
688: dg_dx, dg_dy]
689: */
690: PetscErrorCode ResidualJacobian(Vec X, Mat J, Mat B, void *ctx)
691: {
692: Userctx *user = (Userctx *)ctx;
693: Vec Xgen, Xnet;
694: const PetscScalar *xgen, *xnet;
695: PetscInt i, idx = 0;
696: PetscScalar Vr, Vi, Vm, Vm2;
697: PetscScalar Eqp, Edp, delta; /* Generator variables */
698: PetscScalar Efd;
699: PetscScalar Id, Iq; /* Generator dq axis currents */
700: PetscScalar Vd, Vq;
701: PetscScalar val[10];
702: PetscInt row[2], col[10];
703: PetscInt net_start = user->neqs_gen;
704: PetscScalar Zdq_inv[4], det;
705: PetscScalar dVd_dVr, dVd_dVi, dVq_dVr, dVq_dVi, dVd_ddelta, dVq_ddelta;
706: PetscScalar dIGr_ddelta, dIGi_ddelta, dIGr_dId, dIGr_dIq, dIGi_dId, dIGi_dIq;
707: PetscScalar dSE_dEfd;
708: PetscScalar dVm_dVd, dVm_dVq, dVm_dVr, dVm_dVi;
709: PetscInt ncols;
710: const PetscInt *cols;
711: const PetscScalar *yvals;
712: PetscInt k;
713: PetscScalar PD, QD, Vm0, Vm4;
714: const PetscScalar *v0;
715: PetscScalar dPD_dVr, dPD_dVi, dQD_dVr, dQD_dVi;
716: PetscScalar dIDr_dVr, dIDr_dVi, dIDi_dVr, dIDi_dVi;
718: PetscFunctionBegin;
719: PetscCall(MatZeroEntries(B));
720: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
721: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
723: PetscCall(VecGetArrayRead(Xgen, &xgen));
724: PetscCall(VecGetArrayRead(Xnet, &xnet));
726: /* Generator subsystem */
727: for (i = 0; i < ngen; i++) {
728: Eqp = xgen[idx];
729: Edp = xgen[idx + 1];
730: delta = xgen[idx + 2];
731: Id = xgen[idx + 4];
732: Iq = xgen[idx + 5];
733: Efd = xgen[idx + 6];
735: /* fgen[idx] = (-Eqp - (Xd[i] - Xdp[i])*Id + Efd)/Td0p[i]; */
736: row[0] = idx;
737: col[0] = idx;
738: col[1] = idx + 4;
739: col[2] = idx + 6;
740: val[0] = -1 / Td0p[i];
741: val[1] = -(Xd[i] - Xdp[i]) / Td0p[i];
742: val[2] = 1 / Td0p[i];
744: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
746: /* fgen[idx+1] = (-Edp + (Xq[i] - Xqp[i])*Iq)/Tq0p[i]; */
747: row[0] = idx + 1;
748: col[0] = idx + 1;
749: col[1] = idx + 5;
750: val[0] = -1 / Tq0p[i];
751: val[1] = (Xq[i] - Xqp[i]) / Tq0p[i];
752: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
754: /* fgen[idx+2] = w - w_s; */
755: row[0] = idx + 2;
756: col[0] = idx + 2;
757: col[1] = idx + 3;
758: val[0] = 0;
759: val[1] = 1;
760: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
762: /* fgen[idx+3] = (TM[i] - Edp*Id - Eqp*Iq - (Xqp[i] - Xdp[i])*Id*Iq - D[i]*(w - w_s))/M[i]; */
763: row[0] = idx + 3;
764: col[0] = idx;
765: col[1] = idx + 1;
766: col[2] = idx + 3;
767: col[3] = idx + 4;
768: col[4] = idx + 5;
769: val[0] = -Iq / M[i];
770: val[1] = -Id / M[i];
771: val[2] = -D[i] / M[i];
772: val[3] = (-Edp - (Xqp[i] - Xdp[i]) * Iq) / M[i];
773: val[4] = (-Eqp - (Xqp[i] - Xdp[i]) * Id) / M[i];
774: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
776: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
777: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
778: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
780: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
782: Zdq_inv[0] = Rs[i] / det;
783: Zdq_inv[1] = Xqp[i] / det;
784: Zdq_inv[2] = -Xdp[i] / det;
785: Zdq_inv[3] = Rs[i] / det;
787: dVd_dVr = PetscSinScalar(delta);
788: dVd_dVi = -PetscCosScalar(delta);
789: dVq_dVr = PetscCosScalar(delta);
790: dVq_dVi = PetscSinScalar(delta);
791: dVd_ddelta = Vr * PetscCosScalar(delta) + Vi * PetscSinScalar(delta);
792: dVq_ddelta = -Vr * PetscSinScalar(delta) + Vi * PetscCosScalar(delta);
794: /* fgen[idx+4] = Zdq_inv[0]*(-Edp + Vd) + Zdq_inv[1]*(-Eqp + Vq) + Id; */
795: row[0] = idx + 4;
796: col[0] = idx;
797: col[1] = idx + 1;
798: col[2] = idx + 2;
799: val[0] = -Zdq_inv[1];
800: val[1] = -Zdq_inv[0];
801: val[2] = Zdq_inv[0] * dVd_ddelta + Zdq_inv[1] * dVq_ddelta;
802: col[3] = idx + 4;
803: col[4] = net_start + 2 * gbus[i];
804: col[5] = net_start + 2 * gbus[i] + 1;
805: val[3] = 1;
806: val[4] = Zdq_inv[0] * dVd_dVr + Zdq_inv[1] * dVq_dVr;
807: val[5] = Zdq_inv[0] * dVd_dVi + Zdq_inv[1] * dVq_dVi;
808: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
810: /* fgen[idx+5] = Zdq_inv[2]*(-Edp + Vd) + Zdq_inv[3]*(-Eqp + Vq) + Iq; */
811: row[0] = idx + 5;
812: col[0] = idx;
813: col[1] = idx + 1;
814: col[2] = idx + 2;
815: val[0] = -Zdq_inv[3];
816: val[1] = -Zdq_inv[2];
817: val[2] = Zdq_inv[2] * dVd_ddelta + Zdq_inv[3] * dVq_ddelta;
818: col[3] = idx + 5;
819: col[4] = net_start + 2 * gbus[i];
820: col[5] = net_start + 2 * gbus[i] + 1;
821: val[3] = 1;
822: val[4] = Zdq_inv[2] * dVd_dVr + Zdq_inv[3] * dVq_dVr;
823: val[5] = Zdq_inv[2] * dVd_dVi + Zdq_inv[3] * dVq_dVi;
824: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
826: dIGr_ddelta = Id * PetscCosScalar(delta) - Iq * PetscSinScalar(delta);
827: dIGi_ddelta = Id * PetscSinScalar(delta) + Iq * PetscCosScalar(delta);
828: dIGr_dId = PetscSinScalar(delta);
829: dIGr_dIq = PetscCosScalar(delta);
830: dIGi_dId = -PetscCosScalar(delta);
831: dIGi_dIq = PetscSinScalar(delta);
833: /* fnet[2*gbus[i]] -= IGi; */
834: row[0] = net_start + 2 * gbus[i];
835: col[0] = idx + 2;
836: col[1] = idx + 4;
837: col[2] = idx + 5;
838: val[0] = -dIGi_ddelta;
839: val[1] = -dIGi_dId;
840: val[2] = -dIGi_dIq;
841: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
843: /* fnet[2*gbus[i]+1] -= IGr; */
844: row[0] = net_start + 2 * gbus[i] + 1;
845: col[0] = idx + 2;
846: col[1] = idx + 4;
847: col[2] = idx + 5;
848: val[0] = -dIGr_ddelta;
849: val[1] = -dIGr_dId;
850: val[2] = -dIGr_dIq;
851: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
853: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
855: /* fgen[idx+6] = (-KE[i]*Efd - SE + VR)/TE[i]; */
856: /* SE = k1[i]*PetscExpScalar(k2[i]*Efd); */
858: dSE_dEfd = k1[i] * k2[i] * PetscExpScalar(k2[i] * Efd);
860: row[0] = idx + 6;
861: col[0] = idx + 6;
862: col[1] = idx + 8;
863: val[0] = (-KE[i] - dSE_dEfd) / TE[i];
864: val[1] = 1 / TE[i];
865: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
867: /* Exciter differential equations */
869: /* fgen[idx+7] = (-RF + KF[i]*Efd/TF[i])/TF[i]; */
870: row[0] = idx + 7;
871: col[0] = idx + 6;
872: col[1] = idx + 7;
873: val[0] = (KF[i] / TF[i]) / TF[i];
874: val[1] = -1 / TF[i];
875: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
877: /* fgen[idx+8] = (-VR + KA[i]*RF - KA[i]*KF[i]*Efd/TF[i] + KA[i]*(Vref[i] - Vm))/TA[i]; */
878: /* Vm = (Vd^2 + Vq^2)^0.5; */
880: row[0] = idx + 8;
881: if (VRatmax[i]) {
882: col[0] = idx + 8;
883: val[0] = 1.0;
884: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
885: } else if (VRatmin[i]) {
886: col[0] = idx + 8;
887: val[0] = -1.0;
888: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
889: } else {
890: dVm_dVd = Vd / Vm;
891: dVm_dVq = Vq / Vm;
892: dVm_dVr = dVm_dVd * dVd_dVr + dVm_dVq * dVq_dVr;
893: dVm_dVi = dVm_dVd * dVd_dVi + dVm_dVq * dVq_dVi;
894: row[0] = idx + 8;
895: col[0] = idx + 6;
896: col[1] = idx + 7;
897: col[2] = idx + 8;
898: val[0] = -(KA[i] * KF[i] / TF[i]) / TA[i];
899: val[1] = KA[i] / TA[i];
900: val[2] = -1 / TA[i];
901: col[3] = net_start + 2 * gbus[i];
902: col[4] = net_start + 2 * gbus[i] + 1;
903: val[3] = -KA[i] * dVm_dVr / TA[i];
904: val[4] = -KA[i] * dVm_dVi / TA[i];
905: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
906: }
907: idx = idx + 9;
908: }
910: for (i = 0; i < nbus; i++) {
911: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
912: row[0] = net_start + 2 * i;
913: for (k = 0; k < ncols; k++) {
914: col[k] = net_start + cols[k];
915: val[k] = yvals[k];
916: }
917: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
918: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
920: PetscCall(MatGetRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
921: row[0] = net_start + 2 * i + 1;
922: for (k = 0; k < ncols; k++) {
923: col[k] = net_start + cols[k];
924: val[k] = yvals[k];
925: }
926: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
927: PetscCall(MatRestoreRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
928: }
930: PetscCall(MatAssemblyBegin(J, MAT_FLUSH_ASSEMBLY));
931: PetscCall(MatAssemblyEnd(J, MAT_FLUSH_ASSEMBLY));
933: PetscCall(VecGetArrayRead(user->V0, &v0));
934: for (i = 0; i < nload; i++) {
935: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
936: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
937: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
938: Vm2 = Vm * Vm;
939: Vm4 = Vm2 * Vm2;
940: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
941: PD = QD = 0.0;
942: dPD_dVr = dPD_dVi = dQD_dVr = dQD_dVi = 0.0;
943: for (k = 0; k < ld_nsegsp[i]; k++) {
944: PD += ld_alphap[k] * PD0[i] * PetscPowScalar((Vm / Vm0), ld_betap[k]);
945: dPD_dVr += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar((1 / Vm0), ld_betap[k]) * Vr * PetscPowScalar(Vm, (ld_betap[k] - 2));
946: dPD_dVi += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar((1 / Vm0), ld_betap[k]) * Vi * PetscPowScalar(Vm, (ld_betap[k] - 2));
947: }
948: for (k = 0; k < ld_nsegsq[i]; k++) {
949: QD += ld_alphaq[k] * QD0[i] * PetscPowScalar((Vm / Vm0), ld_betaq[k]);
950: dQD_dVr += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar((1 / Vm0), ld_betaq[k]) * Vr * PetscPowScalar(Vm, (ld_betaq[k] - 2));
951: dQD_dVi += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar((1 / Vm0), ld_betaq[k]) * Vi * PetscPowScalar(Vm, (ld_betaq[k] - 2));
952: }
954: /* IDr = (PD*Vr + QD*Vi)/Vm2; */
955: /* IDi = (-QD*Vr + PD*Vi)/Vm2; */
957: dIDr_dVr = (dPD_dVr * Vr + dQD_dVr * Vi + PD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vr) / Vm4;
958: dIDr_dVi = (dPD_dVi * Vr + dQD_dVi * Vi + QD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vi) / Vm4;
960: dIDi_dVr = (-dQD_dVr * Vr + dPD_dVr * Vi - QD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vr) / Vm4;
961: dIDi_dVi = (-dQD_dVi * Vr + dPD_dVi * Vi + PD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vi) / Vm4;
963: /* fnet[2*lbus[i]] += IDi; */
964: row[0] = net_start + 2 * lbus[i];
965: col[0] = net_start + 2 * lbus[i];
966: col[1] = net_start + 2 * lbus[i] + 1;
967: val[0] = dIDi_dVr;
968: val[1] = dIDi_dVi;
969: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
970: /* fnet[2*lbus[i]+1] += IDr; */
971: row[0] = net_start + 2 * lbus[i] + 1;
972: col[0] = net_start + 2 * lbus[i];
973: col[1] = net_start + 2 * lbus[i] + 1;
974: val[0] = dIDr_dVr;
975: val[1] = dIDr_dVi;
976: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
977: }
978: PetscCall(VecRestoreArrayRead(user->V0, &v0));
980: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
981: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
983: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
985: PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
986: PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
987: PetscFunctionReturn(PETSC_SUCCESS);
988: }
990: /*
991: J = [I, 0
992: dg_dx, dg_dy]
993: */
994: PetscErrorCode AlgJacobian(SNES snes, Vec X, Mat A, Mat B, void *ctx)
995: {
996: Userctx *user = (Userctx *)ctx;
998: PetscFunctionBegin;
999: PetscCall(ResidualJacobian(X, A, B, ctx));
1000: PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
1001: PetscCall(MatZeroRowsIS(A, user->is_diff, 1.0, NULL, NULL));
1002: PetscFunctionReturn(PETSC_SUCCESS);
1003: }
1005: /*
1006: J = [df_dx, df_dy
1007: dg_dx, dg_dy]
1008: */
1010: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec X, Mat A, Mat B, void *ctx)
1011: {
1012: Userctx *user = (Userctx *)ctx;
1014: PetscFunctionBegin;
1015: user->t = t;
1017: PetscCall(ResidualJacobian(X, A, B, user));
1019: PetscFunctionReturn(PETSC_SUCCESS);
1020: }
1022: /*
1023: J = [df_dx-aI, df_dy
1024: dg_dx, dg_dy]
1025: */
1027: PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A, Mat B, Userctx *user)
1028: {
1029: PetscScalar atmp = (PetscScalar)a;
1030: PetscInt i, row;
1032: PetscFunctionBegin;
1033: user->t = t;
1035: PetscCall(RHSJacobian(ts, t, X, A, B, user));
1036: PetscCall(MatScale(B, -1.0));
1037: for (i = 0; i < ngen; i++) {
1038: row = 9 * i;
1039: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1040: row = 9 * i + 1;
1041: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1042: row = 9 * i + 2;
1043: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1044: row = 9 * i + 3;
1045: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1046: row = 9 * i + 6;
1047: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1048: row = 9 * i + 7;
1049: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1050: row = 9 * i + 8;
1051: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1052: }
1053: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1054: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1055: PetscFunctionReturn(PETSC_SUCCESS);
1056: }
1058: int main(int argc, char **argv)
1059: {
1060: TS ts;
1061: SNES snes_alg;
1062: PetscMPIInt size;
1063: Userctx user;
1064: PetscViewer Xview, Ybusview, viewer;
1065: Vec X, F_alg;
1066: Mat J, A;
1067: PetscInt i, idx, *idx2;
1068: Vec Xdot;
1069: PetscScalar *x, *mat, *amat;
1070: const PetscScalar *rmat;
1071: Vec vatol;
1072: PetscInt *direction;
1073: PetscBool *terminate;
1074: const PetscInt *idx3;
1075: PetscScalar *vatoli;
1076: PetscInt k;
1078: PetscFunctionBeginUser;
1079: PetscCall(PetscInitialize(&argc, &argv, "petscoptions", help));
1080: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
1081: PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs");
1083: user.neqs_gen = 9 * ngen; /* # eqs. for generator subsystem */
1084: user.neqs_net = 2 * nbus; /* # eqs. for network subsystem */
1085: user.neqs_pgrid = user.neqs_gen + user.neqs_net;
1087: /* Create indices for differential and algebraic equations */
1089: PetscCall(PetscMalloc1(7 * ngen, &idx2));
1090: for (i = 0; i < ngen; i++) {
1091: idx2[7 * i] = 9 * i;
1092: idx2[7 * i + 1] = 9 * i + 1;
1093: idx2[7 * i + 2] = 9 * i + 2;
1094: idx2[7 * i + 3] = 9 * i + 3;
1095: idx2[7 * i + 4] = 9 * i + 6;
1096: idx2[7 * i + 5] = 9 * i + 7;
1097: idx2[7 * i + 6] = 9 * i + 8;
1098: }
1099: PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 7 * ngen, idx2, PETSC_COPY_VALUES, &user.is_diff));
1100: PetscCall(ISComplement(user.is_diff, 0, user.neqs_pgrid, &user.is_alg));
1101: PetscCall(PetscFree(idx2));
1103: /* Read initial voltage vector and Ybus */
1104: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "X.bin", FILE_MODE_READ, &Xview));
1105: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "Ybus.bin", FILE_MODE_READ, &Ybusview));
1107: PetscCall(VecCreate(PETSC_COMM_WORLD, &user.V0));
1108: PetscCall(VecSetSizes(user.V0, PETSC_DECIDE, user.neqs_net));
1109: PetscCall(VecLoad(user.V0, Xview));
1111: PetscCall(MatCreate(PETSC_COMM_WORLD, &user.Ybus));
1112: PetscCall(MatSetSizes(user.Ybus, PETSC_DECIDE, PETSC_DECIDE, user.neqs_net, user.neqs_net));
1113: PetscCall(MatSetType(user.Ybus, MATBAIJ));
1114: /* PetscCall(MatSetBlockSize(user.Ybus,2)); */
1115: PetscCall(MatLoad(user.Ybus, Ybusview));
1117: /* Set run time options */
1118: PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Transient stability fault options", "");
1119: {
1120: user.tfaulton = 1.0;
1121: user.tfaultoff = 1.2;
1122: user.Rfault = 0.0001;
1123: user.setisdiff = PETSC_FALSE;
1124: user.semiexplicit = PETSC_FALSE;
1125: user.faultbus = 8;
1126: PetscCall(PetscOptionsReal("-tfaulton", "", "", user.tfaulton, &user.tfaulton, NULL));
1127: PetscCall(PetscOptionsReal("-tfaultoff", "", "", user.tfaultoff, &user.tfaultoff, NULL));
1128: PetscCall(PetscOptionsInt("-faultbus", "", "", user.faultbus, &user.faultbus, NULL));
1129: user.t0 = 0.0;
1130: user.tmax = 5.0;
1131: PetscCall(PetscOptionsReal("-t0", "", "", user.t0, &user.t0, NULL));
1132: PetscCall(PetscOptionsReal("-tmax", "", "", user.tmax, &user.tmax, NULL));
1133: PetscCall(PetscOptionsBool("-setisdiff", "", "", user.setisdiff, &user.setisdiff, NULL));
1134: PetscCall(PetscOptionsBool("-dae_semiexplicit", "", "", user.semiexplicit, &user.semiexplicit, NULL));
1135: }
1136: PetscOptionsEnd();
1138: PetscCall(PetscViewerDestroy(&Xview));
1139: PetscCall(PetscViewerDestroy(&Ybusview));
1141: /* Create DMs for generator and network subsystems */
1142: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_gen, 1, 1, NULL, &user.dmgen));
1143: PetscCall(DMSetOptionsPrefix(user.dmgen, "dmgen_"));
1144: PetscCall(DMSetFromOptions(user.dmgen));
1145: PetscCall(DMSetUp(user.dmgen));
1146: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_net, 1, 1, NULL, &user.dmnet));
1147: PetscCall(DMSetOptionsPrefix(user.dmnet, "dmnet_"));
1148: PetscCall(DMSetFromOptions(user.dmnet));
1149: PetscCall(DMSetUp(user.dmnet));
1150: /* Create a composite DM packer and add the two DMs */
1151: PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &user.dmpgrid));
1152: PetscCall(DMSetOptionsPrefix(user.dmpgrid, "pgrid_"));
1153: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmgen));
1154: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmnet));
1156: PetscCall(DMCreateGlobalVector(user.dmpgrid, &X));
1158: PetscCall(MatCreate(PETSC_COMM_WORLD, &J));
1159: PetscCall(MatSetSizes(J, PETSC_DECIDE, PETSC_DECIDE, user.neqs_pgrid, user.neqs_pgrid));
1160: PetscCall(MatSetFromOptions(J));
1161: PetscCall(PreallocateJacobian(J, &user));
1163: /* Create matrix to save solutions at each time step */
1164: user.stepnum = 0;
1166: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, 1002, NULL, &user.Sol));
1167: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1168: Create timestepping solver context
1169: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1170: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1171: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1172: if (user.semiexplicit) {
1173: PetscCall(TSSetType(ts, TSRK));
1174: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &user));
1175: PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, &user));
1176: } else {
1177: PetscCall(TSSetType(ts, TSCN));
1178: PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1));
1179: PetscCall(TSSetIFunction(ts, NULL, (TSIFunction)IFunction, &user));
1180: PetscCall(TSSetIJacobian(ts, J, J, (TSIJacobian)IJacobian, &user));
1181: }
1182: PetscCall(TSSetApplicationContext(ts, &user));
1184: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1185: Set initial conditions
1186: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1187: PetscCall(SetInitialGuess(X, &user));
1188: /* Just to set up the Jacobian structure */
1190: PetscCall(VecDuplicate(X, &Xdot));
1191: PetscCall(IJacobian(ts, 0.0, X, Xdot, 0.0, J, J, &user));
1192: PetscCall(VecDestroy(&Xdot));
1194: /* Save initial solution */
1196: idx = user.stepnum * (user.neqs_pgrid + 1);
1197: PetscCall(MatDenseGetArray(user.Sol, &mat));
1198: PetscCall(VecGetArray(X, &x));
1200: mat[idx] = 0.0;
1202: PetscCall(PetscArraycpy(mat + idx + 1, x, user.neqs_pgrid));
1203: PetscCall(MatDenseRestoreArray(user.Sol, &mat));
1204: PetscCall(VecRestoreArray(X, &x));
1205: user.stepnum++;
1207: PetscCall(TSSetMaxTime(ts, user.tmax));
1208: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
1209: PetscCall(TSSetTimeStep(ts, 0.01));
1210: PetscCall(TSSetFromOptions(ts));
1211: PetscCall(TSSetPostStep(ts, SaveSolution));
1212: PetscCall(TSSetSolution(ts, X));
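/* Event directions: the fault-on/off events (fvalue = t - tfault) cross zero from below, so
   direction = +1; the VR upper-limit events (fvalue = VRMAX - VR) cross zero from above, so
   direction = -1; the VR lower-limit events (fvalue = VRMIN - VR) cross from below, so
   direction = +1. No event terminates the run. */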
1214: PetscCall(PetscMalloc1((2 * ngen + 2), &direction));
1215: PetscCall(PetscMalloc1((2 * ngen + 2), &terminate));
1216: direction[0] = direction[1] = 1;
1217: terminate[0] = terminate[1] = PETSC_FALSE;
1218: for (i = 0; i < ngen; i++) {
1219: direction[2 + 2 * i] = -1;
1220: direction[2 + 2 * i + 1] = 1;
1221: terminate[2 + 2 * i] = terminate[2 + 2 * i + 1] = PETSC_FALSE;
1222: }
1224: PetscCall(TSSetEventHandler(ts, 2 * ngen + 2, direction, terminate, EventFunction, PostEventFunction, (void *)&user));
1226: if (user.semiexplicit) {
1227: /* Use a semi-explicit approach with the time-stepping done by an explicit method and the
1228: algebraic part solved via the PostStage and PostEvaluate callbacks
1229: */
1230: PetscCall(TSSetType(ts, TSRK));
1231: PetscCall(TSSetPostStage(ts, PostStage));
1232: PetscCall(TSSetPostEvaluate(ts, PostEvaluate));
1233: }
1235: if (user.setisdiff) {
1236: /* Create vector of absolute tolerances and set the algebraic part to infinity */
1237: PetscCall(VecDuplicate(X, &vatol));
1238: PetscCall(VecSet(vatol, 100000.0));
1239: PetscCall(VecGetArray(vatol, &vatoli));
1240: PetscCall(ISGetIndices(user.is_diff, &idx3));
1241: for (k = 0; k < 7 * ngen; k++) vatoli[idx3[k]] = 1e-2;
1242: PetscCall(VecRestoreArray(vatol, &vatoli));
1243: }
1245: /* Create the nonlinear solver for solving the algebraic system */
1246: /* Note that although the algebraic system needs to be solved only for
1247: Idq and V, we reuse the entire system including xgen. The xgen
1248: variables are held constant by setting their residuals to 0 and
1249: putting a 1 on the Jacobian diagonal for xgen rows
1250: */
1252: PetscCall(VecDuplicate(X, &F_alg));
1253: PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes_alg));
1254: PetscCall(SNESSetFunction(snes_alg, F_alg, AlgFunction, &user));
1255: PetscCall(SNESSetJacobian(snes_alg, J, J, AlgJacobian, &user));
1256: PetscCall(SNESSetFromOptions(snes_alg));
1258: user.snes_alg = snes_alg;
1260: /* Solve */
1261: PetscCall(TSSolve(ts, X));
1263: PetscCall(MatAssemblyBegin(user.Sol, MAT_FINAL_ASSEMBLY));
1264: PetscCall(MatAssemblyEnd(user.Sol, MAT_FINAL_ASSEMBLY));
1266: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, user.stepnum, NULL, &A));
1267: PetscCall(MatDenseGetArrayRead(user.Sol, &rmat));
1268: PetscCall(MatDenseGetArray(A, &amat));
1269: PetscCall(PetscArraycpy(amat, rmat, user.stepnum * (user.neqs_pgrid + 1)));
1270: PetscCall(MatDenseRestoreArray(A, &amat));
1271: PetscCall(MatDenseRestoreArrayRead(user.Sol, &rmat));
1272: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, "out.bin", FILE_MODE_WRITE, &viewer));
1273: PetscCall(MatView(A, viewer));
1274: PetscCall(PetscViewerDestroy(&viewer));
1275: PetscCall(MatDestroy(&A));
1277: PetscCall(PetscFree(direction));
1278: PetscCall(PetscFree(terminate));
1279: PetscCall(SNESDestroy(&snes_alg));
1280: PetscCall(VecDestroy(&F_alg));
1281: PetscCall(MatDestroy(&J));
1282: PetscCall(MatDestroy(&user.Ybus));
1283: PetscCall(MatDestroy(&user.Sol));
1284: PetscCall(VecDestroy(&X));
1285: PetscCall(VecDestroy(&user.V0));
1286: PetscCall(DMDestroy(&user.dmgen));
1287: PetscCall(DMDestroy(&user.dmnet));
1288: PetscCall(DMDestroy(&user.dmpgrid));
1289: PetscCall(ISDestroy(&user.is_diff));
1290: PetscCall(ISDestroy(&user.is_alg));
1291: PetscCall(TSDestroy(&ts));
1292: if (user.setisdiff) PetscCall(VecDestroy(&vatol));
1293: PetscCall(PetscFinalize());
1294: return 0;
1295: }
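/*
   A minimal post-processing sketch (not part of ex9bus.c; it would live in a separate source
   file): it loads the dense matrix written to out.bin above, in which each column holds the
   time in the first entry followed by the solution vector for that saved step. The choice to
   simply print the matrix is an illustrative assumption.
*/
#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat         Sol;
  PetscViewer viewer;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, "out.bin", FILE_MODE_READ, &viewer));
  PetscCall(MatCreate(PETSC_COMM_SELF, &Sol));
  PetscCall(MatSetType(Sol, MATSEQDENSE));
  PetscCall(MatLoad(Sol, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  /* Each column is [t; X(t)]; here we just dump it to stdout */
  PetscCall(MatView(Sol, PETSC_VIEWER_STDOUT_SELF));
  PetscCall(MatDestroy(&Sol));
  PetscCall(PetscFinalize());
  return 0;
}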
1297: /*TEST
1299: build:
1300: requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
1302: test:
1303: suffix: implicit
1304: args: -ts_monitor -snes_monitor_short
1305: localrunfiles: petscoptions X.bin Ybus.bin
1307: test:
1308: suffix: semiexplicit
1309: args: -ts_monitor -snes_monitor_short -dae_semiexplicit -ts_rk_type 2a
1310: localrunfiles: petscoptions X.bin Ybus.bin
1312: test:
1313: suffix: steprestart
1314: # needs ARKIMEX methods with all implicit stages since the mass matrix is not the identity
1315: args: -ts_monitor -snes_monitor_short -ts_type arkimex -ts_arkimex_type prssp2
1316: localrunfiles: petscoptions X.bin Ybus.bin
1318: TEST*/