49
50
51
53 USE elbufdef_mod
54
55
56
57#include "implicit_f.inc"
58
59
60
61#include "parit_c.inc"
62
63
64
65 INTEGER, INTENT(IN) :: NFT,NLAY,NPTR,NPTS,IR,IS,
66 . NEL,IMAT,ITASK,ICR,ICS,ICT
67 INTEGER, DIMENSION(NEL), INTENT(IN) :: NC1,NC2,NC3,NC4,NC5,NC6,NC7,NC8
68 my_real,
DIMENSION(NEL),
INTENT(IN) ::
69 . offg
71 . dt2t
72 my_real,
DIMENSION(NEL),
INTENT(IN) ::
74 TYPE(NLOCAL_STR_), INTENT(INOUT) :: NLOC_DMG
75 my_real,
DIMENSION(9,9),
INTENT(IN) ::
76 . ws,as
77 my_real,
DIMENSION(NEL,NPTR*NPTS*NLAY),
INTENT(INOUT) ::
78 . var_reg,px1,px2,px3,px4,px5,px6,px7,px8,
79 . py1,py2,py3,py4,py5,py6,py7,py8,pz1,pz2,
80 . pz3,pz4,pz5,pz6,pz7,pz8
81 TYPE(BUF_NLOCTS_), INTENT(INOUT), TARGET :: BUFNLTS
83 . dt1,dt12
84 INTEGER, INTENT(IN) :: NODADT,IDTMIN(102)
85 my_real,
INTENT(IN) :: dtfac1(102),dtmin1(102)
86
87
88
89 INTEGER I,J,II,K,NNOD,N1,N2,N3,N4,N5,N6,N7,N8,
90 . L_NLOC,IP,NDDL,NDNOD
92 . l2,xi,b1,b2,b3,b4,b5,b6,b7,b8,h(8),
93 . a1,a2,a3,a4,a5,a6,a7,a8,
94 . c1,c2,c3,c4,c5,c6,c7,c8,
95 . zeta,sspnl,dtnl,le_max,maxstif,
96 . zr,zs,zt,pr(8),ps(8),pt(8),wi,
97 . bth1,bth2,nth1,nth2,dt2p,dtnod,
98 . k1,k2,k12,dtnl_th,minmasscal
100 . lc,thk,lthk
101 my_real,
DIMENSION(NEL,NLAY) ::
102 . f1,f2,f3,f4,f5,f6,f7,f8
103 my_real,
DIMENSION(:,:) ,
ALLOCATABLE ::
104 . sti1,sti2,sti3,sti4,sti5,sti6,sti7,sti8
105 my_real,
DIMENSION(:) ,
ALLOCATABLE ::
106 . btb11,btb12,btb13,btb14,btb15,btb16,btb17
107 . btb22,btb23,btb24,btb25,btb26,btb27,btb28,btb33,
108 . btb34,btb35,btb36,btb37,btb38,btb44,btb45,btb46,
109 . btb47,btb48,btb55,btb56,btb57,btb58,btb66,btb67,
110 . btb68,btb77,btb78,btb88
111 INTEGER, DIMENSION(:), ALLOCATABLE ::
112 . POS1,POS2,POS3,POS4,POS5,POS6,POS7,POS8
113 my_real,
DIMENSION(:,:),
ALLOCATABLE ::
114 . stifnlth,dtn
115
116
117 my_real,
PARAMETER :: csta = 40.0d0
118
119 my_real,
PARAMETER :: cdamp = 0.7d0
121 . zth(10,9)
122
123 DATA zth /
124 1 0. ,0. ,0. ,
125 1 0. ,0. ,0. ,
126 1 0. ,0. ,0. ,
127 1 0. ,
128 2 -1. ,0. ,1. ,
129 2 0. ,0. ,0. ,
130 2 0. ,0. ,0. ,
131 2 0. ,
132 3 -1. ,-.549193338482966,0.549193338482966,
133 3 1. ,0. ,0. ,
134 3 0. ,0. ,0. ,
135 3 0. ,
136 4 -1. ,-.600558677589454,0. ,
137 4 0.600558677589454,1. ,0. ,
138 4 0. ,0. ,0. ,
139 4 0. ,
140 5 -1. ,-.812359691877328,-.264578928334038,
141 5 0.264578928334038,0.812359691877328,1. ,
142 5 0. ,0. ,0. ,
143 5 0. ,
144 6 -1. ,-.796839450334708,-.449914286274731,
145 6 0. ,0.449914286274731,0.796839450334708,
146 6 1. ,0.
147 6 0. ,
148 7 -1. ,-.898215824685518,-.584846546513270,
149 7 -.226843756241524,0.226843756241524,0.584846546513270,
150 7 0.898215824685518,1. ,0. ,
151 7 0. ,
152 8 -1. ,-.878478166955581,-.661099443664978,
153 8 -.354483526205989,0. ,0.354483526205989,
154 8 0.661099443664978,0.878478166955581,1. ,
155 8 0. ,
156 9 -1. ,-.936320479015252,-.735741735638020,
157 9 -.491001129763160,-.157505717044458,0.157505717044458,
158 9 0.491001129763160,0.735741735638020,0.936320479015252,
159 9 1. /
160
161 l2 = nloc_dmg%LEN(imat)**2
162 xi = nloc_dmg%DAMP(imat)
163 nnod = nloc_dmg%NNOD
164 l_nloc = nloc_dmg%L_NLOC
165 zeta = nloc_dmg%DENS(imat)
166 sspnl = nloc_dmg%SSPNL(imat)
167 le_max = nloc_dmg%LE_MAX(imat)
168 lc(1:nel) = zero
169 ALLOCATE(
170 . btb11(nel),btb12(nel),btb13(nel),btb14(nel),btb15(nel),
171 . btb16(nel),btb17(nel),btb18(nel),btb22(nel),btb23(nel),btb24(nel),
172 . btb25(nel),btb26(nel),btb27(nel),btb28(nel),btb33(nel),btb34(nel),
173 . btb35(nel),btb36(nel),btb37(nel),btb38(nel),btb44(nel),btb45(nel),
174 . btb46(nel),btb47(nel),btb48(nel),btb55(nel),btb56(nel),btb57(nel),
175 . btb58(nel),btb66(nel),btb67(nel),btb68(nel),btb77(nel),btb78(nel),
176 . btb88(nel),pos1(nel),pos2(nel),pos3(nel),pos4(nel),pos5(nel),
177 . pos6(nel),pos7(nel),pos8(nel))
178
179 IF (nodadt > 0) THEN
180
181 ALLOCATE(sti1(nel,nlay),sti2(nel,nlay),sti3(nel,nlay),sti4(nel,nlay),
182 . sti5(nel,nlay),sti6(nel,nlay),sti7(nel,nlay),sti8(nel,nlay))
183 ELSE
184 ALLOCATE(sti1(1,1),sti2(1,1),sti3(1,1),sti4(1,1),
185 . sti5(1,1),sti6(1,1),sti7(1,1),sti8(1,1))
186 ENDIF
187
188
189
190
191
192#include "vectorize.inc"
193 DO i=1,nel
194
195
196 n1 = nloc_dmg%IDXI(nc1(i))
197 n2 = nloc_dmg%IDXI(nc2(i))
198 n3 = nloc_dmg%IDXI(nc3(i))
199 n4 = nloc_dmg%IDXI(nc4(i))
200 n5 = nloc_dmg%IDXI(nc5(i))
201 n6 = nloc_dmg%IDXI(nc6(i))
202 n7 = nloc_dmg%IDXI(nc7(i))
203 n8 = nloc_dmg%IDXI(nc8(i))
204
205
206 pos1(i) = nloc_dmg%POSI(n1)
207 pos2(i) = nloc_dmg%POSI(n2)
208 pos3(i) = nloc_dmg%POSI(n3)
209 pos4(i) = nloc_dmg%POSI(n4)
210 pos5(i) = nloc_dmg%POSI(n5)
211 pos6(i) = nloc_dmg%POSI(n6)
212 pos7(i) = nloc_dmg%POSI(n7)
213 pos8(i) = nloc_dmg%POSI(n8)
214
215 ENDDO
216
217
218
219
220 IF ((l2>zero).AND.(nlay > 1)) THEN
221
222
223 DO i = 1,nel
224 thk(i) = vol(i)/
area(i)
225 lthk(i) = (zth(nlay+1,nlay)-zth(nlay,nlay))*thk(i)*half
226 ENDDO
227
228
229 nddl = nlay
230 IF (nodadt > 0) THEN
231 ALLOCATE(stifnlth(nel,nddl+1))
232 ALLOCATE(dtn(nel,nddl+1))
233 ELSE
234 ALLOCATE(dtn(1,1))
235 ALLOCATE(stifnlth(1,1))
236 dtn(1,1) = ep20
237 stifnlth(1,1) = ep20
238 ENDIF
239 ndnod = nddl+1
240
241 DO k = 1,ndnod
242 DO i = 1,nel
243
244 bufnlts%FNLTH(i,k) = zero
245
246 IF (nodadt > 0) THEN
247 stifnlth(i,k) = em20
248 ENDIF
249 ENDDO
250 ENDDO
251
252
253 DO k = 1, nddl
254
255
256 ip = ir + ((is-1) + (k-1)*npts)*nptr
257
258
259 nth1 = (as(k,nddl) - zth(k+1,nddl)) /
260 . (zth(k,nddl) - zth(k+1,nddl))
261 nth2 = (as(k,nddl) - zth(k,nddl)) /
262 . (zth(k+1,nddl) - zth(k,nddl))
263
264
265 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
266
267
268 DO i = 1,nel
269
270
271 bth1 = (one/(zth(k,nddl) - zth(k+1,nddl)))*(two/thk(i))
272 bth2 = (one/(zth(k+1,nddl) - zth(k,nddl)))*(two/thk(i))
273
274
275 k1 = l2*(bth1**2) + nth1**2
276 k12 = l2*(bth1*bth2)+ (nth1*nth2)
277 k2 = l2*(bth2**2) + nth2**2
278
279
280 bufnlts%FNLTH(i,k) = bufnlts%FNLTH(i,k)
281 . + (k1*bufnlts%UNLTH(i,k) + k12*bufnlts%UNLTH(i,k+1)
282 . + xi*((nth1**2)*bufnlts%VNLTH(i,k)
283 . + (nth1*nth2)*bufnlts%VNLTH(i,k+1))
284 . - (nth1*var_reg(i,ip
285 bufnlts%FNLTH(i,k+1) = bufnlts%FNLTH(i,k+1)
286 . + (k12*bufnlts%UNLTH(i,k) + k2*bufnlts%UNLTH(i,k+1)
287 . + xi*(nth1*nth2*bufnlts%VNLTH
288 . + (nth2**2)*bufnlts%VNLTH(i,k+1))
289 . - nth2*var_reg(i,ip))*wi*vol(i)
290
291
292 IF (nodadt > 0) THEN
293 stifnlth(i,k) = stifnlth(i,k) +
max(abs(k1)+abs(k12),abs(k12)+abs(k2))*wi*vol(i)
294 stifnlth(i,k+1) = stifnlth(i,k+1) +
max(abs
295 ENDIF
296
297 ENDDO
298 ENDDO
299
300
301 IF (nodadt > 0) THEN
302
303
304 dtnod = ep20
305 DO k = 1,ndnod
306 DO i = 1,nel
307 dtn(i,k) = dtfac1(11)*cdamp*sqrt(two*bufnlts%MASSTH(i,k)/
max(stifnlth(i,k),em20))
308 dtnod =
min(dtn(i,k),dtnod)
309 ENDDO
310 ENDDO
311
312
313 IF ((idtmin(11)==3).OR.(idtmin(11)==4).OR.(idtmin(11)==8)) THEN
314
315 IF (dtnod < dtmin1(11)*(sqrt(csta))) THEN
316 DO k = 1,ndnod
317 DO i = 1,nel
318 IF (dtn(i,k) < dtmin1(11)) THEN
319 dt2p = dtmin1(11)/(dtfac1(11)*cdamp)
320 bufnlts%MASSTH(i,k) =
max(bufnlts%MASSTH(i,k),csta*half*stifnlth(i,k)*dt2p*dt2p*onep00001)
321 ENDIF
322 ENDDO
323 ENDDO
324 ENDIF
325 dtnod = dtmin1(11)*(sqrt(csta))
326 ENDIF
327
328
329 IF (dtnod < dt2t) THEN
330 dt2t =
min(dt2t,dtnod)
331 ENDIF
332 ENDIF
333
334 DO k = 1,ndnod
335 DO i = 1,nel
336
337 bufnlts%VNLTH(i,k) = bufnlts%VNLTH(i,k) - (bufnlts%FNLTH(i,k)/bufnlts%MASSTH(i,k))*dt12
338 ENDDO
339 ENDDO
340
341 DO k = 1,ndnod
342 DO i = 1,nel
343
344 bufnlts%UNLTH(i,k) = bufnlts%UNLTH(i,k) + bufnlts%VNLTH(i,k)*dt1
345 ENDDO
346 ENDDO
347
348
349 DO k = 1, nddl
350
351 ip = ir + ((is-1) + (k-1)*npts)*nptr
352
353 nth1 = (as(k,nddl) - zth(k+1,nddl))/
354 . (zth(k,nddl) - zth(k+1,nddl))
355 nth2 = (as(k,nddl) - zth(k,nddl))/
356 . (zth(k+1,nddl) - zth(k,nddl))
357
358 DO i = 1,nel
359
360 var_reg(i,ip) = nth1*bufnlts%UNLTH(i,k) + nth2*bufnlts%UNLTH(i,k+1)
361 ENDDO
362 ENDDO
363 ENDIF
364
365
366
367
368
369 DO k = 1,nlay
370
371
372 IF (ict == 1) THEN
373 zr = as(ir,nptr)
374 zs = as(is,npts)
375 zt = as(k,nlay)
376 ELSEIF (ics == 1) THEN
377 zr = as(ir,nptr)
378 zs = as(k,nlay)
379 zt = as(is,npts)
380 ELSEIF (icr == 1) THEN
381 zr = as(k,nlay)
382 zs = as(ir,nptr)
383 zt = as(is,npts)
384 ENDIF
385 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
386
387
388 CALL basis8 (zr,zs,zt,h,pr,ps,pt)
389
390
391 ip = ir + ((is-1) + (k-1)*npts)*nptr
392
393
394#include "vectorize.inc"
395 DO i=1,nel
396
397
398 IF (offg(i) /= zero) THEN
399
400
401 btb11(i) = px1(i,ip)**2 + py1(i,ip)**2 + pz1(i,ip)**2
402 btb12(i) = px1(i,ip)*px2(i,ip) + py1(i,ip)*py2(i,ip) + pz1(i,ip)*pz2(i,ip)
403 btb13(i) = px1(i,ip)*px3(i,ip) + py1
404 btb14(i) = px1(i,ip)*px4(i,ip) + py1(i,ip)*py4(i,ip) + pz1(i,ip)*pz4(i,ip)
405 btb15(i) = px1(i,ip)*px5(i,ip) + py1(i,ip)*py5(i,ip) + pz1(i,ip)*pz5(i,ip)
406 btb16(i) = px1(i,ip)*px6(i,ip) + py1(i,ip)*py6(i,ip) + pz1(i,ip)*pz6(i,ip)
407 btb17(i) = px1(i,ip)*px7(i,ip) + py1(i,ip)*py7(i,ip) + pz1(i,ip)*pz7(i,ip)
408 btb18(i) = px1(i,ip)*px8(i,ip) + py1(i,ip)*py8(i,ip) + pz1(i,ip)*pz8(i,ip)
409 btb22(i) = px2(i,ip)**2 + py2(i,ip)**2 + pz2(i,ip)**2
410 btb23(i) = px2(i,ip)*px3(i,ip) + py2(i,ip)*py3(i,ip) + pz2(i,ip)*pz3(i,ip)
411 btb24(i) = px2(i,ip
412 btb25(i) = px2(i,ip)*px5(i,ip) + py2(i,ip)*py5(i,ip) + pz2(i,ip)*pz5(i,ip)
413 btb26(i) = px2(i,ip)*px6(i,ip) + py2(i,ip)*py6(i,ip) + pz2(i,ip)*pz6(i,ip)
414 btb27(i) = px2(i,ip)*px7(i,ip) + py2(i,ip)*py7(i,ip) + pz2(i,ip)*pz7(i,ip)
415 btb28(i) = px2(i,ip)*px8(i,ip) + py2(i,ip)*py8(i,ip) + pz2(i,ip)*pz8(i,ip)
416 btb33(i) = px3(i,ip)**2 + py3(i,ip)**2 + pz3(i,ip)**2
417 btb34(i) = px3(i,ip)*px4(i,ip) + py3(i,ip)*py4(i,ip) + pz3(i,ip)*pz4(i,ip)
418 btb35(i) = px3(i,ip)*px5(i,ip) + py3(i,ip)*py5(i,ip) + pz3(i,ip)*pz5(i,ip)
419 btb36(i) = px3(i,ip)*px6(i,ip) + py3(i,ip)*py6(i,ip) + pz3(i,ip)*pz6(i,ip)
420 btb37(i) = px3(i,ip)*px7(i,ip) + py3(i,ip)*py7(i,ip) + pz3(i,ip)*pz7(i,ip)
421 btb38(i) = px3(i,ip)*px8(i,ip) + py3(i,ip)*py8(i,ip) + pz3(i,ip)*pz8(i,ip)
422 btb44(i) = px4(i,ip)**2 + py4(i,ip)**2 + pz4(i,ip)**2
423 btb45(i) = px4(i,ip)*px5(i,ip) + py4(i,ip)*py5(i,ip) + pz4(i,ip)*pz5(i,ip)
424 btb46(i) = px4(i,ip)*px6(i,ip) + py4(i,ip)*py6(i,ip) + pz4(i,ip)*pz6(i,ip)
425 btb47(i) = px4(i,ip)*px7(i,ip) + py4(i,ip)*py7(i,ip) + pz4(i,ip)*pz7(i,ip)
426 btb48(i) = px4(i,ip)*px8(i,ip) + py4(i,ip)*py8(i,ip) + pz4(i,ip)*pz8(i,ip)
427 btb55(i) = px5(i,ip)**2 + py5(i,ip)**2 + pz5(i,ip)**2
428 btb56(i) = px5(i,ip)*px6(i,ip) + py5(i,ip)*py6(i,ip) + pz5(i,ip)*pz6(i,ip)
429 btb57(i) = px5(i,ip)*px7(i,ip) + py5(i,ip)*py7(i,ip) + pz5(i,ip)*pz7(i,ip)
430 btb58(i) = px5(i,ip)*px8(i,ip) + py5(i,ip)*py8(i,ip) + pz5(i,ip)*pz8(i,ip)
431 btb66(i) = px6(i,ip)**2 + py6(i,ip)**2 + pz6(i,ip)**2
432 btb67(i) = px6(i,ip)*px7(i,ip) + py6(i,ip)*py7(i,ip) + pz6(i,ip)*pz7(i,ip)
433 btb68(i) = px6(i,ip)*px8(i,ip) + py6(i,ip)*py8(i,ip) + pz6(i,ip)*pz8(i,ip)
434 btb77(i) = px7(i,ip)**2 + py7(i,ip)**2 + pz7(i,ip)**2
435 btb78(i) = px7(i,ip)*px8(i,ip) + py7(i,ip)*py8(i,ip) + pz7(i,ip)*pz8(i,ip)
436 btb88(i) = px8(i,ip)**2 + py8(i,ip)**2 + pz8(i,ip)**2
437
438
439 a1 = vol(i) * wi * (h(1)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
440 . h(1)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
441 . h(1)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(1)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
442 . h(1)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(1)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
443
444 IF (nodadt == 0) THEN
445 a1 = a1 + vol(i) * wi * xi * (h(1)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
446 . h(1)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
447 . h(1)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(1)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
448 . h(1)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(1)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
449 ELSE
450 minmasscal =
min(sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1)),
451 . sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1)),
452 . sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1)),
453 . sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1)),
454 . sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1)),
455 . sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1)),
456 . sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1)),
457 . sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1)))
458 a1 = a1 + vol(i) * wi * xi * (
459 . h(1)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
460 . h(1)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
461 . h(1)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
462 . h(1)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
463 . h(1)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
464 . h(1)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
465 . h(1)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
466 . h(1)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
467 ENDIF
468
469 b1 = l2 * vol(i) * wi * (btb11(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb12(i)*nloc_dmg%UNL(pos2(i)+k-1) +
470 . btb13(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb14(i)*nloc_dmg%UNL(pos4(i)+k-1) +
471 . btb15(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb16(i)*nloc_dmg%UNL
472 . btb17(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb18(i)*nloc_dmg%UNL(pos8(i)+k-1))
473
474 c1 = vol(i) * wi * h(1) * var_reg(i,ip)
475
476 a2 = vol(i) * wi * (h(2)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
477 . h(2)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
478 . h(2)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
479 . h(2)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(2)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
480
481 IF (nodadt == 0) THEN
482 a2 = a2 + vol(i) * wi * xi * (h(2)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
483 . h(2)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
484 . h(2)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
485 . h(2)*h(7)*nloc_dmg%VNL(pos7(i)+k
486 ELSE
487 a2 = a2 + vol(i) * wi * xi * (
488 . h(2)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
489 . h(2)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
490 . h(2)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
491 . h(2)*h(4)*minmasscal*nloc_dmg%VNL(pos4
492 . h(2)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
493 . h(2)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
494 . h(2)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
495 . h(2)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
496 ENDIF
497
498 b2 = l2 * vol(i) * wi * (btb12(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb22(i)*nloc_dmg%UNL(pos2(i)+k-1) +
499 . btb23(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb24(i)*nloc_dmg%UNL(pos4(i)+k-1) +
500 . btb25(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos6(i)+k-1) +
501 . btb27(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb28(i)*nloc_dmg%UNL(pos8
502
503 c2 = vol(i) * wi * h(2) * var_reg(i,ip)
504
505 a3 = vol(i) * wi * (h(3)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
506 . h(3)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
507 . h(3)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
508 . h(3)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(3)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
509
510 IF (nodadt == 0) THEN
511 a3 = a3 + vol(i) * wi * xi * (h(3)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
512 . h(3)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
513 . h(3)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
514 . h(3)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(3)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
515 ELSE
516 a3 = a3 + vol(i) * wi * xi * (
517 . h(3)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
518 . h(3)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
519 . h(3)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
520 . h(3)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
521 . h(3)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
522 . h(3)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
523 . h(3)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
524 . h(3)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
525 ENDIF
526
527 b3 = l2 * vol(i) * wi * (btb13(i)*nloc_dmg%UNL
528 . btb33(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb34(i)*nloc_dmg%UNL(pos4(i)+k-1) +
529 . btb35(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb36(i)*nloc_dmg%UNL(pos6(i)+k-1) +
530 . btb37(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb38(i)*nloc_dmg%UNL(pos8(i)+k-1))
531
532 c3 = vol(i) * wi * h(3) * var_reg(i,ip)
533
534 a4 = vol(i) * wi * (h(4)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
535 . h(4)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
536 . h(4)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(4)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
537 . h(4)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
538
539 IF (nodadt == 0) THEN
540 a4 = a4 + vol(i) * wi * xi * (h(4)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
541 . h(4)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
542 . h(4)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(4)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
543 . h(4)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
544 ELSE
545 a4 = a4 + vol(i) * wi * xi * (
546 . h(4)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
547 . h(4)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
548 . h(4)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
549 . h(4)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
550 . h(4)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
551 . h(4)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
552 . h(4)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
553 . h(4)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
554 ENDIF
555
556 b4 = l2 * vol(i) * wi * (btb14(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb24(i)*nloc_dmg%UNL(pos2(i)+k-1) +
557 . btb34(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb44(i)*nloc_dmg%UNL(pos4(i)+k-1) +
558 . btb45(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb46(i)*nloc_dmg%UNL(pos6(i)+k-1) +
559 . btb47(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb48(i)*nloc_dmg%UNL(pos8(i)+k-1))
560
561 c4 = vol(i) * wi * h(4) * var_reg(i,ip)
562
563 a5 = vol(i) * wi * (h(5)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(5)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
564 . h(5)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(5)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
565 . h(5)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
566 . h(5)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
567
568 IF (nodadt == 0) THEN
569 a5 = a5 + vol(i) * wi * xi * (h(5)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(5)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
570 . h(5)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(5)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
571 . h(5)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
572 . h(5)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
573 ELSE
574 a5 = a5 + vol(i) * wi * xi * (
575 . h(5)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
576 . h(5)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
577 . h(5)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
578 . h(5)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
579 . h(5)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
580 . h(5)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
581 . h(5)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
582 . h(5)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
583 ENDIF
584
585 b5 = l2 * vol(i) * wi * (btb15(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb25(i)*nloc_dmg%UNL(pos2(i)+k-1) +
586 . btb35(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb45(i)*nloc_dmg%UNL(pos4(i)+k-1) +
587 . btb55(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb56(i)*nloc_dmg%UNL
588 . btb57(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb58(i)*nloc_dmg%UNL(pos8(i)+k-1))
589
590 c5 = vol(i) * wi * h(5) * var_reg(i,ip)
591
592 a6 = vol(i) * wi * (h(6)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(6)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
593 . h(6)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(6)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
594 . h(6)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(6)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
595 . h(6)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(6)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
596
597 IF (nodadt == 0) THEN
598 a6 = a6 + vol(i) * wi * xi * (h(6)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(6)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
599 . h(6)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(6)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
600 . h(6)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(6)*h(6)*nloc_dmg%VNL(pos6(i)+k
601 . h(6)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(6)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
602 ELSE
603 a6 = a6 + vol(i) * wi * xi * (
604 . h(6)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
605 . h(6)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
606 . h(6)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
607 . h(6)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
608 . h(6)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
609 . h(6)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
610 . h(6)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
611 . h(6)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
612 ENDIF
613
614 b6 = l2 * vol(i) * wi * (btb16(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos2(i)+k-1) +
615 . btb36(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb46(i)*nloc_dmg%UNL(pos4(i)+k-1) +
616 . btb56(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb66(i)*nloc_dmg%UNL(pos6(i)+k-1) +
617 . btb67(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos8(i)+k-1))
618
619 c6 = vol(i) * wi * h(6) * var_reg(i,ip)
620
621 a7 = vol(i) * wi * (h(7)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
622 . h(7)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
623 . h(7)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(7)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
624 . h(7)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(7)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
625
626 IF (nodadt == 0) THEN
627 a7 = a7 + vol(i) * wi * xi * (h(7)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
628 . h(7)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
629 . h(7)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(7)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
630 . h(7)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(7)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
631 ELSE
632 a7 = a7 + vol(i) * wi * xi * (
633 . h(7)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
634 . h(7)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
635 . h(7)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
636 . h(7)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
637 . h(7)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
638 . h(7)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
639 . h(7)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
640 . h(7)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
641 ENDIF
642
643 b7 = l2 * vol(i) * wi * (btb17(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb27(i)*nloc_dmg%UNL(pos2(i)+k-1) +
644 . btb37(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb47(i)*nloc_dmg%UNL(pos4(i)+k-1) +
645 . btb57(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb67(i)*nloc_dmg%UNL(pos6(i)+k-1) +
646 . btb77(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb78(i)*nloc_dmg%UNL(pos8(i)+k-1))
647
648 c7 = vol(i) * wi * h(7) * var_reg(i,ip)
649
650 a8 = vol(i) * wi * (h(8)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
651 . h(8)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
652 . h(8)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
653 . h(8)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(8)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
654
655 IF (nodadt == 0) THEN
656 a8 = a8 + vol(i) * wi * xi * (h(8)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
657 . h(8)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
658 . h(8)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
659 . h(8)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(8)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
660 ELSE
661 a8 = a8 + vol(i) * wi * xi * (
662 . h(8)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
663 . h(8)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
664 . h(8)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
665 . h(8)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
666 . h(8)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
667 . h(8)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
668 . h(8)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
669 . h(8)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
670 ENDIF
671
672 b8 = l2 * vol(i) * wi * (btb18(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb28(i)*nloc_dmg%UNL(pos2(i)+k-1) +
673 . btb38(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb48(i)*nloc_dmg%UNL(pos4(i)+k-1) +
674 . btb58(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos6(i)+k-1) +
675 . btb78(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb88(i)*nloc_dmg%UNL(pos8(i)+k-1))
676
677 c8 = vol(i) * wi * h(8) * var_reg(i,ip)
678
679
680
681 f1(i,k) = a1 + b1 - c1
682 f2(i,k) = a2 + b2 - c2
683 f3(i,k) = a3 + b3 - c3
684 f4(i,k) = a4 + b4 - c4
685 f5(i,k) = a5 + b5 - c5
686 f6(i,k) = a6 + b6 - c6
687 f7(i,k) = a7 + b7 - c7
688 f8(i,k) = a8 + b8 - c8
689
690
691 IF (nodadt > 0) THEN
692 sti1(i,k) = (abs(l2*btb11(i) + h(1)*h(1)) + abs(l2*btb12(i) + h(1)*h(2)) + abs(l2*btb13(i) + h(1)*h(3)) +
693 . abs(l2*btb14(i) + h(1)*h(4)) + abs(l2*btb15(i) + h(1)*h(5)) + abs(l2*btb16(i) + h(1)*h(6)) +
694 . abs(l2*btb17(i) + h(1)*h(7)) + abs(l2*btb18(i) + h(1)*h(8)))*vol(i) * wi
695 sti2(i,k) = (abs(l2*btb12(i) + h(2)*h(1)) + abs(l2*btb22(i) + h(2)*h(2)) + abs(l2*btb23(i) + h(2)*h(3)) +
696 . abs(l2*btb24(i) + h(2)*h(4)) + abs(l2*btb25(i) + h(2)*h(5)) + abs(l2*btb26(i) + h(2)*h(6)) +
697 . abs(l2*btb27(i) + h(2)*h(7)) + abs(l2*btb28(i) + h(2)*h(8)))*vol(i) * wi
698 sti3(i,k) = (abs(l2*btb13(i) + h(3)*h(1)) + abs(l2*btb23(i) + h(3)*h(2)) + abs(l2*btb33(i) + h(3)*h(3)) +
699 . abs(l2*btb34(i) + h(3)*h(4)) + abs(l2*btb35(i) + h(3)*h(5)) + abs(l2*btb36(i) + h(3)*h(6)) +
700 . abs(l2*btb37(i) + h(3)*h(7)) + abs(l2*btb38(i) + h(3)*h(8)))*vol(i) * wi
701 sti4(i,k) = (abs(l2*btb14(i) + h(4)*h(1)) + abs(l2*btb24(i) + h(4)*h(2)) + abs(l2*btb34(i) + h(4)*h(3)) +
702 . abs(l2*btb44(i) + h(4)*h(4)) + abs(l2*btb45(i) + h(4)*h(5)) + abs(l2*btb46(i) + h(4)*h(6)) +
703 . abs(l2*btb47(i) + h(4)*h(7)) + abs(l2*btb48(i) + h(4)*h(8)))*vol(i) * wi
704 sti5(i,k) = (abs(l2*btb15(i) + h(5)*h(1)) + abs(l2*btb25(i) + h(5)*h(2)) + abs(l2*btb35(i) + h(5)*h(3)) +
705 . abs(l2*btb45(i) + h(5)*h(4)) + abs(l2*btb55(i) + h(5)*h(5)) + abs(l2*btb56(i) + h(5)*h(6)) +
706 . abs(l2*btb57(i) + h(5)*h(7)) + abs(l2*btb58(i) + h(5)*h(8)))*vol(i) * wi
707 sti6(i,k) = (abs(l2*btb16(i) + h(6)*h(1)) + abs(l2*btb26(i) + h(6)*h(2)) + abs(l2*btb36(i) + h(6)*h(3)) +
708 . abs(l2*btb46(i) + h(6)*h(4)) + abs(l2*btb56(i) + h(6)*h(5)) + abs(l2*btb66(i) + h(6)*h(6)) +
709 . abs(l2*btb67(i) + h(6)*h(7)) + abs(l2*btb68(i) + h(6)*h(8)))*vol(i) * wi
710 sti7(i,k) = (abs(l2*btb17(i) + h(7)*h(1)) + abs(l2*btb27(i) + h(7)*h(2)) + abs(l2*btb37(i) + h(7)*h(3)) +
711 . abs(l2*btb47(i) + h(7)*h(4)) + abs(l2*btb57(i) + h(7)*h(5)) + abs(l2*btb67(i) + h(7)*h(6)) +
712 . abs(l2*btb77(i) + h(7)*h(7)) + abs(l2*btb78(i) + h(7)*h(8)))*vol(i) * wi
713 sti8(i,k) = (abs(l2*btb18(i) + h(8)*h(1)) + abs(l2*btb28(i) + h(8)*h(2)) + abs(l2*btb38(i) + h(8)*h(3)) +
714 . abs(l2*btb48(i) + h(8)*h(4)) + abs(l2*btb58(i) + h(8)*h(5)) + abs(l2*btb68(i) + h(8)*h(6)) +
715 . abs(l2*btb78(i) + h(8)*h(7)) + abs(l2*btb88(i) + h(8)*h(8)))*vol(i) * wi
716 ENDIF
717
718
719 ELSE
720
721
722 lc(i) = (wi*vol0(i))**third
723
724 IF (nodadt > 0) THEN
725
726 f1(i,k) = sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1))*h(1)*zeta*sspnl*half*
727 . (nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i
728 f2(i,k) = sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1))*h(2)*zeta*sspnl*half*
729 . (nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
730 f3(i,k) = sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1))*h(3)*zeta*sspnl
731 . (nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three
732 f4(i,k) = sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1))*h(4)*zeta*sspnl*half*
733 . (nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
734 f5(i,k) = sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1))*h(5)*zeta*sspnl*half*
735 . (nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
736 f6(i,k) = sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1))*h(6)*zeta*sspnl*half*
737 . (nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
738 f7(i,k) = sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1))*h(7)*zeta*sspnl*half*
739 . (nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
740 f8(i,k) = sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1))*h(8)*zeta*sspnl*half*
741 . (nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
742
743 sti1(i,k) = em20
744 sti2(i,k) = em20
745 sti3(i,k) = em20
746 sti4(i,k) = em20
747 sti5(i,k) = em20
748 sti6(i,k) = em20
749 sti7(i,k) = em20
750 sti8(i,k) = em20
751 ELSE
752
753 f1(i,k) = h(1)*zeta*sspnl*half*(nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i)+k-1))*(three/four)*(lc(i)**2)
754 f2(i,k) = h(2)*zeta*sspnl*half*(nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
755 f3(i,k) = h(3)*zeta*sspnl*half*(nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three/four)*(lc(i)**2)
756 f4(i,k) = h(4)*zeta*sspnl*half*(nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
757 f5(i,k) = h(5)*zeta*sspnl*half*(nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
758 f6(i,k) = h(6)*zeta*sspnl*half*(nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
759 f7(i,k) = h(7)*zeta*sspnl*half*(nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
760 f8(i,k) = h(8)*zeta*sspnl*half*(nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
761 ENDIF
762 ENDIF
763 ENDDO
764 ENDDO
765
766
767
768
769
770
771 IF (iparit == 0) THEN
772
773 DO i=1,nel
774
775#include "vectorize.inc"
776 DO k=1,nlay
777
778 nloc_dmg%FNL(pos1(i)+k-1,itask+1) = nloc_dmg%FNL(pos1(i)+k-1,itask+1) - f1(i,k)
779 nloc_dmg%FNL(pos2(i)+k-1,itask+1) = nloc_dmg%FNL(pos2(i)+k-1,itask+1) - f2(i,k)
780 nloc_dmg%FNL(pos3(i)+k-1,itask+1) = nloc_dmg%FNL(pos3(i)+k-1,itask+1) - f3(i,k)
781 nloc_dmg%FNL(pos4(i)+k-1,itask+1) = nloc_dmg%FNL(pos4
782 nloc_dmg%FNL(pos5(i)+k-1,itask+1) = nloc_dmg%FNL(pos5(i)+k-1,itask+1) - f5(i,k)
783 nloc_dmg%FNL(pos6(i)+k-1,itask+1) = nloc_dmg%FNL(pos6(i)+k-1,itask+1) - f6(i,k)
784 nloc_dmg%FNL(pos7(i)+k-1,itask+1) = nloc_dmg%FNL(pos7(i)+k-1,itask+1) - f7(i,k)
785 nloc_dmg%FNL(pos8(i)+k-1,itask+1) = nloc_dmg%FNL(pos8(i)+k-1,itask+1) - f8(i,k)
786 IF (nodadt > 0) THEN
787
788 maxstif =
max(sti1(i,k),sti2(i,k),sti3(i,k),sti4(i,k),
789 . sti5(i,k),sti6(i,k),sti7(i,k),sti8(i,k))
790
791 nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) + maxstif
792 nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) + maxstif
793 nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) + maxstif
794 nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) + maxstif
795 nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) + maxstif
796 nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) + maxstif
797 nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) + maxstif
798 nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) + maxstif
799 ENDIF
800 ENDDO
801 ENDDO
802
803
!     Parith/ON path: scatter the nonlocal forces (and, for the nodal
!     time-step option, the stiffnesses) into the skyline assembly
!     arrays FSKY/STSKY through the IADS indirection table.
!     NOTE(review): the first integration point of the element
!     (ip == 1) initializes the skyline slot; the following points
!     accumulate into it.
      ELSE
        DO j = 1,nlay
!         Global number of integration point (ir,is) in layer j
          ip = ir + ((is-1) + (j-1)*npts)*nptr
          DO i = 1,nel
            ii = i + nft
!           Maximal nodal stiffness of the element (nodal time step)
            IF (nodadt > 0) THEN
              maxstif = MAX(sti1(i,j),sti2(i,j),sti3(i,j),sti4(i,j),
     .                      sti5(i,j),sti6(i,j),sti7(i,j),sti8(i,j))
            ENDIF
!           --- node 1
            k = nloc_dmg%IADS(1,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f1(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f1(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 2
            k = nloc_dmg%IADS(2,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f2(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f2(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 3
            k = nloc_dmg%IADS(3,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f3(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f3(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 4
            k = nloc_dmg%IADS(4,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f4(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f4(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 5
            k = nloc_dmg%IADS(5,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f5(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f5(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 6
            k = nloc_dmg%IADS(6,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f6(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f6(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 7
            k = nloc_dmg%IADS(7,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f7(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f7(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
!           --- node 8
            k = nloc_dmg%IADS(8,ii)
            IF (ip == 1) THEN
              nloc_dmg%FSKY(k,j) = -f8(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
            ELSE
              nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f8(i,j)
              IF (nodadt > 0) nloc_dmg%STSKY(k,j) =
     .                        nloc_dmg%STSKY(k,j) + maxstif
            ENDIF
          ENDDO
        ENDDO
      ENDIF
896
897
898
899
!     Elementary time-step computation for the nonlocal variable
!     (used only when the nodal time step is disabled).  The critical
!     length is the element characteristic length capped by le_max;
!     the resulting step is damped by cdamp and scaled by the safety
!     factor dtfac1(1) before updating the global minimum dt2t.
      IF (nodadt == 0) THEN
        DO i = 1,nel
!         Deactivated elements (offg = 0) are skipped
          IF (offg(i) /= zero) THEN
!           In-plane / volumetric critical time step
            dtnl = (two*(MIN(vol(i)**third,le_max))*SQRT(three*zeta))/
     .             SQRT(twelve*l2 + (MIN(vol(i)**third,le_max))**2)
!           Thickness-direction critical time step; only relevant for
!           a nonzero internal length with several layers
            IF ((l2 > zero).AND.(nlay > 1)) THEN
              dtnl_th = (two*(MIN(lthk(i),le_max))*SQRT(three*zeta))/
     .                  SQRT(twelve*l2 + (MIN(lthk(i),le_max))**2)
            ELSE
              dtnl_th = ep20
            ENDIF
            dt2t = MIN(dt2t,dtfac1(1)*cdamp*dtnl_th,
     .                 dtfac1(1)*cdamp*dtnl)
          ENDIF
        ENDDO
      ENDIF
919
920
!     Release the local work arrays (B^T.B terms, nodal positions and
!     stiffnesses) before leaving the routine.
      IF (ALLOCATED(btb11)) DEALLOCATE(btb11)
      IF (ALLOCATED(btb12)) DEALLOCATE(btb12)
      IF (ALLOCATED(btb13)) DEALLOCATE(btb13)
      IF (ALLOCATED(btb14)) DEALLOCATE(btb14)
      IF (ALLOCATED(btb15)) DEALLOCATE(btb15)
      IF (ALLOCATED(btb16)) DEALLOCATE(btb16)
      IF (ALLOCATED(btb17)) DEALLOCATE(btb17)
      IF (ALLOCATED(btb18)) DEALLOCATE(btb18)
      IF (ALLOCATED(btb22)) DEALLOCATE(btb22)
      IF (ALLOCATED(btb23)) DEALLOCATE(btb23)
      IF (ALLOCATED(btb24)) DEALLOCATE(btb24)
      IF (ALLOCATED(btb25)) DEALLOCATE(btb25)
      IF (ALLOCATED(btb26)) DEALLOCATE(btb26)
      IF (ALLOCATED(btb27)) DEALLOCATE(btb27)
      IF (ALLOCATED(btb28)) DEALLOCATE(btb28)
      IF (ALLOCATED(btb33)) DEALLOCATE(btb33)
      IF (ALLOCATED(btb34)) DEALLOCATE(btb34)
      IF (ALLOCATED(btb35)) DEALLOCATE(btb35)
      IF (ALLOCATED(btb36)) DEALLOCATE(btb36)
      IF (ALLOCATED(btb37)) DEALLOCATE(btb37)
      IF (ALLOCATED(btb38)) DEALLOCATE(btb38)
      IF (ALLOCATED(btb44)) DEALLOCATE(btb44)
      IF (ALLOCATED(btb45)) DEALLOCATE(btb45)
      IF (ALLOCATED(btb46)) DEALLOCATE(btb46)
      IF (ALLOCATED(btb47)) DEALLOCATE(btb47)
      IF (ALLOCATED(btb48)) DEALLOCATE(btb48)
      IF (ALLOCATED(btb55)) DEALLOCATE(btb55)
      IF (ALLOCATED(btb56)) DEALLOCATE(btb56)
      IF (ALLOCATED(btb57)) DEALLOCATE(btb57)
      IF (ALLOCATED(btb58)) DEALLOCATE(btb58)
      IF (ALLOCATED(btb66)) DEALLOCATE(btb66)
      IF (ALLOCATED(btb67)) DEALLOCATE(btb67)
      IF (ALLOCATED(btb68)) DEALLOCATE(btb68)
      IF (ALLOCATED(btb77)) DEALLOCATE(btb77)
      IF (ALLOCATED(btb78)) DEALLOCATE(btb78)
      IF (ALLOCATED(btb88)) DEALLOCATE(btb88)
      IF (ALLOCATED(pos1))  DEALLOCATE(pos1)
      IF (ALLOCATED(pos2))  DEALLOCATE(pos2)
      IF (ALLOCATED(pos3))  DEALLOCATE(pos3)
      IF (ALLOCATED(pos4))  DEALLOCATE(pos4)
      IF (ALLOCATED(pos5))  DEALLOCATE(pos5)
      IF (ALLOCATED(pos6))  DEALLOCATE(pos6)
      IF (ALLOCATED(pos7))  DEALLOCATE(pos7)
      IF (ALLOCATED(pos8))  DEALLOCATE(pos8)
      IF (ALLOCATED(sti1))  DEALLOCATE(sti1)
      IF (ALLOCATED(sti2))  DEALLOCATE(sti2)
      IF (ALLOCATED(sti3))  DEALLOCATE(sti3)
      IF (ALLOCATED(sti4))  DEALLOCATE(sti4)
      IF (ALLOCATED(sti5))  DEALLOCATE(sti5)
      IF (ALLOCATED(sti6))  DEALLOCATE(sti6)
      IF (ALLOCATED(sti7))  DEALLOCATE(sti7)
      IF (ALLOCATED(sti8))  DEALLOCATE(sti8)
973
subroutine area(d1, x, x2, y, y2, eint, stif0)
subroutine basis8(r, s, t, h, pr, ps, pt)