34 1 NLOC_DMG,VAR_REG, NEL, OFFG,
43 A PZ8, IMAT, ITASK, DT2T,
44 B VOL0, NFT, NLAY, NPTR,
47 E BUFNLTS, AREA, NODADT, DT1,
48 F DT12, DTFAC1, DTMIN1, IDTMIN)
57#include "implicit_f.inc"
65 INTEGER,
INTENT(IN) :: NFT,NLAY,NPTR,NPTS,IR,IS,
66 . NEL,IMAT,ITASK,ICR,ICS,ICT
67 INTEGER,
DIMENSION(NEL),
INTENT(IN) :: NC1,NC2,NC3,NC4,NC5,NC6,NC7,NC8
68 my_real,
DIMENSION(NEL),
INTENT(IN) ::
72 my_real,
DIMENSION(NEL),
INTENT(IN) ::
75 my_real,
DIMENSION(9,9),
INTENT(IN) ::
77 my_real,
DIMENSION(NEL,NPTR*NPTS*NLAY),
INTENT(INOUT) ::
78 . VAR_REG,PX1,PX2,PX3,PX4,PX5,PX6,PX7,PX8,
79 . PY1,PY2,PY3,PY4,PY5,PY6,PY7,PY8,PZ1,PZ2,
80 . pz3,pz4,pz5,pz6,pz7,pz8
81 TYPE(buf_nlocts_),
INTENT(INOUT),
TARGET :: BUFNLTS
82 my_real,
INTENT(IN) ::
84 INTEGER,
INTENT(IN) :: NODADT,IDTMIN(102)
85 my_real,
INTENT(IN) :: dtfac1(102),dtmin1(102)
89 INTEGER I,J,II,K,NNOD,N1,N2,N3,,N5,N6,N7,N8,
90 . l_nloc,ip,nddl,ndnod
92 . l2,xi,b1,b2,b3,b4,b5,b6,b7,b8,h(8),
93 . a1,a2,a3,a4,a5,a6,a7,a8,
94 . c1,c2,c3,c4,c5,c6,c7,c8,
95 . zeta,sspnl,dtnl,le_max,maxstif,
96 . zr,zs,zt,pr(8),ps(8),pt(8),wi,
97 . bth1,bth2,nth1,nth2,dt2p
98 . k1,k2,k12,dtnl_th,minmasscal
99 my_real,
DIMENSION(NEL) ::
101 my_real,
DIMENSION(NEL,NLAY) ::
102 . f1,f2,f3,f4,f5,f6,f7,f8
103 my_real,
DIMENSION(:,:) ,
ALLOCATABLE ::
104 . sti1,sti2,sti3,sti4,sti5,sti6,sti7,sti8
105 my_real,
DIMENSION(:) ,
ALLOCATABLE ::
106 . btb11,btb12,btb13,btb14,btb15,btb16,btb17,btb18,
107 . btb22,btb23,btb24,btb25,btb26,btb27,btb28,btb33,
108 . btb34,btb35,btb36,btb37,btb38,btb44,btb45,btb46,
109 . btb47,btb48,btb55,btb56,btb57,btb58,btb66,btb67,
110 . btb68,btb77,btb78,btb88
111 INTEGER,
DIMENSION(:),
ALLOCATABLE ::
112 . POS1,POS2,POS3,POS4,POS5,POS6,POS7,POS8
113 my_real,
DIMENSION(:,:),
ALLOCATABLE ::
127 3 -1. ,-.549193338482966,0.549193338482966,
131 4 -1. ,-.600558677589454,0. ,
132 4 0.600558677589454,1. ,0. ,
135 5 -1. ,-.812359691877328,-.264578928334038,
136 5 0.264578928334038,0.812359691877328,1. ,
139 6 -1. ,-.796839450334708,-.449914286274731,
140 6 0. ,0.449914286274731,0.796839450334708,
143 7 -1. ,-.898215824685518,-.584846546513270,
144 7 -.226843756241524,0.226843756241524,0.584846546513270,
145 7 0.898215824685518,1. ,0. ,
147 8 -1. ,-.878478166955581,-.661099443664978,
148 8 -.354483526205989,0. ,0.354483526205989,
149 8 0.661099443664978,0.878478166955581,1. ,
151 9 -1. ,-.936320479015252,-.735741735638020,
152 9 -.491001129763160,-.157505717044458,0.157505717044458,
153 9 0.491001129763160,0.735741735638020,0.936320479015252,
156 l2 = nloc_dmg%LEN(imat)**2
157 xi = nloc_dmg%DAMP(imat)
159 l_nloc = nloc_dmg%L_NLOC
160 zeta = nloc_dmg%DENS(imat)
161 sspnl = nloc_dmg%SSPNL(imat)
162 le_max = nloc_dmg%LE_MAX(imat)
165 . btb11(nel),btb12(nel),btb13(nel),btb14(nel),btb15(nel),
166 . btb16(nel),btb17(nel),btb18(nel),btb22(nel),btb23(nel),btb24(nel),
167 . btb25(nel),btb26(nel),btb27(nel),btb28(nel),btb33(nel),btb34(nel),
168 . btb35(nel),btb36(nel),btb37(nel),btb38(nel),btb44(nel),btb45(nel),
169 . btb46(nel),btb47(nel),btb48(nel),btb55(nel),btb56(nel),btb57(nel),
170 . btb58(nel),btb66(nel),btb67(nel),btb68(nel),btb77(nel),btb78(nel),
171 . btb88(nel),pos1(nel),pos2(nel),pos3(nel),pos4(nel),pos5(nel),
172 . pos6(nel),pos7(nel),pos8(nel))
176 ALLOCATE(sti1(nel,nlay),sti2(nel,nlay),sti3(nel,nlay),sti4(nel,nlay),
177 . sti5(nel,nlay),sti6(nel,nlay),sti7(nel,nlay),sti8(nel,nlay))
179 ALLOCATE(sti1(1,1),sti2(1,1),sti3(1,1),sti4(1,1),
180 . sti5(1,1),sti6(1,1),sti7(1,1),sti8(1,1))
187#include "vectorize.inc"
191 n1 = nloc_dmg%IDXI(nc1(i))
192 n2 = nloc_dmg%IDXI(nc2(i))
193 n3 = nloc_dmg%IDXI(nc3(i))
194 n4 = nloc_dmg%IDXI(nc4(i))
195 n5 = nloc_dmg%IDXI(nc5(i))
196 n6 = nloc_dmg%IDXI(nc6(i))
197 n7 = nloc_dmg%IDXI(nc7(i))
198 n8 = nloc_dmg%IDXI(nc8(i))
201 pos1(i) = nloc_dmg%POSI(n1)
202 pos2(i) = nloc_dmg%POSI(n2)
203 pos3(i) = nloc_dmg%POSI(n3)
204 pos4(i) = nloc_dmg%POSI(n4)
205 pos5(i) = nloc_dmg%POSI(n5)
206 pos6(i) = nloc_dmg%POSI(n6)
207 pos7(i) = nloc_dmg%POSI(n7)
208 pos8(i) = nloc_dmg%POSI(n8)
215 IF ((l2>zero).AND.(nlay > 1))
THEN
219 thk(i) = vol(i)/
area(i)
220 lthk(i) = (zth(nlay+1,nlay)-zth(nlay,nlay))*thk(i)*half
226 ALLOCATE(stifnlth(nel,nddl+1))
227 ALLOCATE(dtn(nel,nddl+1))
230 ALLOCATE(stifnlth(1,1))
239 bufnlts%FNLTH(i,k) = zero
251 ip = ir + ((is-1) + (k-1)*npts)*nptr
254 nth1 = (as(k,nddl) - zth(k+1,nddl)) /
255 . (zth(k,nddl) - zth(k+1,nddl))
256 nth2 = (as(k,nddl) - zth(k,nddl)) /
257 . (zth(k+1,nddl) - zth(k,nddl))
260 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
266 bth1 = (one/(zth(k,nddl) - zth(k+1,nddl)))*(two/thk(i))
267 bth2 = (one/(zth(k+1,nddl) - zth(k,nddl)))*(two/thk(i))
270 k1 = l2*(bth1**2) + nth1**2
271 k12 = l2*(bth1*bth2)+ (nth1*nth2)
272 k2 = l2*(bth2**2) + nth2**2
275 bufnlts%FNLTH(i,k) = bufnlts%FNLTH(i,k)
276 . + (k1*bufnlts%UNLTH(i,k) + k12*bufnlts%UNLTH(i,k+1)
277 . + xi*((nth1**2)*bufnlts%VNLTH(i,k)
278 . + (nth1*nth2)*bufnlts%VNLTH(i,k+1))
279 . - (nth1*var_reg(i,ip)))*wi*vol(i)
280 bufnlts%FNLTH(i,k+1) = bufnlts%FNLTH(i,k+1)
281 . + (k12*bufnlts%UNLTH(i,k) + k2*bufnlts%UNLTH(i,k+1)
282 . + xi*(nth1*nth2*bufnlts%VNLTH(i,k)
283 . + (nth2**2)*bufnlts%VNLTH(i,k+1))
284 . - nth2*var_reg(i,ip))*wi*vol(i)
288 stifnlth(i,k) = stifnlth(i,k) +
max(abs(k1)+abs(k12),abs(k12)+abs(k2))*wi*vol(i)
289 stifnlth(i,k+1) = stifnlth(i,k+1) +
max(abs(k1)+abs(k12),abs(k12)+abs(k2))*wi*vol(i)
302 dtn(i,k) = dtfac1(11)*cdamp*sqrt(two*bufnlts%MASSTH(i,k)/
max(stifnlth(i,k),em20))
303 dtnod =
min(dtn(i,k),dtnod)
308 IF ((idtmin(11)==3).OR.(idtmin(1
THEN
310 IF (dtnod < dtmin1(11)*(sqrt(csta)))
THEN
313 IF (dtn(i,k) < dtmin1(11))
THEN
314 dt2p = dtmin1(11)/(dtfac1(11)*cdamp)
315 bufnlts%MASSTH(i,k) =
max(bufnlts%MASSTH(i,k),csta*half*stifnlth(i,k)*dt2p*dt2p*onep00001)
319 dtnod = dtmin1(11)*(sqrt(csta))
324 IF (dtnod < dt2t)
THEN
325 dt2t =
min(dt2t,dtnod)
332 bufnlts%VNLTH(i,k) = bufnlts%VNLTH(i,k) - (bufnlts%FNLTH(i,k)/bufnlts%MASSTH(i,k))*dt12
339 bufnlts%UNLTH(i,k) = bufnlts%UNLTH(i,k) + bufnlts%VNLTH(i,k)*dt1
346 ip = ir + ((is-1) + (k-1)*npts)*nptr
348 nth1 = (as(k,nddl) - zth(k+1,nddl))/
349 . (zth(k,nddl) - zth(k+1,nddl))
350 nth2 = (as(k,nddl) - zth(k,nddl))/
351 . (zth(k+1,nddl) - zth(k,nddl))
355 var_reg(i,ip) = nth1*bufnlts%UNLTH(i,k) + nth2*bufnlts%UNLTH(i,k+1)
371 ELSEIF (ics == 1)
THEN
375 ELSEIF (icr == 1)
THEN
380 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
383 CALL basis8 (zr,zs,zt,h,pr,ps,pt)
386 ip = ir + ((is-1) + (k-1)*npts)*nptr
389#include "vectorize.inc"
393 IF (offg(i) /= zero)
THEN
396 btb11(i) = px1(i,ip)**2 + py1(i,ip)**2 + pz1(i,ip)**2
397 btb12(i) = px1(i,ip)*px2(i,ip) + py1(i,ip)*py2(i,ip) + pz1(i,ip)*pz2(i,ip)
398 btb13(i) = px1(i,ip)*px3(i,ip) + py1(i,ip)*py3(i,ip) + pz1(i,ip)*pz3(i,ip)
399 btb14(i) = px1(i,ip)*px4(i,ip) + py1(i,ip)*py4(i,ip) + pz1(i,ip)*pz4(i,ip)
400 btb15(i) = px1(i,ip)*px5(i,ip) + py1(i,ip)*py5(i,ip) + pz1(i,ip)*pz5(i,ip)
401 btb16(i) = px1(i,ip)*px6(i,ip) + py1(i,ip)*py6(i,ip) + pz1(i,ip)*pz6(i,ip)
402 btb17(i) = px1(i,ip)*px7(i,ip) + py1(i,ip)*py7(i,ip) + pz1(i,ip)*pz7(i,ip)
403 btb18(i) = px1(i,ip)*px8(i,ip) + py1(i,ip)*py8(i,ip) + pz1(i,ip)*pz8(i,ip)
404 btb22(i) = px2(i,ip)**2 + py2(i,ip)**2 + pz2(i,ip)**2
405 btb23(i) = px2(i,ip)*px3(i,ip) + py2(i,ip)*py3(i,ip) + pz2(i,ip)*pz3(i,ip)
406 btb24(i) = px2(i,ip)*px4(i,ip) + py2(i,ip)*py4(i,ip) + pz2(i,ip)*pz4(i,ip)
407 btb25(i) = px2(i,ip)*px5(i,ip) + py2(i,ip)*py5(i,ip) + pz2(i,ip)*pz5(i,ip)
408 btb26(i) = px2(i,ip)*px6(i,ip) + py2(i,ip)*py6(i,ip) + pz2(i,ip)*pz6(i,ip)
409 btb27(i) = px2(i,ip)*px7(i,ip) + py2(i,ip)*py7(i,ip) + pz2(i,ip)*pz7(i,ip)
410 btb28(i) = px2(i,ip)*px8(i,ip) + py2(i,ip)*py8(i,ip) + pz2(i,ip)*pz8(i,ip)
411 btb33(i) = px3(i,ip)**2 + py3(i,ip)**2 + pz3(i,ip)**2
412 btb34(i) = px3(i,ip)*px4(i,ip) + py3(i,ip)*py4(i,ip) + pz3(i,ip)*pz4(i,ip)
413 btb35(i) = px3(i,ip)*px5(i,ip) + py3(i,ip)*py5(i,ip) + pz3(i,ip)*pz5(i,ip)
414 btb36(i) = px3(i,ip)*px6(i,ip) + py3(i,ip)*py6(i,ip) + pz3(i,ip)*pz6(i,ip)
415 btb37(i) = px3(i,ip)*px7(i,ip) + py3(i,ip)*py7(i,ip) + pz3(i,ip)*pz7(i,ip)
416 btb38(i) = px3(i,ip)*px8(i,ip) + py3(i,ip)*py8(i,ip) + pz3(i,ip)*pz8(i,ip)
417 btb44(i) = px4(i,ip)**2 + py4(i,ip)**2 + pz4(i,ip)**2
418 btb45(i) = px4(i,ip)*px5(i,ip) + py4(i,ip)*py5(i,ip) + pz4(i,ip)*pz5(i,ip)
419 btb46(i) = px4(i,ip)*px6(i,ip) + py4(i,ip)*py6(i,ip) + pz4(i,ip)*pz6(i,ip)
420 btb47(i) = px4(i,ip)*px7(i,ip) + py4(i,ip)*py7(i,ip) + pz4(i,ip)*pz7(i,ip)
421 btb48(i) = px4(i,ip)*px8(i,ip) + py4(i,ip)*py8(i,ip) + pz4(i,ip)*pz8(i,ip)
422 btb55(i) = px5(i,ip)**2 + py5(i,ip)**2 + pz5(i,ip)**2
423 btb56(i) = px5(i,ip)*px6(i,ip) + py5(i,ip)*py6(i,ip) + pz5(i,ip)*pz6(i,ip)
424 btb57(i) = px5(i,ip)*px7(i,ip) + py5(i,ip)*py7(i,ip) + pz5(i,ip)*pz7(i
425 btb58(i) = px5(i,ip)*px8(i,ip) + py5(i,ip)*py8(i,ip)
426 btb66(i) = px6(i,ip)**2 + py6(i,ip)**2 + pz6(i,ip)**2
427 btb67(i) = px6(i,ip)*px7(i,ip) + py6(i,ip)*py7(i,ip) + pz6(i,ip)*pz7(i,ip)
430 btb78(i) = px7(i,ip)*px8(i,ip) + py7(i,ip)*py8(i,ip) + pz7(i,ip)*pz8(i,ip)
431 btb88(i) = px8(i,ip)**2 + py8(i,ip)**2 + pz8
434 a1 = vol(i) * wi * (h(1)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
435 . h(1)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
436 . h(1)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(1)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
437 . h(1)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(1)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
439 IF (nodadt == 0)
THEN
440 a1 = a1 + vol(i) * wi * xi * (h(1)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
441 . h(1)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
445 minmasscal =
min(sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1)),
446 . sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1)),
447 . sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1)),
448 . sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1)),
449 . sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1)),
450 . sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1)),
451 . sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1)),
452 . sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1)))
453 a1 = a1 + vol(i) * wi * xi * (
454 . h(1)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
455 . h(1)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
456 . h(1)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
457 . h(1)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
458 . h(1)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
459 . h(1)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
460 . h(1)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
461 . h(1)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
464 b1 = l2 * vol(i) * wi * (btb11(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb12(i)*nloc_dmg%UNL(pos2(i)+k-1) +
465 . btb13(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb14(i)*nloc_dmg%UNL(pos4(i)+k-1) +
466 . btb15(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb16(i)*nloc_dmg%UNL(pos6(i)+k-1) +
467 . btb17(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb18(i)*nloc_dmg%UNL(pos8(i)+k-1))
469 c1 = vol(i) * wi * h(1) * var_reg(i,ip)
471 a2 = vol(i) * wi * (h(2)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%UNL(pos2
472 . h(2)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
473 . h(2)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%UNL
474 . h(2)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(2)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
476 IF (nodadt == 0)
THEN
477 a2 = a2 + vol(i) * wi * xi * (h(2)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
478 . h(2)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
479 . h(2)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
480 . h(2)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(2)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
482 a2 = a2 + vol(i) * wi * xi * (
483 . h(2)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
484 . h(2)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
485 . h(2)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
486 . h(2)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
487 . h(2)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
488 . h(2)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
489 . h(2)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
490 . h(2)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
493 b2 = l2 * vol(i) * wi * (btb12(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb22(i)*nloc_dmg%UNL(pos2(i)+k-1) +
494 . btb23(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb24(i)*nloc_dmg%UNL(pos4(i)+k-1) +
495 . btb25(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos6(i)+k-1) +
496 . btb27(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb28
498 c2 = vol(i) * wi * h(2) * var_reg(i,ip)
500 a3 = vol(i) * wi * (h(3)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%UNL(pos2
501 . h(3)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
502 . h(3)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%UNL
503 . h(3)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) +
505 IF (nodadt == 0)
THEN
506 a3 = a3 + vol(i) * wi * xi * (h(3)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%VNL(pos2
507 . h(3)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%VNL(pos4
508 . h(3)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
509 . h(3)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(3)*h(8)*nloc_dmg%VNL
511 a3 = a3 + vol(i) * wi * xi * (
512 . h(3)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
513 . h(3)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-
514 . h(3)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
515 . h(3)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
516 . h(3)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
517 . h(3)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
518 . h(3)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
519 . h(3)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
522 b3 = l2 * vol(i) * wi * (btb13(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb23(i)*nloc_dmg%UNL(pos2(i)+k-1) +
523 . btb33(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb34(i)*nloc_dmg%UNL(pos4(i)+k-1) +
524 . btb35(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb36(i)*nloc_dmg%UNL(pos6(i)+k
525 . btb37(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb38(i)
527 c3 = vol(i) * wi * h(3) * var_reg(i,ip)
529 a4 = vol(i) * wi * (h(4)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
530 . h(4)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
531 . h(4)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(4)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
532 . h(4)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
534 IF (nodadt == 0)
THEN
535 a4 = a4 + vol(i) * wi * xi * (h(4)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
536 . h(4)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
537 . h(4)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(4)*h(6
538 . h(4)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%VNL(pos8(i
540 a4 = a4 + vol(i) * wi * xi * (
541 . h(4)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
542 . h(4)*h(2)*minmasscal
543 . h(4)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
544 . h(4)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
545 . h(4)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
546 . h(4)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
547 . h(4)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1
548 . h(4)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
551 b4 = l2 * vol(i) * wi * (btb14(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb24(i
552 . btb34(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb44(i)*nloc_dmg%UNL(pos4(i)+k-1) +
553 . btb45(i)*nloc_dmg%UNL(pos5(i
554 . btb47(i)*nloc_dmg%UNL(pos7(i
556 c4 = vol(i) * wi * h(4) * var_reg(i,ip)
558 a5 = vol(i) * wi * (h(5)*h(1)*nloc_dmg%UNL(pos1(i)+k-1)
559 . h(5)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(5)*h
560 . h(5)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
561 . h(5)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
563 IF (nodadt == 0)
THEN
564 a5 = a5 + vol(i) * wi * xi * (h(5)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(5)*h(2)*nloc_dmg%VNL(pos2
565 . h(5)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(5)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
566 . h(5)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
567 . h(5)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
569 a5 = a5 + vol(i) * wi * xi * (
570 . h(5)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
571 . h(5)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
572 . h(5)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k
573 . h(5)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
574 . h(5)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1)
575 . h(5)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
576 . h(5)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
577 . h(5)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
580 b5 = l2 * vol(i) * wi * (btb15(i)*nloc_dmg%UNL(pos1(i
581 . btb35(i)*nloc_dmg%UNL(pos3(i)+k-1
582 . btb55(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb56(i)*nloc_dmg%UNL(pos6(i)+k-1) +
583 . btb57(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb58(i)*nloc_dmg%UNL(pos8(i)+k-1))
585 c5 = vol(i) * wi * h(5) * var_reg(i,ip)
587 a6 = vol(i) * wi * (h(6)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(6)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
588 . h(6)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(6)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
589 . h(6)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(6)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
590 . h(6)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h
592 IF (nodadt == 0)
THEN
593 a6 = a6 + vol(i) * wi * xi * (h(6)*h(1)*nloc_dmg%VNL(pos1
594 . h(6)*h(3)*nloc_dmg%VNL(pos3(i
595 . h(6)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h
596 . h(6)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(6)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
598 a6 = a6 + vol(i) * wi * xi * (
599 . h(6)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
600 . h(6)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
601 . h(6)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
602 . h(6)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
603 . h(6)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i
604 . h(6)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
605 . h(6)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
606 . h(6)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
609 b6 = l2 * vol(i) * wi * (btb16(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos2
610 . btb36(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb46(i)*nloc_dmg%UNL(pos4(i)+k-1) +
611 . btb56(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb66(i)*nloc_dmg%UNL(pos6(i)+k-1) +
612 . btb67(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos8(i)+k-1))
614 c6 = vol(i) * wi * h(6) * var_reg(i,ip)
616 a7 = vol(i) * wi * (h(7)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
617 . h(7)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
618 . h(7)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(7)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
619 . h(7)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(7)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
621 IF (nodadt == 0)
THEN
622 a7 = a7 + vol(i) * wi * xi * (h(7)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
623 . h(7)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
624 . h(7)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(7)*h
625 . h(7)*h(7)*nloc_dmg%VNL(pos7(i
627 a7 = a7 + vol(i) * wi * xi * (
628 . h(7)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
629 . h(7)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
630 . h(7)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
631 . h(7)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
632 . h(7)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
633 . h(7)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
634 . h(7)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
635 . h(7)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
638 b7 = l2 * vol(i) * wi * (btb17(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb27(i)*nloc_dmg%UNL(pos2(i)+k-1) +
639 . btb37(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb47(i)*nloc_dmg%UNL(pos4(i)+k-1) +
640 . btb57(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb67
641 . btb77(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb78(i)*nloc_dmg%UNL(pos8(i)+k-1))
643 c7 = vol(i) * wi * h(7) * var_reg(i,ip)
645 a8 = vol(i) * wi * (h(8)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
646 . h(8)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%UNL
647 . h(8)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%UNL
648 . h(8)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(8)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
650 IF (nodadt == 0)
THEN
651 a8 = a8 + vol(i) * wi * xi * (h(8)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
652 . h(8)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
653 . h(8)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
654 . h(8)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h
656 a8 = a8 + vol(i) * wi * xi * (
657 . h(8)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
658 . h(8)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
659 . h(8)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
660 . h(8)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
661 . h(8)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
662 . h(8)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
663 . h(8)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
664 . h(8)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
667 b8 = l2 * vol(i) * wi * (btb18(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb28(i)*nloc_dmg%UNL(pos2(i)+k-1) +
668 . btb38(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb48(i)*nloc_dmg%UNL(pos4(i)+k-1) +
669 . btb58(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos6(i)+k-1) +
670 . btb78(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb88(i)*nloc_dmg%UNL(pos8(i)+k-1))
672 c8 = vol(i) * wi * h(8) * var_reg(i,ip)
676 f1(i,k) = a1 + b1 - c1
677 f2(i,k) = a2 + b2 - c2
678 f3(i,k) = a3 + b3 - c3
679 f4(i,k) = a4 + b4 - c4
680 f5(i,k) = a5 + b5 - c5
681 f6(i,k) = a6 + b6 - c6
682 f7(i,k) = a7 + b7 - c7
683 f8(i,k) = a8 + b8 - c8
687 sti1(i,k) = (abs(l2*btb11(i) + h(1)*h(1)) + abs(l2*btb12(i) + h(1)*h(2)) + abs(l2*btb13(i) + h(1)*h(3)) +
688 . abs(l2*btb14(i) + h(1)*h(4)) + abs(l2*btb15(i) + h(1)*h(5)) + abs(l2*btb16(i) + h(1)*h(6)) +
689 . abs(l2*btb17(i) + h(1)*h(7)) + abs(l2*btb18(i) + h(1)*h(8)))*vol(i) * wi
690 sti2(i,k) = (abs(l2*btb12(i) + h(2)*h(1)) + abs(l2*btb22(i) + h(2)*h(2)) + abs(l2*btb23(i) + h(2)*h(3)) +
691 . abs(l2*btb24(i) + h(2)*h(4)) + abs(l2*btb25(i) + h(2)*h(5)) + abs(l2*btb26(i) + h(2)*h(6)) +
692 . abs(l2*btb27(i) + h(2)*h(7)) + abs(l2*btb28(i) + h(2)*h(8)))*vol(i) * wi
693 sti3(i,k) = (abs(l2*btb13(i) + h(3)*h(1)) + abs(l2*btb23(i) + h(3)*h(2)) + abs(l2*btb33(i) + h(3)*h(3)) +
694 . abs(l2*btb34(i) + h(3)*h(4)) + abs(l2*btb35(i) + h(3)*h(5)) + abs(l2*btb36(i) + h(3)*h(6)) +
695 . abs(l2*btb37(i) + h(3)*h(7)) + abs(l2*btb38(i) + h(3)*h(8)))*vol(i) * wi
696 sti4(i,k) = (abs(l2*btb14(i) + h(4)*h(1)) + abs(l2*btb24(i) + h(4)*h(2)) + abs(l2*btb34(i) + h(4)*h(3)) +
697 . abs(l2*btb44(i) + h(4)*h(4)) + abs(l2*btb45(i) + h(4)*h(5)) + abs(l2*btb46(i) + h(4)*h(6)) +
698 . abs(l2*btb47(i) + h(4)*h(7)) + abs(l2*btb48(i) + h(4)*h(8)))*vol(i) * wi
699 sti5(i,k) = (abs(l2*btb15(i) + h(5)*h(1)) + abs(l2*btb25(i) + h(5)*h(2)) + abs(l2*btb35(i) + h(5)*h(3)) +
700 . abs(l2*btb45(i) + h(5)*h(4)) + abs(l2*btb55(i) + h(5)*h(5)) + abs(l2*btb56(i) + h(5)*h(6)) +
701 . abs(l2*btb57(i) + h(5)*h(7)) + abs(l2*btb58(i) + h(5)*h(8)))*vol(i) * wi
702 sti6(i,k) = (abs(l2*btb16(i) + h(6)*h(1)) + abs(l2*btb26(i) + h(6)*h(2)) + abs(l2*btb36(i) + h(6)*h(3)) +
703 . abs(l2*btb46(i) + h(6)*h(4)) + abs(l2*btb56(i) + h(6)*h(5)) + abs(l2*btb66(i) + h(6)*h(6)) +
704 . abs(l2*btb67(i) + h(6)*h(7)) + abs(l2*btb68(i) + h(6)*h(8)))*vol(i) * wi
705 sti7(i,k) = (abs(l2*btb17(i) + h(7)*h(1)) + abs(l2*btb27(i) + h(7)*h(2)) + abs(l2*btb37(i) + h(7)*h(3)) +
706 . abs(l2*btb47(i) + h(7)*h(4)) + abs(l2*btb57(i) + h(7)*h(5)) + abs(l2*btb67(i) + h
707 . abs(l2*btb77(i) + h(7)*h(7)) + abs(l2*btb78(i) + h(7)*h(8)))*vol(i) * wi
708 sti8(i,k) = (abs(l2*btb18(i) + h(8)*h(1)) + abs(l2*btb28(i) + h(8)*h(2)) + abs(l2*btb38(i) + h(8)*h(3)) +
709 . abs(l2*btb48(i) + h(8)*h(4)) + abs(l2*btb58(i) + h(8)*h(5)) + abs(l2*btb68(i) + h(8)*h(6)) +
710 . abs(l2*btb78(i) + h(8)*h(7)) + abs(l2*btb88(i) + h(8)*h(8)))*vol(i) * wi
717 lc(i) = (wi*vol0(i))**third
721 f1(i,k) = sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1))*h(1)*zeta*sspnl*half*
722 . (nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i)+k-1))*(three/four)*(lc(i)**2)
723 f2(i,k) = sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1))*h(2)*zeta*sspnl*half*
724 . (nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
725 f3(i,k) = sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1))*h(3)*zeta*sspnl*half*
726 . (nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three/four)*(lc(i)**2)
727 f4(i,k) = sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1))*h(4)*zeta*sspnl*half*
728 . (nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
729 f5(i,k) = sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1))*h(5)*zeta*sspnl*half*
730 . (nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
731 f6(i,k) = sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1))*h(6)*zeta*sspnl*half*
732 . (nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
733 f7(i,k) = sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1))*h(7)*zeta*sspnl*half*
734 . (nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
735 f8(i,k) = sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1))*h(8)*zeta*sspnl*half*
736 . (nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
748 f1(i,k) = h(1)*zeta*sspnl*half*(nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i)+k-1))*(three/four)*(lc(i)**2)
749 f2(i,k) = h(2)*zeta*sspnl*half*(nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
750 f3(i,k) = h(3)*zeta*sspnl*half*(nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three/four)*(lc(i)**2)
751 f4(i,k) = h(4)*zeta*sspnl*half*(nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
752 f5(i,k) = h(5)*zeta*sspnl*half*(nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
753 f6(i,k) = h(6)*zeta*sspnl*half*(nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
754 f7(i,k) = h(7)*zeta*sspnl*half*(nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
755 f8(i,k) = h(8)*zeta*sspnl*half*(nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
766 IF (iparit == 0)
THEN
770#include "vectorize.inc"
 772 ! assembling the forces in the classic way
773 nloc_dmg%FNL(pos1(i)+k-1,itask+1) = nloc_dmg%FNL(pos1(i)+k-1,itask+1) - f1(i,k)
774 nloc_dmg%FNL(pos2(i)+k-1,itask+1) = nloc_dmg%FNL(pos2(i)+k-1,itask+1) - f2(i,k)
775 nloc_dmg%FNL(pos3(i)+k-1,itask+1) = nloc_dmg%FNL(pos3(i)+k-1,itask+1) - f3(i,k)
776 nloc_dmg%FNL(pos4(i)+k-1,itask+1) = nloc_dmg%FNL(pos4(i)+k-1,itask+1) - f4(i,k)
777 nloc_dmg%FNL(pos5(i)+k-1,itask+1) = nloc_dmg%FNL(pos5(i)+k-1,itask+1) - f5(i,k)
778 nloc_dmg%FNL(pos6(i)+k-1,itask+1) = nloc_dmg%FNL(pos6(i)+k-1,itask+1) - f6(i,k)
779 nloc_dmg%FNL(pos7(i)+k-1,itask+1) = nloc_dmg%FNL(pos7(i)+k-1,itask+1) - f7(i,k)
780 nloc_dmg%FNL(pos8(i)+k-1,itask+1) = nloc_dmg%FNL(pos8(i)+k-1,itask+1) - f8(i,k)
783 maxstif =
max(sti1(i,k),sti2(i,k),sti3(i,k),sti4(i,k),
784 . sti5(i,k),sti6(i,k),sti7(i,k),sti8(i,k))
786 nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) + maxstif
787 nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) + maxstif
788 nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) + maxstif
789 nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) + maxstif
790 nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) + maxstif
791 nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) + maxstif
792 nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) + maxstif
793 nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) + maxstif
804 ip = ir + ((is-1) + (j-1)*npts)*nptr
812 maxstif =
max(sti1(i,j),sti2(i,j
813 . sti5(i,j),sti6(i,j),sti7(i,j),sti8(i,j))
816 k = nloc_dmg%IADS(1,ii)
818 nloc_dmg%FSKY(k,j) = -f1(i,j)
819 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
821 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f1(i,j)
822 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
825 k = nloc_dmg%IADS(2,ii)
827 nloc_dmg%FSKY(k,j) = -f2(i,j)
828 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
830 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f2(i,j)
831 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
834 k = nloc_dmg%IADS(3,ii)
836 nloc_dmg%FSKY(k,j) = -f3(i,j)
837 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
839 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f3(i,j)
840 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
843 k = nloc_dmg%IADS(4,ii)
845 nloc_dmg%FSKY(k,j) = -f4(i,j)
846 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
848 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY
849 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
852 k = nloc_dmg%IADS(5,ii)
854 nloc_dmg%FSKY(k,j) = -f5(i,j)
855 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
857 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f5(i,j)
858 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
861 k = nloc_dmg%IADS(6,ii)
863 nloc_dmg%FSKY(k,j) = -f6(i,j)
864 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
866 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f6(i,j)
867 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
870 k = nloc_dmg%IADS(7,ii)
872 nloc_dmg%FSKY(k,j) = -f7(i,j)
873 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
875 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f7(i,j)
876 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
879 k = nloc_dmg%IADS(8,ii)
881 nloc_dmg%FSKY(k,j) = -f8(i,j)
882 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
884 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f8(i,j)
885 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
895 IF (nodadt == 0)
THEN
898 IF (offg(i)/=zero)
THEN
900 dtnl = (two*(
min((vol(i))**third,le_max))*sqrt(three*zeta))/
901 . sqrt(twelve*l2 + (
min((vol(i))**third,le_max))**2)
903 IF ((l2>zero).AND.(nlay > 1))
THEN
904 dtnl_th = (two*(
min(lthk(i),le_max))*sqrt(three*zeta))/
905 . sqrt(twelve*l2 + (
min(lthk(i),le_max))**2)
910 dt2t =
min(dt2t,dtfac1(1)*cdamp*dtnl_th,dtfac1(1)*cdamp*dtnl)
916 IF (
ALLOCATED(btb11))
DEALLOCATE(btb11)
917 IF (
ALLOCATED(btb12))
DEALLOCATE(btb12)
918 IF (
ALLOCATED(btb13))
DEALLOCATE(btb13)
919 IF (
ALLOCATED(btb14))
DEALLOCATE(btb14)
920 IF (
ALLOCATED(btb15))
DEALLOCATE(btb15)
921 IF (
ALLOCATED(btb16))
DEALLOCATE(btb16)
922 IF (
ALLOCATED(btb17))
DEALLOCATE(btb17)
923 IF (
ALLOCATED(btb18))
DEALLOCATE(btb18)
924 IF (
ALLOCATED(btb22))
DEALLOCATE(btb22)
925 IF (
ALLOCATED(btb23))
DEALLOCATE(btb23)
926 IF (
ALLOCATED(btb24))
DEALLOCATE(btb24)
927 IF (
ALLOCATED(btb25))
DEALLOCATE(btb25)
928 IF (
ALLOCATED(btb26))
DEALLOCATE(btb26)
929 IF (
ALLOCATED(btb27))
DEALLOCATE(btb27)
930 IF (
ALLOCATED(btb28))
DEALLOCATE(btb28)
931 IF (
ALLOCATED(btb33))
DEALLOCATE(btb33)
932 IF (
ALLOCATED(btb34))
DEALLOCATE(btb34)
933 IF (
ALLOCATED(btb35))
DEALLOCATE(btb35)
934 IF (
ALLOCATED(btb36))
DEALLOCATE(btb36)
935 IF (
ALLOCATED(btb37))
DEALLOCATE(btb37)
936 IF (
ALLOCATED(btb38))
DEALLOCATE(btb38)
937 IF (
ALLOCATED(btb44))
DEALLOCATE(btb44)
938 IF (
ALLOCATED(btb45))
DEALLOCATE(btb45)
939 IF (
ALLOCATED(btb46))
DEALLOCATE(btb46)
940 IF (
ALLOCATED(btb47))
DEALLOCATE(btb47)
941 IF (
ALLOCATED(btb48))
DEALLOCATE(btb48)
942 IF (
ALLOCATED(btb55))
DEALLOCATE(btb55)
943 IF (
ALLOCATED(btb56))
DEALLOCATE(btb56)
944 IF (
ALLOCATED(btb57))
DEALLOCATE(btb57)
945 IF (
ALLOCATED(btb58))
DEALLOCATE(btb58)
946 IF (
ALLOCATED(btb66))
DEALLOCATE(btb66)
947 IF (
ALLOCATED(btb67))
DEALLOCATE(btb67)
948 IF (
ALLOCATED(btb68))
DEALLOCATE(btb68)
949 IF (
ALLOCATED(btb77))
DEALLOCATE(btb77)
950 IF (
ALLOCATED(btb78))
DEALLOCATE(btb78)
951 IF (
ALLOCATED(btb88))
DEALLOCATE(btb88)
952 IF (
ALLOCATED(pos1))
DEALLOCATE(pos1)
953 IF (
ALLOCATED(pos2))
DEALLOCATE(pos2)
954 IF (
ALLOCATED(pos3))
DEALLOCATE(pos3)
955 IF (
ALLOCATED(pos4))
DEALLOCATE(pos4)
956 IF (
ALLOCATED(pos5))
DEALLOCATE(pos5)
957 IF (
ALLOCATED(pos6))
DEALLOCATE(pos6)
958 IF (
ALLOCATED(pos7))
DEALLOCATE(pos7)
959 IF (
ALLOCATED(pos8))
DEALLOCATE(pos8)
960 IF (
ALLOCATED(sti1))
DEALLOCATE(sti1)
961 IF (
ALLOCATED(sti2))
DEALLOCATE(sti2)
962 IF (
ALLOCATED(sti3))
DEALLOCATE(sti3)
963 IF (
ALLOCATED(sti4))
DEALLOCATE(sti4)
964 IF (
ALLOCATED(sti5))
DEALLOCATE(sti5)
965 IF (
ALLOCATED(sti6))
DEALLOCATE(sti6)
966 IF (
ALLOCATED(sti7))
DEALLOCATE(sti7)
967 IF (
ALLOCATED(sti8))
DEALLOCATE(sti8)