OpenRadioss 2025.1.11
OpenRadioss project
s8cfint_reg.F File Reference
#include "implicit_f.inc"
#include "parit_c.inc"
#include "vectorize.inc"


Functions/Subroutines

subroutine s8cfint_reg (nloc_dmg, var_reg, nel, offg, vol, nc1, nc2, nc3, nc4, nc5, nc6, nc7, nc8, px1, px2, px3, px4, px5, px6, px7, px8, py1, py2, py3, py4, py5, py6, py7, py8, pz1, pz2, pz3, pz4, pz5, pz6, pz7, pz8, imat, itask, dt2t, vol0, nft, nlay, nptr, npts, ir, is, ws, as, icr, ics, ict, bufnlts, area, nodadt, dt1, dt12, dtfac1, dtmin1, idtmin)

Function/Subroutine Documentation

◆ s8cfint_reg()

subroutine s8cfint_reg ( type(nlocal_str_), intent(inout) nloc_dmg,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) var_reg,
integer, intent(in) nel,
my_real, dimension(nel), intent(in) offg,
my_real, dimension(nel), intent(in) vol,
integer, dimension(nel), intent(in) nc1,
integer, dimension(nel), intent(in) nc2,
integer, dimension(nel), intent(in) nc3,
integer, dimension(nel), intent(in) nc4,
integer, dimension(nel), intent(in) nc5,
integer, dimension(nel), intent(in) nc6,
integer, dimension(nel), intent(in) nc7,
integer, dimension(nel), intent(in) nc8,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px1,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px2,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px3,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px4,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px5,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px6,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px7,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) px8,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py1,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py2,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py3,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py4,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py5,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py6,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py7,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) py8,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz1,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz2,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz3,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz4,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz5,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz6,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz7,
my_real, dimension(nel,nptr*npts*nlay), intent(inout) pz8,
integer, intent(in) imat,
integer, intent(in) itask,
my_real, intent(inout) dt2t,
my_real, dimension(nel), intent(in) vol0,
integer, intent(in) nft,
integer, intent(in) nlay,
integer, intent(in) nptr,
integer, intent(in) npts,
integer, intent(in) ir,
integer, intent(in) is,
my_real, dimension(9,9), intent(in) ws,
my_real, dimension(9,9), intent(in) as,
integer, intent(in) icr,
integer, intent(in) ics,
integer, intent(in) ict,
type(buf_nlocts_), intent(inout), target bufnlts,
my_real, dimension(nel), intent(in) area,
integer, intent(in) nodadt,
my_real, intent(in) dt1,
my_real, intent(in) dt12,
my_real, dimension(102), intent(in) dtfac1,
my_real, dimension(102), intent(in) dtmin1,
integer, dimension(102), intent(in) idtmin )

Definition at line 33 of file s8cfint_reg.F.
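
At each integration point of the 8-node thick-shell element, the routine assembles the non-local regularization internal forces and, when required, the associated nodal stiffness and time-step contributions. In compact form, the per-point contribution coded below (comment at source line 674) reads

$$
f^{\mathrm{nl}} \;=\; w_i\,V\left( L^2\,B^{T}B\,u^{\mathrm{nl}} \;+\; N^{T}N\,u^{\mathrm{nl}} \;+\; \xi\,N^{T}N\,v^{\mathrm{nl}} \;-\; N^{T}v^{\mathrm{reg}} \right),
$$

where V is the element volume (vol), w_i the integration weight built from ws, L the non-local length nloc_dmg%LEN(imat), xi the damping nloc_dmg%DAMP(imat), N the shape functions h, B the gradient matrix assembled from px1..pz8, u_nl and v_nl the non-local cumulated values and velocities (nloc_dmg%UNL, nloc_dmg%VNL), and v_reg the local variable to regularize (var_reg). With nodal time stepping (nodadt > 0), the damping term is additionally scaled by the smallest nodal mass-scaling factor sqrt(M/M0) of the element. Without nodal time stepping, the element-based non-local stable time step retained in dt2t follows source lines 900-905:

$$
\Delta t_{\mathrm{nl}} \;=\; \frac{2\,\min\!\left(V^{1/3},\,l_{e,\max}\right)\sqrt{3\zeta}}{\sqrt{12\,L^{2} + \min\!\left(V^{1/3},\,l_{e,\max}\right)^{2}}},
$$

with zeta = nloc_dmg%DENS(imat) and l_e,max = nloc_dmg%LE_MAX(imat); it is compared to dt2t after scaling by dtfac1(1)*cdamp.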

49C-----------------------------------------------
50C M o d u l e s
51C-----------------------------------------------
53 USE elbufdef_mod
54C-----------------------------------------------
55C I m p l i c i t T y p e s
56C-----------------------------------------------
57#include "implicit_f.inc"
58C-----------------------------------------------
59C G l o b a l P a r a m e t e r s
60C-----------------------------------------------
61#include "parit_c.inc"
62C-----------------------------------------------
63C D u m m y A r g u m e n t s
64C-----------------------------------------------
65 INTEGER, INTENT(IN) :: NFT,NLAY,NPTR,NPTS,IR,IS,
66 . NEL,IMAT,ITASK,ICR,ICS,ICT
67 INTEGER, DIMENSION(NEL), INTENT(IN) :: NC1,NC2,NC3,NC4,NC5,NC6,NC7,NC8
68 my_real, DIMENSION(NEL), INTENT(IN) ::
69 . offg
70 my_real, INTENT(INOUT) ::
71 . dt2t
72 my_real, DIMENSION(NEL), INTENT(IN) ::
73 . vol,vol0,area
74 TYPE(NLOCAL_STR_), INTENT(INOUT) :: NLOC_DMG
75 my_real, DIMENSION(9,9), INTENT(IN) ::
76 . ws,as
77 my_real, DIMENSION(NEL,NPTR*NPTS*NLAY), INTENT(INOUT) ::
78 . var_reg,px1,px2,px3,px4,px5,px6,px7,px8,
79 . py1,py2,py3,py4,py5,py6,py7,py8,pz1,pz2,
80 . pz3,pz4,pz5,pz6,pz7,pz8
81 TYPE(BUF_NLOCTS_), INTENT(INOUT), TARGET :: BUFNLTS
82 my_real, INTENT(IN) ::
83 . dt1,dt12
84 INTEGER, INTENT(IN) :: NODADT,IDTMIN(102)
85 my_real, INTENT(IN) :: dtfac1(102),dtmin1(102)
86C-----------------------------------------------
87C L o c a l V a r i a b l e s
88C-----------------------------------------------
89 INTEGER I,J,II,K,NNOD,N1,N2,N3,N4,N5,N6,N7,N8,
90 . L_NLOC,IP,NDDL,NDNOD
91 my_real
92 . l2,xi,b1,b2,b3,b4,b5,b6,b7,b8,h(8),
93 . a1,a2,a3,a4,a5,a6,a7,a8,
94 . c1,c2,c3,c4,c5,c6,c7,c8,
95 . zeta,sspnl,dtnl,le_max,maxstif,
96 . zr,zs,zt,pr(8),ps(8),pt(8),wi,
97 . bth1,bth2,nth1,nth2,dt2p,dtnod,
98 . k1,k2,k12,dtnl_th,minmasscal
99 my_real, DIMENSION(NEL) ::
100 . lc,thk,lthk
101 my_real, DIMENSION(NEL,NLAY) ::
102 . f1,f2,f3,f4,f5,f6,f7,f8
103 my_real, DIMENSION(:,:) ,ALLOCATABLE ::
104 . sti1,sti2,sti3,sti4,sti5,sti6,sti7,sti8
105 my_real, DIMENSION(:) ,ALLOCATABLE ::
106 . btb11,btb12,btb13,btb14,btb15,btb16,btb17,btb18,
107 . btb22,btb23,btb24,btb25,btb26,btb27,btb28,btb33,
108 . btb34,btb35,btb36,btb37,btb38,btb44,btb45,btb46,
109 . btb47,btb48,btb55,btb56,btb57,btb58,btb66,btb67,
110 . btb68,btb77,btb78,btb88
111 INTEGER, DIMENSION(:), ALLOCATABLE ::
112 . POS1,POS2,POS3,POS4,POS5,POS6,POS7,POS8
113 my_real, DIMENSION(:,:), ALLOCATABLE ::
114 . stifnlth,dtn
115 my_real
116 . zth(10,9)
117 ! Position of nodes in the thickshell thickness
118 DATA zth /
119 1 0. ,0. ,0. ,
120 1 0. ,0. ,0. ,
121 1 0. ,0. ,0. ,
122 1 0. ,
123 2 -1. ,0. ,1. ,
124 2 0. ,0. ,0. ,
125 2 0. ,0. ,0. ,
126 2 0. ,
127 3 -1. ,-.549193338482966,0.549193338482966,
128 3 1. ,0. ,0. ,
129 3 0. ,0. ,0. ,
130 3 0. ,
131 4 -1. ,-.600558677589454,0. ,
132 4 0.600558677589454,1. ,0. ,
133 4 0. ,0. ,0. ,
134 4 0. ,
135 5 -1. ,-.812359691877328,-.264578928334038,
136 5 0.264578928334038,0.812359691877328,1. ,
137 5 0. ,0. ,0. ,
138 5 0. ,
139 6 -1. ,-.796839450334708,-.449914286274731,
140 6 0. ,0.449914286274731,0.796839450334708,
141 6 1. ,0. ,0. ,
142 6 0. ,
143 7 -1. ,-.898215824685518,-.584846546513270,
144 7 -.226843756241524,0.226843756241524,0.584846546513270,
145 7 0.898215824685518,1. ,0. ,
146 7 0. ,
147 8 -1. ,-.878478166955581,-.661099443664978,
148 8 -.354483526205989,0. ,0.354483526205989,
149 8 0.661099443664978,0.878478166955581,1. ,
150 8 0. ,
151 9 -1. ,-.936320479015252,-.735741735638020,
152 9 -.491001129763160,-.157505717044458,0.157505717044458,
153 9 0.491001129763160,0.735741735638020,0.936320479015252,
154 9 1. /
155C=======================================================================
156 l2 = nloc_dmg%LEN(imat)**2
157 xi = nloc_dmg%DAMP(imat)
158 nnod = nloc_dmg%NNOD
159 l_nloc = nloc_dmg%L_NLOC
160 zeta = nloc_dmg%DENS(imat)
161 sspnl = nloc_dmg%SSPNL(imat)
 162       le_max = nloc_dmg%LE_MAX(imat) ! Maximum length for convergence
163 lc(1:nel) = zero
164 ALLOCATE(
165 . btb11(nel),btb12(nel),btb13(nel),btb14(nel),btb15(nel),
166 . btb16(nel),btb17(nel),btb18(nel),btb22(nel),btb23(nel),btb24(nel),
167 . btb25(nel),btb26(nel),btb27(nel),btb28(nel),btb33(nel),btb34(nel),
168 . btb35(nel),btb36(nel),btb37(nel),btb38(nel),btb44(nel),btb45(nel),
169 . btb46(nel),btb47(nel),btb48(nel),btb55(nel),btb56(nel),btb57(nel),
170 . btb58(nel),btb66(nel),btb67(nel),btb68(nel),btb77(nel),btb78(nel),
171 . btb88(nel),pos1(nel),pos2(nel),pos3(nel),pos4(nel),pos5(nel),
172 . pos6(nel),pos7(nel),pos8(nel))
173 ! For nodal timestep
174 IF (nodadt > 0) THEN
 175         ! Non-local nodal stiffness
176 ALLOCATE(sti1(nel,nlay),sti2(nel,nlay),sti3(nel,nlay),sti4(nel,nlay),
177 . sti5(nel,nlay),sti6(nel,nlay),sti7(nel,nlay),sti8(nel,nlay))
178 ELSE
179 ALLOCATE(sti1(1,1),sti2(1,1),sti3(1,1),sti4(1,1),
180 . sti5(1,1),sti6(1,1),sti7(1,1),sti8(1,1))
181 ENDIF
182c
183 !-----------------------------------------------------------------------
184 ! Computation of the position of the non-local d.o.fs
185 !-----------------------------------------------------------------------
186 ! Loop over elements
187#include "vectorize.inc"
188 DO i=1,nel
189c
190 ! Number of the element nodes
191 n1 = nloc_dmg%IDXI(nc1(i))
192 n2 = nloc_dmg%IDXI(nc2(i))
193 n3 = nloc_dmg%IDXI(nc3(i))
194 n4 = nloc_dmg%IDXI(nc4(i))
195 n5 = nloc_dmg%IDXI(nc5(i))
196 n6 = nloc_dmg%IDXI(nc6(i))
197 n7 = nloc_dmg%IDXI(nc7(i))
198 n8 = nloc_dmg%IDXI(nc8(i))
199c
200 ! Recovering the position of the non-local d.o.f.s
201 pos1(i) = nloc_dmg%POSI(n1)
202 pos2(i) = nloc_dmg%POSI(n2)
203 pos3(i) = nloc_dmg%POSI(n3)
204 pos4(i) = nloc_dmg%POSI(n4)
205 pos5(i) = nloc_dmg%POSI(n5)
206 pos6(i) = nloc_dmg%POSI(n6)
207 pos7(i) = nloc_dmg%POSI(n7)
208 pos8(i) = nloc_dmg%POSI(n8)
209c
210 ENDDO
211c
212 !-----------------------------------------------------------------------
 213       ! Pre-treatment of the non-local regularization in the thickshell thickness
214 !-----------------------------------------------------------------------
215 IF ((l2>zero).AND.(nlay > 1)) THEN
216c
217 ! Compute thickshell thickness
218 DO i = 1,nel
219 thk(i) = vol(i)/area(i)
220 lthk(i) = (zth(nlay+1,nlay)-zth(nlay,nlay))*thk(i)*half
221 ENDDO
222c
 223         ! Allocation of the nodal stiffness and timestep arrays
224 nddl = nlay
225 IF (nodadt > 0) THEN
226 ALLOCATE(stifnlth(nel,nddl+1))
227 ALLOCATE(dtn(nel,nddl+1))
228 ELSE
229 ALLOCATE(dtn(1,1))
230 ALLOCATE(stifnlth(1,1))
231 dtn(1,1) = ep20
232 stifnlth(1,1) = ep20
233 ENDIF
234 ndnod = nddl+1
235c
236 DO k = 1,ndnod
237 DO i = 1,nel
238 ! Resetting non-local forces
239 bufnlts%FNLTH(i,k) = zero
240 ! Resetting non-local nodal stiffness
241 IF (nodadt > 0) THEN
242 stifnlth(i,k) = em20
243 ENDIF
244 ENDDO
245 ENDDO
246c
247 ! Computation of non-local forces in the shell thickness
248 DO k = 1, nddl
249c
250 ! Integration point number
251 ip = ir + ((is-1) + (k-1)*npts)*nptr
252c
 253         ! Computation of shape function values
254 nth1 = (as(k,nddl) - zth(k+1,nddl)) /
255 . (zth(k,nddl) - zth(k+1,nddl))
256 nth2 = (as(k,nddl) - zth(k,nddl)) /
257 . (zth(k+1,nddl) - zth(k,nddl))
258c
259 ! Weight of integration
260 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
261c
262 ! Loop over elements
263 DO i = 1,nel
264c
265 ! Computation of B-matrix values
266 bth1 = (one/(zth(k,nddl) - zth(k+1,nddl)))*(two/thk(i))
267 bth2 = (one/(zth(k+1,nddl) - zth(k,nddl)))*(two/thk(i))
268c
269 ! Computation of the non-local K matrix
270 k1 = l2*(bth1**2) + nth1**2
271 k12 = l2*(bth1*bth2)+ (nth1*nth2)
272 k2 = l2*(bth2**2) + nth2**2
273c
274 ! Computation of the non-local forces
275 bufnlts%FNLTH(i,k) = bufnlts%FNLTH(i,k)
276 . + (k1*bufnlts%UNLTH(i,k) + k12*bufnlts%UNLTH(i,k+1)
277 . + xi*((nth1**2)*bufnlts%VNLTH(i,k)
278 . + (nth1*nth2)*bufnlts%VNLTH(i,k+1))
279 . - (nth1*var_reg(i,ip)))*wi*vol(i)
280 bufnlts%FNLTH(i,k+1) = bufnlts%FNLTH(i,k+1)
281 . + (k12*bufnlts%UNLTH(i,k) + k2*bufnlts%UNLTH(i,k+1)
282 . + xi*(nth1*nth2*bufnlts%VNLTH(i,k)
283 . + (nth2**2)*bufnlts%VNLTH(i,k+1))
284 . - nth2*var_reg(i,ip))*wi*vol(i)
285c
286 ! Computation of non-local nodal stiffness
287 IF (nodadt > 0) THEN
288 stifnlth(i,k) = stifnlth(i,k) + max(abs(k1)+abs(k12),abs(k12)+abs(k2))*wi*vol(i)
289 stifnlth(i,k+1) = stifnlth(i,k+1) + max(abs(k1)+abs(k12),abs(k12)+abs(k2))*wi*vol(i)
290 ENDIF
291c
292 ENDDO
293 ENDDO
294c
295 ! Updating non-local mass with /DT/NODA
296 IF (nodadt > 0) THEN
297C
298 ! Initial computation of the nodal timestep
299 dtnod = ep20
300 DO k = 1,ndnod
301 DO i = 1,nel
302 dtn(i,k) = dtfac1(11)*cdamp*sqrt(two*bufnlts%MASSTH(i,k)/max(stifnlth(i,k),em20))
303 dtnod = min(dtn(i,k),dtnod)
304 ENDDO
305 ENDDO
306C
307 ! /DT/NODA/CSTX - Constant timestep with added mass
308 IF ((idtmin(11)==3).OR.(idtmin(11)==4).OR.(idtmin(11)==8)) THEN
309 ! Added mass computation if necessary
310 IF (dtnod < dtmin1(11)*(sqrt(csta))) THEN
311 DO k = 1,ndnod
312 DO i = 1,nel
313 IF (dtn(i,k) < dtmin1(11)) THEN
314 dt2p = dtmin1(11)/(dtfac1(11)*cdamp)
315 bufnlts%MASSTH(i,k) = max(bufnlts%MASSTH(i,k),csta*half*stifnlth(i,k)*dt2p*dt2p*onep00001)
316 ENDIF
317 ENDDO
318 ENDDO
319 dtnod = dtmin1(11)*(sqrt(csta))
320 ENDIF
321 ENDIF
322C
323 ! Classical nodal timestep check
324 IF (dtnod < dt2t) THEN
325 dt2t = min(dt2t,dtnod)
326 ENDIF
327 ENDIF
328c
329 DO k = 1,ndnod
330 DO i = 1,nel
331 ! Updating the non-local in-thickness velocities
332 bufnlts%VNLTH(i,k) = bufnlts%VNLTH(i,k) - (bufnlts%FNLTH(i,k)/bufnlts%MASSTH(i,k))*dt12
333 ENDDO
334 ENDDO
335c
336 DO k = 1,ndnod
337 DO i = 1,nel
338 ! Computing the non-local in-thickness cumulated values
339 bufnlts%UNLTH(i,k) = bufnlts%UNLTH(i,k) + bufnlts%VNLTH(i,k)*dt1
340 ENDDO
341 ENDDO
342c
 343       ! Transfer to the integration points
344 DO k = 1, nddl
345 ! Integration point number
346 ip = ir + ((is-1) + (k-1)*npts)*nptr
 347         ! Computation of shape function values
348 nth1 = (as(k,nddl) - zth(k+1,nddl))/
349 . (zth(k,nddl) - zth(k+1,nddl))
350 nth2 = (as(k,nddl) - zth(k,nddl))/
351 . (zth(k+1,nddl) - zth(k,nddl))
352 ! Loop over elements
353 DO i = 1,nel
 354           ! Non-local variables at the integration points
355 var_reg(i,ip) = nth1*bufnlts%UNLTH(i,k) + nth2*bufnlts%UNLTH(i,k+1)
356 ENDDO
357 ENDDO
358 ENDIF
359c
360 !-----------------------------------------------------------------------
361 ! Computation of non-local forces
362 !-----------------------------------------------------------------------
363 ! Loop over layers
364 DO k = 1,nlay
365c
366 ! Integration points positions and weight
367 IF (ict == 1) THEN
368 zr = as(ir,nptr)
369 zs = as(is,npts)
370 zt = as(k,nlay)
371 ELSEIF (ics == 1) THEN
372 zr = as(ir,nptr)
373 zs = as(k,nlay)
374 zt = as(is,npts)
375 ELSEIF (icr == 1) THEN
376 zr = as(k,nlay)
377 zs = as(ir,nptr)
378 zt = as(is,npts)
379 ENDIF
380 wi = half*ws(ir,nptr)*half*ws(is,npts)*half*ws(k,nlay)
381c
382 ! Shape Functions Value
383 CALL basis8 (zr,zs,zt,h,pr,ps,pt)
384c
385 ! integration point number
386 ip = ir + ((is-1) + (k-1)*npts)*nptr
387c
388 ! Loop over elements
389#include "vectorize.inc"
390 DO i=1,nel
391c
392 ! If the element is not broken, normal computation
393 IF (offg(i) /= zero) THEN
394c
395 ! Computation of the product BtxB
396 btb11(i) = px1(i,ip)**2 + py1(i,ip)**2 + pz1(i,ip)**2
397 btb12(i) = px1(i,ip)*px2(i,ip) + py1(i,ip)*py2(i,ip) + pz1(i,ip)*pz2(i,ip)
398 btb13(i) = px1(i,ip)*px3(i,ip) + py1(i,ip)*py3(i,ip) + pz1(i,ip)*pz3(i,ip)
399 btb14(i) = px1(i,ip)*px4(i,ip) + py1(i,ip)*py4(i,ip) + pz1(i,ip)*pz4(i,ip)
400 btb15(i) = px1(i,ip)*px5(i,ip) + py1(i,ip)*py5(i,ip) + pz1(i,ip)*pz5(i,ip)
401 btb16(i) = px1(i,ip)*px6(i,ip) + py1(i,ip)*py6(i,ip) + pz1(i,ip)*pz6(i,ip)
402 btb17(i) = px1(i,ip)*px7(i,ip) + py1(i,ip)*py7(i,ip) + pz1(i,ip)*pz7(i,ip)
403 btb18(i) = px1(i,ip)*px8(i,ip) + py1(i,ip)*py8(i,ip) + pz1(i,ip)*pz8(i,ip)
404 btb22(i) = px2(i,ip)**2 + py2(i,ip)**2 + pz2(i,ip)**2
405 btb23(i) = px2(i,ip)*px3(i,ip) + py2(i,ip)*py3(i,ip) + pz2(i,ip)*pz3(i,ip)
406 btb24(i) = px2(i,ip)*px4(i,ip) + py2(i,ip)*py4(i,ip) + pz2(i,ip)*pz4(i,ip)
407 btb25(i) = px2(i,ip)*px5(i,ip) + py2(i,ip)*py5(i,ip) + pz2(i,ip)*pz5(i,ip)
408 btb26(i) = px2(i,ip)*px6(i,ip) + py2(i,ip)*py6(i,ip) + pz2(i,ip)*pz6(i,ip)
409 btb27(i) = px2(i,ip)*px7(i,ip) + py2(i,ip)*py7(i,ip) + pz2(i,ip)*pz7(i,ip)
410 btb28(i) = px2(i,ip)*px8(i,ip) + py2(i,ip)*py8(i,ip) + pz2(i,ip)*pz8(i,ip)
411 btb33(i) = px3(i,ip)**2 + py3(i,ip)**2 + pz3(i,ip)**2
412 btb34(i) = px3(i,ip)*px4(i,ip) + py3(i,ip)*py4(i,ip) + pz3(i,ip)*pz4(i,ip)
413 btb35(i) = px3(i,ip)*px5(i,ip) + py3(i,ip)*py5(i,ip) + pz3(i,ip)*pz5(i,ip)
414 btb36(i) = px3(i,ip)*px6(i,ip) + py3(i,ip)*py6(i,ip) + pz3(i,ip)*pz6(i,ip)
415 btb37(i) = px3(i,ip)*px7(i,ip) + py3(i,ip)*py7(i,ip) + pz3(i,ip)*pz7(i,ip)
416 btb38(i) = px3(i,ip)*px8(i,ip) + py3(i,ip)*py8(i,ip) + pz3(i,ip)*pz8(i,ip)
417 btb44(i) = px4(i,ip)**2 + py4(i,ip)**2 + pz4(i,ip)**2
418 btb45(i) = px4(i,ip)*px5(i,ip) + py4(i,ip)*py5(i,ip) + pz4(i,ip)*pz5(i,ip)
419 btb46(i) = px4(i,ip)*px6(i,ip) + py4(i,ip)*py6(i,ip) + pz4(i,ip)*pz6(i,ip)
420 btb47(i) = px4(i,ip)*px7(i,ip) + py4(i,ip)*py7(i,ip) + pz4(i,ip)*pz7(i,ip)
421 btb48(i) = px4(i,ip)*px8(i,ip) + py4(i,ip)*py8(i,ip) + pz4(i,ip)*pz8(i,ip)
422 btb55(i) = px5(i,ip)**2 + py5(i,ip)**2 + pz5(i,ip)**2
423 btb56(i) = px5(i,ip)*px6(i,ip) + py5(i,ip)*py6(i,ip) + pz5(i,ip)*pz6(i,ip)
424 btb57(i) = px5(i,ip)*px7(i,ip) + py5(i,ip)*py7(i,ip) + pz5(i,ip)*pz7(i,ip)
425 btb58(i) = px5(i,ip)*px8(i,ip) + py5(i,ip)*py8(i,ip) + pz5(i,ip)*pz8(i,ip)
426 btb66(i) = px6(i,ip)**2 + py6(i,ip)**2 + pz6(i,ip)**2
427 btb67(i) = px6(i,ip)*px7(i,ip) + py6(i,ip)*py7(i,ip) + pz6(i,ip)*pz7(i,ip)
428 btb68(i) = px6(i,ip)*px8(i,ip) + py6(i,ip)*py8(i,ip) + pz6(i,ip)*pz8(i,ip)
429 btb77(i) = px7(i,ip)**2 + py7(i,ip)**2 + pz7(i,ip)**2
430 btb78(i) = px7(i,ip)*px8(i,ip) + py7(i,ip)*py8(i,ip) + pz7(i,ip)*pz8(i,ip)
431 btb88(i) = px8(i,ip)**2 + py8(i,ip)**2 + pz8(i,ip)**2
432c
433 ! Computation of LEN**2*BTB*Unl product and NTN*UNL, NTN*VNL
434 a1 = vol(i) * wi * (h(1)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
435 . h(1)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
436 . h(1)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(1)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
437 . h(1)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(1)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
438c
439 IF (nodadt == 0) THEN
440 a1 = a1 + vol(i) * wi * xi * (h(1)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(1)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
441 . h(1)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(1)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
442 . h(1)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(1)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
443 . h(1)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(1)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
444 ELSE
445 minmasscal = min(sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1)),
446 . sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1)),
447 . sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1)),
448 . sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1)),
449 . sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1)),
450 . sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1)),
451 . sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1)),
452 . sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1)))
453 a1 = a1 + vol(i) * wi * xi * (
454 . h(1)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
455 . h(1)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
456 . h(1)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
457 . h(1)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
458 . h(1)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
459 . h(1)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
460 . h(1)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
461 . h(1)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
462 ENDIF
463c
464 b1 = l2 * vol(i) * wi * (btb11(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb12(i)*nloc_dmg%UNL(pos2(i)+k-1) +
465 . btb13(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb14(i)*nloc_dmg%UNL(pos4(i)+k-1) +
466 . btb15(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb16(i)*nloc_dmg%UNL(pos6(i)+k-1) +
467 . btb17(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb18(i)*nloc_dmg%UNL(pos8(i)+k-1))
468c
469 c1 = vol(i) * wi * h(1) * var_reg(i,ip)
470c
471 a2 = vol(i) * wi * (h(2)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
472 . h(2)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
473 . h(2)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
474 . h(2)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(2)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
475c
476 IF (nodadt == 0) THEN
477 a2 = a2 + vol(i) * wi * xi * (h(2)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(2)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
478 . h(2)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(2)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
479 . h(2)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(2)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
480 . h(2)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(2)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
481 ELSE
482 a2 = a2 + vol(i) * wi * xi * (
483 . h(2)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
484 . h(2)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
485 . h(2)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
486 . h(2)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
487 . h(2)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
488 . h(2)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
489 . h(2)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
490 . h(2)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
491 ENDIF
492c
493 b2 = l2 * vol(i) * wi * (btb12(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb22(i)*nloc_dmg%UNL(pos2(i)+k-1) +
494 . btb23(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb24(i)*nloc_dmg%UNL(pos4(i)+k-1) +
495 . btb25(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos6(i)+k-1) +
496 . btb27(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb28(i)*nloc_dmg%UNL(pos8(i)+k-1))
497c
498 c2 = vol(i) * wi * h(2) * var_reg(i,ip)
499c
500 a3 = vol(i) * wi * (h(3)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
501 . h(3)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
502 . h(3)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
503 . h(3)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(3)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
504c
505 IF (nodadt == 0) THEN
506 a3 = a3 + vol(i) * wi * xi * (h(3)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(3)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
507 . h(3)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(3)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
508 . h(3)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(3)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
509 . h(3)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(3)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
510 ELSE
511 a3 = a3 + vol(i) * wi * xi * (
512 . h(3)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
513 . h(3)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
514 . h(3)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
515 . h(3)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
516 . h(3)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
517 . h(3)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
518 . h(3)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
519 . h(3)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
520 ENDIF
521c
522 b3 = l2 * vol(i) * wi * (btb13(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb23(i)*nloc_dmg%UNL(pos2(i)+k-1) +
523 . btb33(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb34(i)*nloc_dmg%UNL(pos4(i)+k-1) +
524 . btb35(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb36(i)*nloc_dmg%UNL(pos6(i)+k-1) +
525 . btb37(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb38(i)*nloc_dmg%UNL(pos8(i)+k-1))
526c
527 c3 = vol(i) * wi * h(3) * var_reg(i,ip)
528c
529 a4 = vol(i) * wi * (h(4)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
530 . h(4)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
531 . h(4)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(4)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
532 . h(4)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
533c
534 IF (nodadt == 0) THEN
535 a4 = a4 + vol(i) * wi * xi * (h(4)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(4)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
536 . h(4)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(4)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
537 . h(4)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(4)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
538 . h(4)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(4)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
539 ELSE
540 a4 = a4 + vol(i) * wi * xi * (
541 . h(4)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
542 . h(4)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
543 . h(4)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
544 . h(4)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
545 . h(4)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
546 . h(4)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
547 . h(4)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
548 . h(4)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
549 ENDIF
550c
551 b4 = l2 * vol(i) * wi * (btb14(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb24(i)*nloc_dmg%UNL(pos2(i)+k-1) +
552 . btb34(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb44(i)*nloc_dmg%UNL(pos4(i)+k-1) +
553 . btb45(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb46(i)*nloc_dmg%UNL(pos6(i)+k-1) +
554 . btb47(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb48(i)*nloc_dmg%UNL(pos8(i)+k-1))
555c
556 c4 = vol(i) * wi * h(4) * var_reg(i,ip)
557c
558 a5 = vol(i) * wi * (h(5)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(5)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
559 . h(5)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(5)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
560 . h(5)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
561 . h(5)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
562c
563 IF (nodadt == 0) THEN
564 a5 = a5 + vol(i) * wi * xi * (h(5)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(5)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
565 . h(5)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(5)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
566 . h(5)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(5)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
567 . h(5)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(5)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
568 ELSE
569 a5 = a5 + vol(i) * wi * xi * (
570 . h(5)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
571 . h(5)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
572 . h(5)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
573 . h(5)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
574 . h(5)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
575 . h(5)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
576 . h(5)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
577 . h(5)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
578 ENDIF
579c
580 b5 = l2 * vol(i) * wi * (btb15(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb25(i)*nloc_dmg%UNL(pos2(i)+k-1) +
581 . btb35(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb45(i)*nloc_dmg%UNL(pos4(i)+k-1) +
582 . btb55(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb56(i)*nloc_dmg%UNL(pos6(i)+k-1) +
583 . btb57(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb58(i)*nloc_dmg%UNL(pos8(i)+k-1))
584c
585 c5 = vol(i) * wi * h(5) * var_reg(i,ip)
586c
587 a6 = vol(i) * wi * (h(6)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(6)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
588 . h(6)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(6)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
589 . h(6)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(6)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
590 . h(6)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(6)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
591c
592 IF (nodadt == 0) THEN
593 a6 = a6 + vol(i) * wi * xi * (h(6)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(6)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
594 . h(6)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(6)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
595 . h(6)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(6)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
596 . h(6)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(6)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
597 ELSE
598 a6 = a6 + vol(i) * wi * xi * (
599 . h(6)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
600 . h(6)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
601 . h(6)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
602 . h(6)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
603 . h(6)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
604 . h(6)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
605 . h(6)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
606 . h(6)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
607 ENDIF
608c
609 b6 = l2 * vol(i) * wi * (btb16(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb26(i)*nloc_dmg%UNL(pos2(i)+k-1) +
610 . btb36(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb46(i)*nloc_dmg%UNL(pos4(i)+k-1) +
611 . btb56(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb66(i)*nloc_dmg%UNL(pos6(i)+k-1) +
612 . btb67(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos8(i)+k-1))
613c
614 c6 = vol(i) * wi * h(6) * var_reg(i,ip)
615c
616 a7 = vol(i) * wi * (h(7)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
617 . h(7)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
618 . h(7)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(7)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
619 . h(7)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(7)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
620c
621 IF (nodadt == 0) THEN
622 a7 = a7 + vol(i) * wi * xi * (h(7)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(7)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
623 . h(7)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(7)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
624 . h(7)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(7)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
625 . h(7)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(7)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
626 ELSE
627 a7 = a7 + vol(i) * wi * xi * (
628 . h(7)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
629 . h(7)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
630 . h(7)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
631 . h(7)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
632 . h(7)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
633 . h(7)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
634 . h(7)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
635 . h(7)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
636 ENDIF
637c
638 b7 = l2 * vol(i) * wi * (btb17(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb27(i)*nloc_dmg%UNL(pos2(i)+k-1) +
639 . btb37(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb47(i)*nloc_dmg%UNL(pos4(i)+k-1) +
640 . btb57(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb67(i)*nloc_dmg%UNL(pos6(i)+k-1) +
641 . btb77(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb78(i)*nloc_dmg%UNL(pos8(i)+k-1))
642c
643 c7 = vol(i) * wi * h(7) * var_reg(i,ip)
644c
645 a8 = vol(i) * wi * (h(8)*h(1)*nloc_dmg%UNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%UNL(pos2(i)+k-1) +
646 . h(8)*h(3)*nloc_dmg%UNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%UNL(pos4(i)+k-1) +
647 . h(8)*h(5)*nloc_dmg%UNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%UNL(pos6(i)+k-1) +
648 . h(8)*h(7)*nloc_dmg%UNL(pos7(i)+k-1) + h(8)*h(8)*nloc_dmg%UNL(pos8(i)+k-1))
649c
650 IF (nodadt == 0) THEN
651 a8 = a8 + vol(i) * wi * xi * (h(8)*h(1)*nloc_dmg%VNL(pos1(i)+k-1) + h(8)*h(2)*nloc_dmg%VNL(pos2(i)+k-1) +
652 . h(8)*h(3)*nloc_dmg%VNL(pos3(i)+k-1) + h(8)*h(4)*nloc_dmg%VNL(pos4(i)+k-1) +
653 . h(8)*h(5)*nloc_dmg%VNL(pos5(i)+k-1) + h(8)*h(6)*nloc_dmg%VNL(pos6(i)+k-1) +
654 . h(8)*h(7)*nloc_dmg%VNL(pos7(i)+k-1) + h(8)*h(8)*nloc_dmg%VNL(pos8(i)+k-1))
655 ELSE
656 a8 = a8 + vol(i) * wi * xi * (
657 . h(8)*h(1)*minmasscal*nloc_dmg%VNL(pos1(i)+k-1) +
658 . h(8)*h(2)*minmasscal*nloc_dmg%VNL(pos2(i)+k-1) +
659 . h(8)*h(3)*minmasscal*nloc_dmg%VNL(pos3(i)+k-1) +
660 . h(8)*h(4)*minmasscal*nloc_dmg%VNL(pos4(i)+k-1) +
661 . h(8)*h(5)*minmasscal*nloc_dmg%VNL(pos5(i)+k-1) +
662 . h(8)*h(6)*minmasscal*nloc_dmg%VNL(pos6(i)+k-1) +
663 . h(8)*h(7)*minmasscal*nloc_dmg%VNL(pos7(i)+k-1) +
664 . h(8)*h(8)*minmasscal*nloc_dmg%VNL(pos8(i)+k-1))
665 ENDIF
666c
667 b8 = l2 * vol(i) * wi * (btb18(i)*nloc_dmg%UNL(pos1(i)+k-1) + btb28(i)*nloc_dmg%UNL(pos2(i)+k-1) +
668 . btb38(i)*nloc_dmg%UNL(pos3(i)+k-1) + btb48(i)*nloc_dmg%UNL(pos4(i)+k-1) +
669 . btb58(i)*nloc_dmg%UNL(pos5(i)+k-1) + btb68(i)*nloc_dmg%UNL(pos6(i)+k-1) +
670 . btb78(i)*nloc_dmg%UNL(pos7(i)+k-1) + btb88(i)*nloc_dmg%UNL(pos8(i)+k-1))
671c
672 c8 = vol(i) * wi * h(8) * var_reg(i,ip)
673c
674c Fint = Vol * (L*L * BT*B*Unl + NT*N*Unl + Damp*NT*N*Vnl - NT*Vreg )
675c
676 f1(i,k) = a1 + b1 - c1
677 f2(i,k) = a2 + b2 - c2
678 f3(i,k) = a3 + b3 - c3
679 f4(i,k) = a4 + b4 - c4
680 f5(i,k) = a5 + b5 - c5
681 f6(i,k) = a6 + b6 - c6
682 f7(i,k) = a7 + b7 - c7
683 f8(i,k) = a8 + b8 - c8
684c
685 ! Computing nodal equivalent stiffness
686 IF (nodadt > 0) THEN
687 sti1(i,k) = (abs(l2*btb11(i) + h(1)*h(1)) + abs(l2*btb12(i) + h(1)*h(2)) + abs(l2*btb13(i) + h(1)*h(3)) +
688 . abs(l2*btb14(i) + h(1)*h(4)) + abs(l2*btb15(i) + h(1)*h(5)) + abs(l2*btb16(i) + h(1)*h(6)) +
689 . abs(l2*btb17(i) + h(1)*h(7)) + abs(l2*btb18(i) + h(1)*h(8)))*vol(i) * wi
690 sti2(i,k) = (abs(l2*btb12(i) + h(2)*h(1)) + abs(l2*btb22(i) + h(2)*h(2)) + abs(l2*btb23(i) + h(2)*h(3)) +
691 . abs(l2*btb24(i) + h(2)*h(4)) + abs(l2*btb25(i) + h(2)*h(5)) + abs(l2*btb26(i) + h(2)*h(6)) +
692 . abs(l2*btb27(i) + h(2)*h(7)) + abs(l2*btb28(i) + h(2)*h(8)))*vol(i) * wi
693 sti3(i,k) = (abs(l2*btb13(i) + h(3)*h(1)) + abs(l2*btb23(i) + h(3)*h(2)) + abs(l2*btb33(i) + h(3)*h(3)) +
694 . abs(l2*btb34(i) + h(3)*h(4)) + abs(l2*btb35(i) + h(3)*h(5)) + abs(l2*btb36(i) + h(3)*h(6)) +
695 . abs(l2*btb37(i) + h(3)*h(7)) + abs(l2*btb38(i) + h(3)*h(8)))*vol(i) * wi
696 sti4(i,k) = (abs(l2*btb14(i) + h(4)*h(1)) + abs(l2*btb24(i) + h(4)*h(2)) + abs(l2*btb34(i) + h(4)*h(3)) +
697 . abs(l2*btb44(i) + h(4)*h(4)) + abs(l2*btb45(i) + h(4)*h(5)) + abs(l2*btb46(i) + h(4)*h(6)) +
698 . abs(l2*btb47(i) + h(4)*h(7)) + abs(l2*btb48(i) + h(4)*h(8)))*vol(i) * wi
699 sti5(i,k) = (abs(l2*btb15(i) + h(5)*h(1)) + abs(l2*btb25(i) + h(5)*h(2)) + abs(l2*btb35(i) + h(5)*h(3)) +
700 . abs(l2*btb45(i) + h(5)*h(4)) + abs(l2*btb55(i) + h(5)*h(5)) + abs(l2*btb56(i) + h(5)*h(6)) +
701 . abs(l2*btb57(i) + h(5)*h(7)) + abs(l2*btb58(i) + h(5)*h(8)))*vol(i) * wi
702 sti6(i,k) = (abs(l2*btb16(i) + h(6)*h(1)) + abs(l2*btb26(i) + h(6)*h(2)) + abs(l2*btb36(i) + h(6)*h(3)) +
703 . abs(l2*btb46(i) + h(6)*h(4)) + abs(l2*btb56(i) + h(6)*h(5)) + abs(l2*btb66(i) + h(6)*h(6)) +
704 . abs(l2*btb67(i) + h(6)*h(7)) + abs(l2*btb68(i) + h(6)*h(8)))*vol(i) * wi
705 sti7(i,k) = (abs(l2*btb17(i) + h(7)*h(1)) + abs(l2*btb27(i) + h(7)*h(2)) + abs(l2*btb37(i) + h(7)*h(3)) +
706 . abs(l2*btb47(i) + h(7)*h(4)) + abs(l2*btb57(i) + h(7)*h(5)) + abs(l2*btb67(i) + h(7)*h(6)) +
707 . abs(l2*btb77(i) + h(7)*h(7)) + abs(l2*btb78(i) + h(7)*h(8)))*vol(i) * wi
708 sti8(i,k) = (abs(l2*btb18(i) + h(8)*h(1)) + abs(l2*btb28(i) + h(8)*h(2)) + abs(l2*btb38(i) + h(8)*h(3)) +
709 . abs(l2*btb48(i) + h(8)*h(4)) + abs(l2*btb58(i) + h(8)*h(5)) + abs(l2*btb68(i) + h(8)*h(6)) +
710 . abs(l2*btb78(i) + h(8)*h(7)) + abs(l2*btb88(i) + h(8)*h(8)))*vol(i) * wi
711 ENDIF
712c
713 ! If the element is broken
714 ELSE
715c
716 ! Initial element characteristic length
717 lc(i) = (wi*vol0(i))**third
718c
719 IF (nodadt > 0) THEN
720 ! Non-local absorbing forces
721 f1(i,k) = sqrt(nloc_dmg%MASS(pos1(i)+k-1)/nloc_dmg%MASS0(pos1(i)+k-1))*h(1)*zeta*sspnl*half*
722 . (nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i)+k-1))*(three/four)*(lc(i)**2)
723 f2(i,k) = sqrt(nloc_dmg%MASS(pos2(i)+k-1)/nloc_dmg%MASS0(pos2(i)+k-1))*h(2)*zeta*sspnl*half*
724 . (nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
725 f3(i,k) = sqrt(nloc_dmg%MASS(pos3(i)+k-1)/nloc_dmg%MASS0(pos3(i)+k-1))*h(3)*zeta*sspnl*half*
726 . (nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three/four)*(lc(i)**2)
727 f4(i,k) = sqrt(nloc_dmg%MASS(pos4(i)+k-1)/nloc_dmg%MASS0(pos4(i)+k-1))*h(4)*zeta*sspnl*half*
728 . (nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
729 f5(i,k) = sqrt(nloc_dmg%MASS(pos5(i)+k-1)/nloc_dmg%MASS0(pos5(i)+k-1))*h(5)*zeta*sspnl*half*
730 . (nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
731 f6(i,k) = sqrt(nloc_dmg%MASS(pos6(i)+k-1)/nloc_dmg%MASS0(pos6(i)+k-1))*h(6)*zeta*sspnl*half*
732 . (nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
733 f7(i,k) = sqrt(nloc_dmg%MASS(pos7(i)+k-1)/nloc_dmg%MASS0(pos7(i)+k-1))*h(7)*zeta*sspnl*half*
734 . (nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
735 f8(i,k) = sqrt(nloc_dmg%MASS(pos8(i)+k-1)/nloc_dmg%MASS0(pos8(i)+k-1))*h(8)*zeta*sspnl*half*
736 . (nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
737 ! Computing nodal equivalent stiffness
738 sti1(i,k) = em20
739 sti2(i,k) = em20
740 sti3(i,k) = em20
741 sti4(i,k) = em20
742 sti5(i,k) = em20
743 sti6(i,k) = em20
744 sti7(i,k) = em20
745 sti8(i,k) = em20
746 ELSE
747 ! Non-local absorbing forces
748 f1(i,k) = h(1)*zeta*sspnl*half*(nloc_dmg%VNL(pos1(i)+k-1)+nloc_dmg%VNL_OLD(pos1(i)+k-1))*(three/four)*(lc(i)**2)
749 f2(i,k) = h(2)*zeta*sspnl*half*(nloc_dmg%VNL(pos2(i)+k-1)+nloc_dmg%VNL_OLD(pos2(i)+k-1))*(three/four)*(lc(i)**2)
750 f3(i,k) = h(3)*zeta*sspnl*half*(nloc_dmg%VNL(pos3(i)+k-1)+nloc_dmg%VNL_OLD(pos3(i)+k-1))*(three/four)*(lc(i)**2)
751 f4(i,k) = h(4)*zeta*sspnl*half*(nloc_dmg%VNL(pos4(i)+k-1)+nloc_dmg%VNL_OLD(pos4(i)+k-1))*(three/four)*(lc(i)**2)
752 f5(i,k) = h(5)*zeta*sspnl*half*(nloc_dmg%VNL(pos5(i)+k-1)+nloc_dmg%VNL_OLD(pos5(i)+k-1))*(three/four)*(lc(i)**2)
753 f6(i,k) = h(6)*zeta*sspnl*half*(nloc_dmg%VNL(pos6(i)+k-1)+nloc_dmg%VNL_OLD(pos6(i)+k-1))*(three/four)*(lc(i)**2)
754 f7(i,k) = h(7)*zeta*sspnl*half*(nloc_dmg%VNL(pos7(i)+k-1)+nloc_dmg%VNL_OLD(pos7(i)+k-1))*(three/four)*(lc(i)**2)
755 f8(i,k) = h(8)*zeta*sspnl*half*(nloc_dmg%VNL(pos8(i)+k-1)+nloc_dmg%VNL_OLD(pos8(i)+k-1))*(three/four)*(lc(i)**2)
756 ENDIF
757 ENDIF
758 ENDDO
759 ENDDO
760c
761 !-----------------------------------------------------------------------
 762       ! Assembly of the non-local forces
763 !-----------------------------------------------------------------------
764c
765 ! If PARITH/OFF
766 IF (iparit == 0) THEN
767 ! Loop over elements
768 DO i=1,nel
769 ! Loop over non-local degrees of freedom
770#include "vectorize.inc"
771 DO k=1,nlay
772 ! Assembling the forces in the classic way
773 nloc_dmg%FNL(pos1(i)+k-1,itask+1) = nloc_dmg%FNL(pos1(i)+k-1,itask+1) - f1(i,k)
774 nloc_dmg%FNL(pos2(i)+k-1,itask+1) = nloc_dmg%FNL(pos2(i)+k-1,itask+1) - f2(i,k)
775 nloc_dmg%FNL(pos3(i)+k-1,itask+1) = nloc_dmg%FNL(pos3(i)+k-1,itask+1) - f3(i,k)
776 nloc_dmg%FNL(pos4(i)+k-1,itask+1) = nloc_dmg%FNL(pos4(i)+k-1,itask+1) - f4(i,k)
777 nloc_dmg%FNL(pos5(i)+k-1,itask+1) = nloc_dmg%FNL(pos5(i)+k-1,itask+1) - f5(i,k)
778 nloc_dmg%FNL(pos6(i)+k-1,itask+1) = nloc_dmg%FNL(pos6(i)+k-1,itask+1) - f6(i,k)
779 nloc_dmg%FNL(pos7(i)+k-1,itask+1) = nloc_dmg%FNL(pos7(i)+k-1,itask+1) - f7(i,k)
780 nloc_dmg%FNL(pos8(i)+k-1,itask+1) = nloc_dmg%FNL(pos8(i)+k-1,itask+1) - f8(i,k)
781 IF (nodadt > 0) THEN
782 ! Spectral radius of stiffness matrix
783 maxstif = max(sti1(i,k),sti2(i,k),sti3(i,k),sti4(i,k),
784 . sti5(i,k),sti6(i,k),sti7(i,k),sti8(i,k))
785 ! Computing nodal stiffness
786 nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos1(i)+k-1,itask+1) + maxstif
787 nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos2(i)+k-1,itask+1) + maxstif
788 nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos3(i)+k-1,itask+1) + maxstif
789 nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos4(i)+k-1,itask+1) + maxstif
790 nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos5(i)+k-1,itask+1) + maxstif
791 nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos6(i)+k-1,itask+1) + maxstif
792 nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos7(i)+k-1,itask+1) + maxstif
793 nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) = nloc_dmg%STIFNL(pos8(i)+k-1,itask+1) + maxstif
794 ENDIF
795 ENDDO
796 ENDDO
797c
798 ! If PARITH/ON
799 ELSE
800 ! Loop over additional d.o.fs
801 DO j = 1,nlay
802c
803 ! Integration point number
804 ip = ir + ((is-1) + (j-1)*npts)*nptr
805c
806 ! Loop over elements
807 DO i=1,nel
808 ii = i + nft
809c
810 ! Spectral radius of stiffness matrix
811 IF (nodadt > 0) THEN
812 maxstif = max(sti1(i,j),sti2(i,j),sti3(i,j),sti4(i,j),
813 . sti5(i,j),sti6(i,j),sti7(i,j),sti8(i,j))
814 ENDIF
815c
816 k = nloc_dmg%IADS(1,ii)
817 IF (ip == 1) THEN
818 nloc_dmg%FSKY(k,j) = -f1(i,j)
819 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
820 ELSE
821 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f1(i,j)
822 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
823 ENDIF
824c
825 k = nloc_dmg%IADS(2,ii)
826 IF (ip == 1) THEN
827 nloc_dmg%FSKY(k,j) = -f2(i,j)
828 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
829 ELSE
830 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f2(i,j)
831 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
832 ENDIF
833c
834 k = nloc_dmg%IADS(3,ii)
835 IF (ip == 1) THEN
836 nloc_dmg%FSKY(k,j) = -f3(i,j)
837 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
838 ELSE
839 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f3(i,j)
840 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
841 ENDIF
842c
843 k = nloc_dmg%IADS(4,ii)
844 IF (ip == 1) THEN
845 nloc_dmg%FSKY(k,j) = -f4(i,j)
846 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
847 ELSE
848 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f4(i,j)
849 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
850 ENDIF
851c
852 k = nloc_dmg%IADS(5,ii)
853 IF (ip == 1) THEN
854 nloc_dmg%FSKY(k,j) = -f5(i,j)
855 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
856 ELSE
857 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f5(i,j)
858 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
859 ENDIF
860c
861 k = nloc_dmg%IADS(6,ii)
862 IF (ip == 1) THEN
863 nloc_dmg%FSKY(k,j) = -f6(i,j)
864 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
865 ELSE
866 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f6(i,j)
867 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
868 ENDIF
869c
870 k = nloc_dmg%IADS(7,ii)
871 IF (ip == 1) THEN
872 nloc_dmg%FSKY(k,j) = -f7(i,j)
873 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
874 ELSE
875 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f7(i,j)
876 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
877 ENDIF
878c
879 k = nloc_dmg%IADS(8,ii)
880 IF (ip == 1) THEN
881 nloc_dmg%FSKY(k,j) = -f8(i,j)
882 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = maxstif
883 ELSE
884 nloc_dmg%FSKY(k,j) = nloc_dmg%FSKY(k,j) - f8(i,j)
885 IF (nodadt > 0) nloc_dmg%STSKY(k,j) = nloc_dmg%STSKY(k,j) + maxstif
886 ENDIF
887c
888 ENDDO
889 ENDDO
890 ENDIF
891c
892 !-----------------------------------------------------------------------
893 ! Computing non-local timestep
894 !-----------------------------------------------------------------------
895 IF (nodadt == 0) THEN
896 DO i = 1,nel
897 ! If the element is not broken, normal computation
898 IF (offg(i)/=zero) THEN
899 ! Non-local critical time-step in the plane
900 dtnl = (two*(min((vol(i))**third,le_max))*sqrt(three*zeta))/
901 . sqrt(twelve*l2 + (min((vol(i))**third,le_max))**2)
902 ! Non-local critical time-step in the thickness
903 IF ((l2>zero).AND.(nlay > 1)) THEN
904 dtnl_th = (two*(min(lthk(i),le_max))*sqrt(three*zeta))/
905 . sqrt(twelve*l2 + (min(lthk(i),le_max))**2)
906 ELSE
907 dtnl_th = ep20
908 ENDIF
909 ! Retaining the minimal value
910 dt2t = min(dt2t,dtfac1(1)*cdamp*dtnl_th,dtfac1(1)*cdamp*dtnl)
911 ENDIF
912 ENDDO
913 ENDIF
914c-----------
 915       ! Deallocation of work arrays
916 IF (ALLOCATED(btb11)) DEALLOCATE(btb11)
917 IF (ALLOCATED(btb12)) DEALLOCATE(btb12)
918 IF (ALLOCATED(btb13)) DEALLOCATE(btb13)
919 IF (ALLOCATED(btb14)) DEALLOCATE(btb14)
920 IF (ALLOCATED(btb15)) DEALLOCATE(btb15)
921 IF (ALLOCATED(btb16)) DEALLOCATE(btb16)
922 IF (ALLOCATED(btb17)) DEALLOCATE(btb17)
923 IF (ALLOCATED(btb18)) DEALLOCATE(btb18)
924 IF (ALLOCATED(btb22)) DEALLOCATE(btb22)
925 IF (ALLOCATED(btb23)) DEALLOCATE(btb23)
926 IF (ALLOCATED(btb24)) DEALLOCATE(btb24)
927 IF (ALLOCATED(btb25)) DEALLOCATE(btb25)
928 IF (ALLOCATED(btb26)) DEALLOCATE(btb26)
929 IF (ALLOCATED(btb27)) DEALLOCATE(btb27)
930 IF (ALLOCATED(btb28)) DEALLOCATE(btb28)
931 IF (ALLOCATED(btb33)) DEALLOCATE(btb33)
932 IF (ALLOCATED(btb34)) DEALLOCATE(btb34)
933 IF (ALLOCATED(btb35)) DEALLOCATE(btb35)
934 IF (ALLOCATED(btb36)) DEALLOCATE(btb36)
935 IF (ALLOCATED(btb37)) DEALLOCATE(btb37)
936 IF (ALLOCATED(btb38)) DEALLOCATE(btb38)
937 IF (ALLOCATED(btb44)) DEALLOCATE(btb44)
938 IF (ALLOCATED(btb45)) DEALLOCATE(btb45)
939 IF (ALLOCATED(btb46)) DEALLOCATE(btb46)
940 IF (ALLOCATED(btb47)) DEALLOCATE(btb47)
941 IF (ALLOCATED(btb48)) DEALLOCATE(btb48)
942 IF (ALLOCATED(btb55)) DEALLOCATE(btb55)
943 IF (ALLOCATED(btb56)) DEALLOCATE(btb56)
944 IF (ALLOCATED(btb57)) DEALLOCATE(btb57)
945 IF (ALLOCATED(btb58)) DEALLOCATE(btb58)
946 IF (ALLOCATED(btb66)) DEALLOCATE(btb66)
947 IF (ALLOCATED(btb67)) DEALLOCATE(btb67)
948 IF (ALLOCATED(btb68)) DEALLOCATE(btb68)
949 IF (ALLOCATED(btb77)) DEALLOCATE(btb77)
950 IF (ALLOCATED(btb78)) DEALLOCATE(btb78)
951 IF (ALLOCATED(btb88)) DEALLOCATE(btb88)
952 IF (ALLOCATED(pos1)) DEALLOCATE(pos1)
953 IF (ALLOCATED(pos2)) DEALLOCATE(pos2)
954 IF (ALLOCATED(pos3)) DEALLOCATE(pos3)
955 IF (ALLOCATED(pos4)) DEALLOCATE(pos4)
956 IF (ALLOCATED(pos5)) DEALLOCATE(pos5)
957 IF (ALLOCATED(pos6)) DEALLOCATE(pos6)
958 IF (ALLOCATED(pos7)) DEALLOCATE(pos7)
959 IF (ALLOCATED(pos8)) DEALLOCATE(pos8)
960 IF (ALLOCATED(sti1)) DEALLOCATE(sti1)
961 IF (ALLOCATED(sti2)) DEALLOCATE(sti2)
962 IF (ALLOCATED(sti3)) DEALLOCATE(sti3)
963 IF (ALLOCATED(sti4)) DEALLOCATE(sti4)
964 IF (ALLOCATED(sti5)) DEALLOCATE(sti5)
965 IF (ALLOCATED(sti6)) DEALLOCATE(sti6)
966 IF (ALLOCATED(sti7)) DEALLOCATE(sti7)
967 IF (ALLOCATED(sti8)) DEALLOCATE(sti8)
968c
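
The per-node computations in the listing are fully unrolled over the eight nodes and vectorized over the element loop (see the vectorize.inc directives). For readability, the following free-form sketch condenses the standard branch (element not broken, nodadt == 0) of that pattern for a single integration point of a single element. It is illustrative only and not part of the OpenRadioss source; the precision kind, array shapes, and sample values are assumptions.

! Illustrative sketch only (not OpenRadioss code): condenses the unrolled
! per-node non-local force computation of s8cfint_reg into loops.
program nonlocal_force_sketch
  implicit none
  integer, parameter :: wp = kind(1.0d0)  ! assumed precision of my_real
  integer  :: m, n
  real(wp) :: h(8)       ! shape functions at the integration point
  real(wp) :: g(3,8)     ! gradients: rows = (px,py,pz), columns = nodes
  real(wp) :: btb(8,8)   ! B^T B products (cf. btb11..btb88 in the listing)
  real(wp) :: unl(8)     ! non-local cumulated values at the 8 nodes
  real(wp) :: vnl(8)     ! non-local velocities at the 8 nodes
  real(wp) :: f(8)       ! resulting non-local internal forces
  real(wp) :: vol, wi, l2, xi, vreg

  ! Hypothetical element data standing in for vol(i), wi, l2, xi, var_reg(i,ip)
  vol = 1.0_wp;  wi = 0.125_wp;  l2 = 4.0_wp;  xi = 0.1_wp;  vreg = 0.5_wp
  h   = 0.125_wp
  g   = 0.25_wp
  unl = 0.3_wp
  vnl = 0.0_wp

  ! btb(m,n) = px_m*px_n + py_m*py_n + pz_m*pz_n
  btb = matmul(transpose(g), g)

  ! f_m = a_m + b_m - c_m
  !     = vol*wi*( sum_n [ (l2*btb(m,n) + h(m)*h(n))*unl(n)
  !                        + xi*h(m)*h(n)*vnl(n) ] - h(m)*vreg )
  do m = 1, 8
    f(m) = 0.0_wp
    do n = 1, 8
      f(m) = f(m) + (l2*btb(m,n) + h(m)*h(n))*unl(n) + xi*h(m)*h(n)*vnl(n)
    end do
    f(m) = vol*wi*(f(m) - h(m)*vreg)
  end do

  print '(8f10.5)', f
end program nonlocal_force_sketch

In s8cfint_reg itself this double loop is written out explicitly as f1..f8 = a* + b* - c* so that the element loop vectorizes, and the resulting forces are then scattered either to nloc_dmg%FNL (PARITH/OFF) or to nloc_dmg%FSKY (PARITH/ON).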