44#include "implicit_f.inc"
48#include "com_xfem1.inc"
53 INTEGER BUFLEN,NPT,ITY,ISNOD,NEWLEN,INLOC
54 INTEGER,
INTENT(IN) :: IDAMP_FREQ_RANGE
55 TYPE(elbuf_struct_) ,
TARGET :: ELBUF_STR
59 INTEGER I,K,IL,IR,IS,IT,IAD,ERR,NUVAR,NVARTMP,NVARTMP_EOS,IGTYP,IXFEM,NLAY,
60 . nintlay,nptr,npts,nptt,nel,npg,ipt,len1,len2,ilaw,imat,ieos,nonl,
61 . ivisc,iporo,nfail,nvar_mat,nvar_eos,nvar_visc,nvar_loc,nvar_lay,npttot,iadp,
64 . g_gama,g_sig,g_off,g_noff,g_eint,g_eins,g_temp,
65 . g_rho,g_pla,g_vol,g_epsd,g_qvis,g_deltax,g_tb,g_rk,g_re,g_wpla,
66 . g_smstr,g_hourg,g_bfrac,g_thk,g_for,g_mom,g_tag22,g_stra,g_sigi,
67 . g_dmg,g_forpg,g_mompg,g_strpg,g_uelr,g_uelr1,g_epsq,g_ierr,
68 . g_damdl,g_forth,g_eintth,g_fill,g_seq,g_strw,g_strwpg,g_thk_i,
69 . g_jac_i,g_area,g_skew,g_length,g_totdepl,g_totrot,g_forep,g_momep,
70 . g_dep_in_tens,g_dep_in_comp,g_rot_in_tens,g_rot_in_comp,
71 . g_posx,g_posy,g_posz,g_posxx,g_posyy,g_poszz,g_yield,g_length_err,
72 . g_dv,g_dfs,g_skew_err,g_e6,g_ruptcrit,g_mass,g_v_repcvt,
73 . g_vr_repcvt,g_nuvar,g_nuvarn,g_inifric,g_dt,g_isms,g_strhg,g_etotsh,
74 . g_bpreld,g_aburn,g_mu,g_planl,g_epsdnl,g_dt_piter_old,g_dt_piter,g_tempg,
75 . g_cor_nf,g_cor_fr,g_cor_xr,g_defini,g_forini,g_idt_tsh,
76 . g_forpgpinch,g_mompgpinch,g_epgpinchxz,g_epgpinchyz,g_epgpinchzz,
77 . g_skew_id,g_maxfrac,g_maxeps,g_gama_r,g_slipring_id,g_slipring_strand,g_retractor_id,
78 . g_ringslip,g_add_node,g_update,g_intvar,g_betaorth,g_amu,g_slipring_fram_id,g_fram_factor,
79 . g_tm_yield,g_tm_seq,g_tm_eint,g_tm_dmg,g_tm_sig,g_tm_stra,g_tsaiwu,g_dmgscl,
80 . g_sh_ioffset,g_eint_distor,g_for_g,g_forpg_g,g_yield_in_comp,g_xxold_in_comp
82 . ly_dmg,ly_gama,ly_dira,ly_dirb,ly_crkdir,ly_plapt,ly_sigpt,
83 . ly_hourg,ly_uelr,ly_uelr1,ly_offpg,ly_off,len_plapt,len_sigpt
87 . l_off,l_eint,l_eins,l_rho,l_dp_drho,l_vol,l_pla,l_tb,l_temp,
88 . l_rk,l_re,l_vk,l_rob,l_sig,l_sigv,l_sigl,l_siga,l_sigd,l_sigb,
89 . l_sigc,l_sigf,l_stra,l_epsa,l_epsd,l_epsq,l_epsf,l_crak,l_wpla,
90 . l_ang,l_sf,l_qvis,l_deltax,l_gama,
91 . l_dam,l_dsum,l_dglo,l_epe,l_epc,l_xst,l_ssp,l_z,l_frac,l_visc,
92 . l_thk,l_for,l_mom,l_eps,l_smstr,l_bfrac,l_dmg,l_forth,l_eintth,
93 . l_seq,l_sigply,l_jac_i,l_fac_yld,l_aburn,l_mu,l_planl,l_epsdnl,
94 . l_dmgscl,l_pij,l_vol0dp,l_tsaiwu
96 . lf_dam,lf_dammx,lf_damini,lf_tdel,lf_indx,lf_off
98 .
ALLOCATABLE,
DIMENSION(:) :: rbuf
100 DOUBLE PRECISION,
DIMENSION(:),
ALLOCATABLE :: DP_RBUF
104 TYPE(buf_mat_) ,
POINTER :: MATBUF
105 TYPE(buf_lay_) ,
POINTER :: BUFLY
106 TYPE(buf_intlay_) ,
POINTER :: INTLAY
107 TYPE(buf_visc_) ,
POINTER :: VISCBUF
108 TYPE(g_bufel_) ,
POINTER :: GBUF
109 TYPE(l_bufel_) ,
POINTER :: LBUF
110 TYPE(buf_intloc_) ,
POINTER :: ILBUF
111 TYPE(fail_loc_) ,
POINTER :: FLOC
112 TYPE(buf_nloc_) ,
POINTER :: BUFNL
113 TYPE(buf_nlocts_) ,
POINTER :: BUFNLTS
114 TYPE(l_bufel_dir_) ,
POINTER :: LBUF_DIR
126 ALLOCATE (rbuf(buflen) ,stat=err)
135 igtyp = nint(rbuf(iad+1))
137 nel = nint(rbuf(iad+1))
139 nlay = nint(rbuf(iad+1))
141 nintlay= nint(rbuf(iad+1))
143 nptr = nint(rbuf(iad+1))
145 npts = nint(rbuf(iad+1))
147 nptt = nint(rbuf(iad+1))
149 ixfem = nint(rbuf(iad+1))
151 nxel = nint(rbuf(iad+1))
153 idrape = nint(rbuf(iad+1))
156 elbuf_str%IGTYP = igtyp
158 elbuf_str%NLAY = nlay
159 elbuf_str%NINTLAY= nintlay
160 elbuf_str%NPTR = nptr
161 elbuf_str%NPTS = npts
162 elbuf_str%NPTT = nptt
163 elbuf_str%IXFEM = ixfem
164 elbuf_str%NXEL = nxel
165 elbuf_str%IDRAPE = idrape
166 npt = nlay*nptr*npts*nptt
168 IF (ity == 1 .OR. ity == 2 .OR. ity == 51 .OR. ity == 101)
THEN
171 ELSEIF (ity == 3 .OR. ity == 7)
THEN
174 ELSEIF (ity == 4 .OR. ity == 5 .OR. ity == 6 .OR. ity == 100)
THEN
181 IF (idamp_freq_range > 0)
THEN
182 ALLOCATE (elbuf_str%DAMP_RANGE%ALPHA(3) ,stat=err)
183 ALLOCATE (elbuf_str%DAMP_RANGE%TAU(3) ,stat=err)
184 elbuf_str%DAMP_RANGE%ALPHA(1:3) = rbuf(iad+1:iad+3)
185 elbuf_str%DAMP_RANGE%TAU(1:3) = rbuf(iad+4:iad+6)
189 ALLOCATE (elbuf_str%BUFLY(nlay) ,stat=err)
203 ALLOCATE (elbuf_str%INTLAY(nintlay) ,stat=err)
205 ALLOCATE(elbuf_str%INTLAY(il)%ILBUF(nptr,npts),stat=err)
206 ALLOCATE(elbuf_str%INTLAY(il)%MAT (nptr,npts),stat=err)
207 ALLOCATE(elbuf_str%INTLAY(il)%FAIL (nptr,npts),stat=err)
212 DO il = 1,elbuf_str%NLAY
215 bufly => elbuf_str%BUFLY(il)
219 imat = nint(rbuf(iad+1))
221 ieos = nint(rbuf(iad+1))
223 ivisc = nint(rbuf(iad+1))
225 iporo = nint(rbuf(iad+1))
227 nfail = nint(rbuf(iad+1))
229 nvar_mat = nint(rbuf(iad+1))
231 nvar_eos = nint(rbuf(iad+1))
233 nvartmp = nint(rbuf(iad+1))
235 nvartmp_eos = nint(rbuf(iad+1))
237 nvar_visc= nint(rbuf(iad+1))
239 nvar_loc = nint(rbuf(iad+1))
241 nvar_lay = nint(rbuf(iad+1))
243 nptt = nint(rbuf(iad+1))
248 ly_dmg = nint(rbuf(iad+1))
250 ly_gama = nint(rbuf(iad+1))
252 ly_dira = nint(rbuf(iad+1))
254 ly_dirb = nint(rbuf(iad+1))
258 ly_plapt = nint(rbuf(iad+1))
260 ly_sigpt = nint(rbuf(iad
262 ly_hourg = nint(rbuf(iad+1))
266 ly_uelr1 = nint(rbuf(iad+1))
268 ly_offpg = nint(rbuf(iad+1))
270 ly_off = nint(rbuf(iad+1))
275 l_off = nint(rbuf(iad+1))
277 l_gama = nint(rbuf(iad+1))
279 l_stra = nint(rbuf(iad+1))
281 l_frac = nint(rbuf(iad+1))
283 l_bfrac = nint(rbuf(iad+1))
285 l_eint = nint(rbuf(iad+1))
287 l_eins = nint(rbuf(iad+1))
289 l_rho = nint(rbuf(iad+1))
291 l_dp_drho= nint(rbuf(iad+1))
293 l_qvis = nint(rbuf(iad+1))
295 l_deltax = nint(rbuf(iad+1))
297 l_vol = nint(rbuf(iad+1))
299 l_epsa = nint(rbuf(iad+1))
301 l_epsd = nint(rbuf(iad+1))
303 l_epsq = nint(rbuf(iad+1))
305 l_epsf = nint(rbuf(iad+1))
307 l_pla = nint(rbuf(iad+1))
309 l_wpla = nint(rbuf(iad+1))
311 l_temp = nint(rbuf(iad+1))
313 l_tb = nint(rbuf(iad+1))
315 l_rk = nint(rbuf(iad+1))
317 l_re = nint(rbuf(iad+1))
319 l_vk = nint(rbuf(iad+1))
321 l_sf = nint(rbuf(iad+1))
323 l_rob = nint(rbuf(iad+1))
325 l_dam = nint(rbuf(iad+1))
327 l_dsum = nint(rbuf(iad+1))
329 l_dglo = nint(rbuf(iad+1))
331 l_crak = nint(rbuf(iad+1))
333 l_ang = nint(rbuf(iad+1))
335 l_epe = nint(rbuf(iad+1))
337 l_epc = nint(rbuf(iad+1))
339 l_xst = nint(rbuf(iad+1))
341 l_ssp = nint(rbuf(iad+1))
343 l_z = nint(rbuf(iad+1))
345 l_visc = nint(rbuf(iad+1))
347 l_sigl = nint(rbuf(iad+1))
349 l_sigv = nint(rbuf(iad+1))
351 l_siga = nint(rbuf(iad+1))
353 l_sigb = nint(rbuf(iad+1))
355 l_sigc = nint(rbuf(iad+1))
357 l_sigd = nint(rbuf(iad+1))
359 l_sigf = nint(rbuf(iad+1))
361 l_sig = nint(rbuf(iad+1))
363 l_sigply = nint(rbuf(iad+1))
365 l_for = nint(rbuf(iad+1))
367 l_mom = nint(rbuf(iad+1))
369 l_thk = nint(rbuf(iad+1))
371 l_smstr = nint(rbuf(iad+1))
373 l_dmg = nint(rbuf(iad+1))
375 l_forth = nint(rbuf(iad
377 l_eintth = nint(rbuf(iad+1))
379 l_seq = nint(rbuf(iad+1))
381 l_jac_i = nint(rbuf(iad+1))
383 l_fac_yld = nint(rbuf(iad+1))
385 l_aburn = nint(rbuf(iad+1))
387 l_mu = nint(rbuf(iad+1))
389 l_planl = nint(rbuf(iad+1))
391 l_epsdnl = nint(rbuf(iad+1))
393 l_dmgscl = nint(rbuf(iad+1))
395 l_tsaiwu = nint(rbuf(iad+1))
397 l_pij = nint(rbuf(iad+1))
399 l_vol0dp = nint(rbuf(iad+1))
409 bufly%NVAR_MAT = nvar_mat
410 bufly%NVAR_EOS = nvar_eos
411 bufly%NVARTMP = nvartmp
412 bufly%NVARTMP_EOS = nvartmp_eos
413 bufly%NVAR_VISC = nvar_visc
414 bufly%NVAR_LOC = nvar_loc
415 bufly%NVAR_LAY = nvar_lay
418 bufly%LY_DMG = ly_dmg
419 bufly%LY_GAMA = ly_gama
420 bufly%LY_DIRA = ly_dira
421 bufly%LY_DIRB = ly_dirb
422 bufly%LY_CRKDIR = ly_crkdir
423 bufly%LY_PLAPT = ly_plapt
424 bufly%LY_SIGPT = ly_sigpt
425 bufly%LY_HOURG = ly_hourg
426 bufly%LY_UELR = ly_uelr
427 bufly%LY_UELR1 = ly_uelr1
428 bufly%LY_OFFPG = ly_offpg
429 bufly%LY_OFF = ly_off
432 bufly%L_GAMA = l_gama
433 bufly%L_STRA = l_stra
434 bufly%L_FRAC = l_frac
435 bufly%L_BFRAC = l_bfrac
436 bufly%L_EINT = l_eint
437 bufly%L_EINS = l_eins
439 bufly%L_DP_DRHO = l_dp_drho
440 bufly%L_QVIS = l_qvis
441 bufly%L_DELTAX = l_deltax
443 bufly%L_EPSA = l_epsa
444 bufly%L_EPSD = l_epsd
445 bufly%L_EPSQ = l_epsq
446 bufly%L_EPSF = l_epsf
448 bufly%L_WPLA = l_wpla
449 bufly%L_TEMP = l_temp
457 bufly%L_DSUM = l_dsum
458 bufly%L_DGLO = l_dglo
459 bufly%L_CRAK = l_crak
466 bufly%L_VISC = l_visc
467 bufly%L_SIGL = l_sigl
468 bufly%L_SIGV = l_sigv
469 bufly%L_SIGA = l_siga
470 bufly%L_SIGB = l_sigb
471 bufly%L_SIGC = l_sigc
472 bufly%L_SIGD = l_sigd
473 bufly%L_SIGF = l_sigf
475 bufly%L_SIGPLY = l_sigply
479 bufly%L_SMSTR = l_smstr
481 bufly%L_FORTH = l_forth
482 bufly%L_EINTTH = l_eintth
484 bufly%L_JAC_I = l_jac_i
485 bufly%L_FAC_YLD = l_fac_yld
486 bufly%L_ABURN = l_aburn
488 bufly%L_PLANL = l_planl
489 bufly%L_EPSDNL = l_epsdnl
490 bufly%L_DMGSCL = l_dmgscl
491 bufly%L_TSAIWU = l_tsaiwu
493 bufly%L_VOL0DP = l_vol0dp
495 IF (igtyp == 51 .OR. igtyp == 52)
THEN
496 nptt = elbuf_str%BUFLY(il)%NPTT
498 nptt = elbuf_str%NPTT
500 ALLOCATE(elbuf_str%BUFLY(il)%LBUF(nptr,npts,nptt),stat=err)
501 ALLOCATE(elbuf_str%BUFLY(il)%MAT (nptr,npts,nptt),stat=err)
502 ALLOCATE(elbuf_str%BUFLY(il)%FAIL(nptr,npts,nptt),stat=err)
503 ALLOCATE(elbuf_str%BUFLY(il)%PROP(nptr,npts,nptt),stat=err)
504 ALLOCATE(elbuf_str%BUFLY(il)%EOS (nptr,npts,nptt),stat=err)
505 ALLOCATE(elbuf_str%BUFLY(il)%VISC(nptr,npts,nptt),stat=err
506 ALLOCATE(elbuf_str%BUFLY(il)%PORO(nptr,npts,nptt),stat=err)
507 IF(idrape > 0 .AND. (igtyp == 51 .OR. igtyp ==52))
508 .
ALLOCATE(elbuf_str%BUFLY(il)%LBUF_DIR(nptt),stat=err)
513 ALLOCATE(bufly%DMG (nel*ly_dmg) ,stat=err)
514 bufly%DMG(1:nel*ly_dmg) = rbuf(iad+1:iad+nel*ly_dmg)
516 ALLOCATE(bufly%GAMA (nel*ly_gama) ,stat=err)
517 bufly%GAMA(1:nel*ly_gama) = rbuf(iad+1:iad+nel*ly_gama)
518 iad = iad+nel*ly_gama
519 IF(idrape == 0 .OR. (idrape > 0 .AND. igtyp == 17))
THEN
520 ALLOCATE(bufly%DIRA (nel*ly_dira) ,stat=err)
521 bufly%DIRA(1:nel*ly_dira) = rbuf(iad+1:iad+nel*ly_dira)
522 iad = iad+nel*ly_dira
523 ALLOCATE(bufly%DIRB (nel*ly_dirb) ,stat=err)
524 bufly%DIRB(1:nel*ly_dirb) = rbuf(iad+1:iad+nel*ly_dirb)
525 iad = iad+nel*ly_dirb
527 ALLOCATE(bufly%CRKDIR(nel*ly_crkdir) ,stat=err)
528 bufly%CRKDIR(1:nel*ly_crkdir) = rbuf(iad+1:iad+nel*ly_crkdir)
529 iad = iad+nel*ly_crkdir
533 len_plapt = nel*ly_plapt
534 len_sigpt = nel*ly_sigpt
536 IF (igtyp /= 51 .AND. igtyp /= 52)
THEN
537 len_plapt = nel*ly_plapt*npt
538 len_sigpt = nel*ly_sigpt*npt
540 len_plapt = nel*ly_plapt*bufly%NPTT
541 len_sigpt = nel*ly_sigpt*bufly%NPTT
544 ALLOCATE(bufly%PLAPT(len_plapt) ,stat=err)
545 bufly%PLAPT(1:len_plapt) = rbuf(iad+1:iad+len_plapt)
546 iad = iad + len_plapt
547 ALLOCATE(bufly%SIGPT (len_sigpt) ,stat=err)
548 bufly%SIGPT(1:len_sigpt) = rbuf(iad+1:iad+len_sigpt)
550 ELSEIF (npg == 1)
THEN
555 ALLOCATE(bufly%HOURG(nel*ly_hourg) ,stat=err)
556 bufly%HOURG(1:nel*ly_hourg) = rbuf(iad+1:iad+nel*ly_hourg)
557 iad = iad+nel*ly_hourg
558 ALLOCATE(bufly%UELR(nel*ly_uelr) ,stat=err)
559 bufly%UELR(1:nel*ly_uelr) = rbuf(iad+1:iad+nel*ly_uelr)
560 iad = iad+nel*ly_uelr
561 ALLOCATE(bufly%UELR1(nel*ly_uelr1) ,stat=err)
562 bufly%UELR1(1:nel*ly_uelr1) = rbuf(iad+1:iad+nel*ly_uelr1)
563 iad = iad+nel*ly_uelr1
564 ALLOCATE(bufly%OFFPG(nel*ly_offpg) ,stat=err)
565 bufly%OFFPG(1:nel*ly_offpg) = rbuf(iad+1:iad+nel*ly_offpg)
566 iad = iad+nel*ly_offpg
567 ALLOCATE(bufly%OFF(nel*ly_off) ,stat=err)
568 bufly%OFF(1:nel*ly_off) = rbuf(iad+1:iad+nel*ly_off)
576 lbuf => elbuf_str%BUFLY(il)%LBUF(ir,is,it)
578 lbuf%MLAW = nint(rbuf(iad+1))
580 lbuf%lawID = nint(rbuf(iad+1))
583 ALLOCATE(lbuf%OFF (nel*l_off) ,stat=err)
584 lbuf%OFF(1:nel*l_off) = rbuf(iad+1:iad+nel*l_off)
586 ALLOCATE (lbuf%GAMA(nel*l_gama)
587 lbuf%GAMA(1:nel*l_gama) = rbuf(iad+1:iad+nel*l_gama)
589 ALLOCATE(lbuf%STRA (nel*l_stra) ,stat
590 lbuf%STRA(1:nel*l_stra) = rbuf(iad+1:iad+nel*l_stra)
592 ALLOCATE(lbuf%FRAC (nel*l_frac) ,stat
593 lbuf%FRAC(1:nel*l_frac) = rbuf(iad+1:iad+nel*l_frac)
595 ALLOCATE(lbuf%BFRAC(nel*l_bfrac) ,stat=err)
596 lbuf%BFRAC(1:nel*l_bfrac) = rbuf(iad+1:iad+nel*l_bfrac)
597 iad = iad+nel*l_bfrac
598 ALLOCATE(lbuf%EINT(nel*l_eint) ,stat=err)
599 lbuf%EINT(1:nel*l_eint) = rbuf(iad+1:iad+nel*l_eint)
601 ALLOCATE(lbuf%EINS(nel*l_eins) ,stat=err)
602 lbuf%EINS(1:nel*l_eins) = rbuf(iad+1:iad+nel*l_eins
604 ALLOCATE(lbuf%RHO(nel*l_rho) ,stat=err)
605 lbuf%RHO(1:nel*l_rho) = rbuf(iad+1:iad+nel*l_rho)
607 ALLOCATE(lbuf%DP_DRHO(nel*l_dp_drho) ,stat=err)
608 lbuf%DP_DRHO(1:nel*l_dp_drho)=rbuf(iad+1:iad+nel*l_dp_drho)
609 iad = iad+nel*l_dp_drho
610 ALLOCATE(lbuf%QVIS(nel*l_qvis) ,stat=err)
611 lbuf%QVIS(1:nel*l_qvis) = rbuf(iad+1:iad+nel*l_qvis)
613 ALLOCATE(lbuf%DELTAX(nel*l_deltax),stat=err)
614 lbuf%DELTAX(1:nel*l_deltax)=rbuf(iad+1:iad+nel*l_deltax)
615 iad = iad+nel*l_deltax
616 ALLOCATE(lbuf%VOL (nel*l_vol) ,stat=err)
617 lbuf%VOL(1:nel*l_vol) = rbuf(iad+1:iad+nel*l_vol)
619 ALLOCATE(lbuf%EPSA (nel*l_epsa) ,stat=err)
620 lbuf%EPSA(1:nel*l_epsa) = rbuf(iad+1:iad+nel*l_epsa)
622 ALLOCATE(lbuf%EPSD (nel*l_epsd) ,stat=err)
623 lbuf%EPSD(1:nel*l_epsd) = rbuf(iad+1:iad+nel*l_epsd)
625 ALLOCATE(lbuf%EPSQ (nel*l_epsq) ,stat=err)
626 lbuf%EPSQ(1:nel*l_epsq) = rbuf(iad+1:iad+nel*l_epsq)
628 ALLOCATE(lbuf%EPSF (nel*l_epsf) ,stat=err)
629 lbuf%EPSF(1:nel*l_epsf) = rbuf(iad+1:iad+nel*l_epsf)
631 ALLOCATE(lbuf%PLA (nel*l_pla) ,stat=err)
632 lbuf%PLA(1:nel*l_pla) = rbuf(iad+1:iad+nel*l_pla)
634 ALLOCATE(lbuf%WPLA (nel*l_wpla) ,stat=err)
635 lbuf%WPLA(1:nel*l_wpla) = rbuf(iad+1:iad+nel*l_wpla)
637 ALLOCATE(lbuf%TEMP(nel*l_temp), stat=err)
638 lbuf%TEMP(1:nel*l_temp) = rbuf(iad+1:iad+nel*l_temp)
640 ALLOCATE(lbuf%TB(nel*l_tb), stat=err)
643 ALLOCATE(lbuf%RK(nel*l_rk), stat=err)
644 lbuf%RK(1:nel*l_rk) = rbuf(iad+1:iad+nel*l_rk)
646 ALLOCATE(lbuf%RE(nel*l_re), stat=err)
647 lbuf%RE(1:nel*l_re) = rbuf(iad+1:iad+nel*l_re)
649 ALLOCATE(lbuf%VK(nel*l_vk), stat=err)
650 lbuf%VK(1:nel*l_vk) = rbuf(iad+1:iad+nel*l_vk)
652 ALLOCATE(lbuf%SF(nel*l_sf), stat=err)
653 lbuf%SF(1:nel*l_sf) = rbuf(iad+1:iad+nel*l_sf)
655 ALLOCATE(lbuf%ROB(nel*l_rob), stat=err)
656 lbuf%ROB(1:nel*l_rob) = rbuf(iad+1:iad+nel*l_rob)
658 ALLOCATE(lbuf%DAM (nel*l_dam) ,stat=err)
659 lbuf%DAM(1:nel*l_dam) = rbuf(iad+1:iad+nel*l_dam)
661 ALLOCATE(lbuf%DSUM (nel*l_dsum) ,stat=err)
662 lbuf%DSUM(1:nel*l_dsum) = rbuf(iad+1:iad+nel*l_dsum)
664 ALLOCATE(lbuf%DGLO (nel*l_dglo) ,stat=err)
665 lbuf%DGLO(1:nel*l_dglo) = rbuf(iad+1:iad+nel*l_dglo)
667 ALLOCATE(lbuf%CRAK (nel*l_crak) ,stat=err)
668 lbuf%CRAK(1:nel*l_crak) = rbuf(iad+1:iad+nel
670 ALLOCATE(lbuf%ANG (nel*l_ang) ,stat=err)
671 lbuf%ANG(1:nel*l_ang) = rbuf(iad+1:iad+nel*l_ang
673 ALLOCATE(lbuf%EPE (nel*l_epe) ,stat=err)
674 lbuf%EPE(1:nel*l_epe) = rbuf(iad+1:iad+nel*l_epe)
676 ALLOCATE(lbuf%EPC (nel*l_epc) ,stat=err)
677 lbuf%EPC(1:nel*l_epc) = rbuf(iad+1:iad+nel*l_epc)
679 ALLOCATE(lbuf%XST (nel*l_xst) ,stat=err)
680 lbuf%XST(1:nel*l_xst) = rbuf(iad+1:iad+nel*l_xst)
682 ALLOCATE(lbuf%SSP (nel*l_ssp) ,stat=err)
683 lbuf%SSP(1:nel*l_ssp) = rbuf(iad+1:iad+nel*l_ssp)
685 ALLOCATE(lbuf%Z (nel*l_z) ,stat=err)
686 lbuf%Z(1:nel*l_z) = rbuf(iad+1:iad+nel*l_z)
688 ALLOCATE(lbuf%VISC (nel*l_visc) ,stat=err)
689 lbuf%VISC(1:nel*l_visc) = rbuf(iad+1:iad+nel*l_visc)
691 ALLOCATE(lbuf%SIGL (nel*l_sigl) ,stat=err)
692 lbuf%SIGL(1:nel*l_sigl) = rbuf(iad+1:iad+nel*l_sigl)
694 ALLOCATE(lbuf%SIGV (nel*l_sigv) ,stat=err)
695 lbuf%SIGV(1:nel*l_sigv) = rbuf(iad+1:iad+nel*l_sigv)
697 ALLOCATE(lbuf%SIGA (nel*l_siga) ,stat=err)
698 lbuf%SIGA(1:nel*l_siga) = rbuf(iad+1:iad+nel*l_siga)
700 ALLOCATE(lbuf%SIGB (nel*l_sigb) ,stat
701 lbuf%SIGB(1:nel*l_sigb) = rbuf(iad+1:iad+nel*l_sigb)
703 ALLOCATE(lbuf%SIGC (nel*l_sigc) ,stat=err)
704 lbuf%SIGC(1:nel*l_sigc) = rbuf(iad+1:iad+nel*l_sigc)
706 ALLOCATE(lbuf%SIGD (nel*l_sigd) ,stat=err)
707 lbuf%SIGD(1:nel*l_sigd) = rbuf(iad+1:iad+nel*l_sigd)
709 ALLOCATE(lbuf%SIGF (nel*l_sigf) ,stat=err)
710 lbuf%SIGF(1:nel*l_sigf) = rbuf(iad+1:iad+nel*l_sigf)
712 ALLOCATE(lbuf%SIG(nel*l_sig) ,stat=err)
713 lbuf%SIG(1:nel*l_sig) = rbuf(iad+1:iad+nel*l_sig)
715 ALLOCATE(lbuf%SIGPLY(nel*l_sigply) ,stat=err)
716 lbuf%SIGPLY(1:nel*l_sigply) = rbuf(iad+1:iad+nel*l_sigply)
717 iad = iad+nel*l_sigply
718 ALLOCATE(lbuf%FOR (nel*l_for) ,stat=err)
719 lbuf%FOR(1:nel*l_for) = rbuf(iad+1:iad+nel*l_for)
721 ALLOCATE(lbuf%MOM (nel*l_mom) ,stat=err)
722 lbuf%MOM(1:nel*l_mom) = rbuf(iad+1:iad+nel*l_mom)
724 ALLOCATE(lbuf%THK (nel*l_thk) ,stat=err)
725 lbuf%THK(1:nel*l_thk) = rbuf(iad+1:iad+nel*l_thk)
727 ALLOCATE(lbuf%SMSTR (nel*l_smstr) ,stat=err)
728 lbuf%SMSTR(1:nel*l_smstr) = rbuf(iad+1:iad+nel*l_smstr)
729 iad = iad+nel*l_smstr
730 ALLOCATE(lbuf%DMG (nel*l_dmg) ,stat=err)
731 lbuf%DMG(1:nel*l_dmg) = rbuf(iad+1:iad+nel*l_dmg)
733 ALLOCATE(lbuf%FORTH (nel*l_forth) ,stat=err)
734 lbuf%FORTH(1:nel*l_forth) = rbuf(iad+1:iad+nel*l_forth)
735 iad = iad+nel*l_forth
736 ALLOCATE(lbuf%EINTTH (nel*l_eintth) ,stat=err)
737 lbuf%EINTTH(1:nel*l_eintth) = rbuf(iad+1:iad+nel*l_eintth)
738 iad = iad+nel*l_eintth
739 ALLOCATE(lbuf%SEQ (nel*l_seq) ,stat=err)
740 lbuf%SEQ(1:nel*l_seq) = rbuf(iad+1:iad+nel*l_seq)
742 ALLOCATE(lbuf%JAC_I (nel*l_jac_i) ,stat=err)
743 lbuf%JAC_I(1:nel*l_jac_i) = rbuf(iad+1:iad+nel*l_jac_i)
744 iad = iad+nel*l_jac_i
745 ALLOCATE(lbuf%FAC_YLD (nel*l_fac_yld) ,stat=err)
746 lbuf%FAC_YLD(1:nel*l_fac_yld) = rbuf(iad+1:iad+nel*l_fac_yld)
747 iad = iad+nel*l_fac_yld
748 ALLOCATE(lbuf%ABURN(nel*l_aburn) ,stat=err)
749 lbuf%ABURN(1:nel*l_aburn) = rbuf(iad+1:iad+nel*l_aburn)
750 iad = iad+nel*l_aburn
751 ALLOCATE(lbuf%MU(nel*l_mu) ,stat=err)
752 lbuf%MU(1:nel*l_mu) = rbuf(iad+1:iad+nel*l_mu)
754 ALLOCATE(lbuf%PLANL(nel*l_planl) ,stat=err)
755 lbuf%PLANL(1:nel*l_planl) = rbuf(iad+1:iad+nel*l_planl)
756 iad = iad+nel*l_planl
757 ALLOCATE(lbuf%EPSDNL(nel*l_epsdnl) ,stat=err)
758 lbuf%EPSDNL(1:nel*l_epsdnl) = rbuf(iad+1:iad+nel*l_epsdnl)
759 iad = iad+nel*l_epsdnl
760 ALLOCATE(lbuf%DMGSCL(nel*l_dmgscl) ,stat=err)
761 lbuf%DMGSCL(1:nel*l_dmgscl) = rbuf(iad+1:iad+nel*l_dmgscl)
762 iad = iad+nel*l_dmgscl
763 ALLOCATE(lbuf%TSAIWU(nel*l_tsaiwu) ,stat=err)
764 lbuf%TSAIWU(1:nel*l_tsaiwu) = rbuf(iad+1:iad+nel*l_tsaiwu)
765 iad = iad+nel*l_tsaiwu
766 ALLOCATE(lbuf%PIJ(nel*l_pij) ,stat=err)
767 lbuf%PIJ(1:nel*l_pij) = rbuf(iad+1:iad+nel*l_pij)
770 ALLOCATE(lbuf%VOL0DP(nel*l_vol0dp) ,stat=err)
771 sdp_rbuf = sdp_rbuf + nel*l_vol0dp
776 IF(idrape > 0 .AND. (igtyp == 51 .OR. igtyp == 52))
THEN
778 lbuf_dir => elbuf_str%BUFLY(il)%LBUF_DIR(it)
779 ALLOCATE(lbuf_dir%DIRA(nel*ly_dira))
780 lbuf_dir%DIRA(1:nel*ly_dira) = rbuf(iad+1:iad+nel*ly_dira)
781 iad = iad + nel*ly_dira
782 ALLOCATE(lbuf_dir%DIRB(nel*ly_dirb))
783 lbuf_dir%DIRB(1:nel*ly_dirb) = rbuf(iad+1:iad+nel*ly_dirb)
784 iad = iad + nel*ly_dirb
789 IF (ity == 3 .OR. ity == 7)
THEN
791 DO il=1,elbuf_str%NLAY
792 npttot = npttot + npg*elbuf_str%BUFLY(il)%NPTT
794 IF (npt == 0) npttot = npt
802 bufly => elbuf_str%BUFLY(il)
803 IF (igtyp == 51 .OR. igtyp == 52)
THEN
806 lbuf => bufly%LBUF(1,1,it)
807 bufly%PLAPT(1:nel*ly_plapt) => lbuf%PLA(1:nel*l_pla)
808 bufly%SIGPT(1:nel*ly_sigpt) => lbuf%SIG(1:nel*l_sig)
811 lbuf => bufly%LBUF(1,1,1)
812 bufly%PLAPT(1:nel*ly_plapt) => lbuf%PLA(1:nel*l_pla)
813 bufly%SIGPT(1:nel*ly_sigpt) => lbuf%SIG(1:nel*l_sig)
817 bufly => elbuf_str%BUFLY(1)
818 IF (igtyp == 51 .OR. igtyp == 52)
THEN
821 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,ipt)
822 len1 = 1+(ipt-1)*nel*ly_plapt
823 len2 = ipt*nel*ly_plapt
824 bufly%PLAPT(len1:len2) => lbuf%PLA(1:nel*l_pla)
825 len1 = 1+(ipt-1)*nel*ly_sigpt
826 len2 = ipt*nel*ly_sigpt
827 bufly%SIGPT(len1:len2) => lbuf%SIG(1:nel*l_sig)
831 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,ipt)
832 len1 = 1+(ipt-1)*nel*ly_plapt
833 len2 = ipt*nel*ly_plapt
834 bufly%PLAPT(len1:len2) => lbuf%PLA(1:nel*l_pla)
836 len2 = ipt*nel*ly_sigpt
837 bufly%SIGPT(len1:len2) => lbuf%SIG(1:nel*l_sig)
846 nuvar = elbuf_str%BUFLY(il)%NVAR_MAT
847 nvartmp = elbuf_str%BUFLY(il)%NVARTMP
848 IF (igtyp == 51 .OR. igtyp == 52)
THEN
849 nptt = elbuf_str%BUFLY(il)%NPTT
851 nptt = elbuf_str%NPTT
856 matbuf => elbuf_str%BUFLY(il)%MAT(ir,is,it)
857 ALLOCATE (matbuf%VAR(nel*nuvar), stat=err)
858 matbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
860 ALLOCATE (matbuf%VARTMP(nel*nvartmp), stat=err)
861 matbuf%VARTMP(1:nel*nvartmp) = 0
871 nfail = elbuf_str% BUFLY(il)%NFAIL
872 IF (igtyp == 51 .OR. igtyp == 52)
THEN
873 nptt = elbuf_str%BUFLY(il)%NPTT
875 nptt = elbuf_str%NPTT
880 ALLOCATE (elbuf_str%BUFLY(il)%FAIL(ir,is,it)%FLOC(nfail)
883 floc=>elbuf_str%BUFLY(il)%FAIL(ir,is,it)%FLOC(k)
885 floc%ILAWF = rbuf(iad+1)
888 floc%IDFAIL = rbuf(iad+1)
899 lf_dammx = rbuf(iad+1)
901 floc%LF_DAMMX = lf_dammx
903 lf_damini = rbuf(iad+1)
905 floc%LF_DAMINI = lf_damini
907 lf_tdel = rbuf(iad+1)
909 floc%LF_TDEL = lf_tdel
911 lf_indx = rbuf(iad+1)
913 floc%LF_INDX = lf_indx
919 ALLOCATE(floc%VAR(nel*nuvar), stat=err)
920 floc%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
921 iad = iad + nel*nuvar
923 ALLOCATE(floc%DAM(nel*lf_dam), stat=err)
924 floc%DAM(1:nel*lf_dam) = rbuf(iad+1:iad+nel*lf_dam)
925 iad = iad + nel*lf_dam
927 ALLOCATE(floc%DAMMX(nel*lf_dammx), stat=err)
928 floc%DAMMX(1:nel*lf_dammx) = rbuf(iad+1:iad+nel*lf_dammx)
929 iad = iad + nel*lf_dammx
931 ALLOCATE(floc%DAMINI(nel*lf_damini), stat=err)
932 floc%DAMINI(1:nel*lf_damini) = rbuf(iad+1:iad+nel*lf_damini
933 iad = iad + nel*lf_damini
935 ALLOCATE(floc%TDEL(nel*lf_tdel), stat=err)
936 floc%TDEL(1:nel*lf_tdel) = rbuf(iad+1:iad+nel
937 iad = iad + nel*lf_tdel
939 ALLOCATE(floc%INDX(nel*lf_indx), stat=err)
940 floc%INDX(1:nel*lf_indx) = rbuf(iad+1:iad+nel*lf_indx)
941 iad = iad + nel*lf_indx
943 ALLOCATE(floc%OFF(nel*lf_off), stat=err)
944 floc%OFF(1:nel*lf_off) = rbuf(iad+1:iad+nel*lf_off)
945 iad = iad + nel*lf_off
955 nuvar = elbuf_str% BUFLY(il)%NVAR_VISC
956 IF (igtyp == 51 .OR. igtyp == 52)
THEN
957 nptt = elbuf_str%BUFLY(il)%NPTT
959 nptt = elbuf_str%NPTT
964 viscbuf => elbuf_str%BUFLY(il)%VISC(ir,is,it)
965 ALLOCATE (viscbuf%VAR(nel*nuvar), stat=err)
966 viscbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
976 IF ((ity==3).OR.(ity==7))
THEN
978 nptt = elbuf_str%NPTT
979 IF ((inloc>0).AND.(nptt>1))
THEN
981 ALLOCATE(elbuf_str%NLOC(nptr,npts), stat=err)
992 bufnl => elbuf_str%NLOC(ir,is)
994 ALLOCATE(bufnl%MASSTH(nel,nonl), stat=err)
996 ALLOCATE(bufnl%UNLTH(nel,nonl) , stat=err)
998 ALLOCATE(bufnl%VNLTH(nel,nonl) , stat=err)
1000 ALLOCATE(bufnl%FNLTH(nel,nonl) , stat=err)
1004 bufnl%MASSTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1007 bufnl%UNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1010 bufnl%VNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1013 bufnl%FNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1022 ELSEIF (ity==1)
THEN
1024 nptt = elbuf_str%NLAY
1025 IF ((inloc>0).AND.(nptt>1))
THEN
1027 ALLOCATE(elbuf_str%NLOCTS(nptr,npts), stat=err)
1034 bufnlts => elbuf_str%NLOCTS(ir,is)
1036 ALLOCATE(bufnlts%MASSTH(nel,nonl), stat=err)
1038 ALLOCATE(bufnlts%UNLTH(nel,nonl) , stat=err)
1040 ALLOCATE(bufnlts%VNLTH(nel,nonl) , stat=err)
1042 ALLOCATE(bufnlts%FNLTH(nel,nonl) , stat=err)
1046 bufnlts%MASSTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1049 bufnlts%UNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1052 bufnlts%VNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1054 ! non-local in-thickness forces
1055 bufnlts%FNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1066 nvar_eos = elbuf_str%BUFLY(il)%NVAR_EOS
1067 nvartmp_eos = elbuf_str%BUFLY(il)%NVARTMP_EOS
1068 IF (igtyp == 51 .OR. igtyp == 52)
THEN
1069 nptt = elbuf_str%BUFLY(il)%NPTT
1071 nptt = elbuf_str%NPTT
1076 ALLOCATE( elbuf_str%BUFLY(il)%EOS(ir,is,it)%VAR(nel*nvar_eos), stat=err)
1077 ALLOCATE( elbuf_str%BUFLY(il)%EOS(ir,is,it)%VARTMP(nel*nvartmp_eos), stat=err)
1078 elbuf_str%BUFLY(il)%EOS(ir,is,it)%VAR(1:nel*nvar_eos) = rbuf(iad+1:iad+nel*nvar_eos)
1079 elbuf_str%BUFLY(il)%EOS(ir,is,it)%VARTMP(1:nel*nvartmp_eos) = 0
1080 iad = iad+nel*nvar_eos
1094 DO il = 1,elbuf_str%NINTLAY
1095 intlay => elbuf_str%INTLAY(il)
1098 intlay%ILAW = nint(rbuf(iad+1))
1100 intlay%IMAT = nint(rbuf(iad+1))
1102 intlay%NFAIL = nint(rbuf(iad+1))
1104 intlay%NVAR_MAT = nint(rbuf(iad+1))
1106 intlay%NVARTMP = nint(rbuf(iad+1))
1111 ALLOCATE(intlay%EINT (nel*ly_eint) ,stat=err)
1112 intlay%EINT(1:nel*ly_eint) = rbuf(iad+1:iad+nel*ly_eint)
1113 iad = iad+nel*ly_eint
1114 ALLOCATE(intlay%COUNT (nel*ly_count) ,stat=err)
1115 intlay%COUNT(1:nel*ly_count) = rbuf(iad+1:iad+nel*ly_count)
1116 iad = iad+nel*ly_count
1120 ilbuf => elbuf_str%INTLAY(il)%ILBUF(ir,is)
1122 ALLOCATE(ilbuf%EPS (nel*l_eps) ,stat=err)
1123 ilbuf%EPS(1:nel*l_eps) = rbuf(iad+1:iad+nel*l_eps)
1125 ALLOCATE(ilbuf%SIG (nel*l_sig) ,stat=err)
1126 ilbuf%SIG(1:nel*l_sig) = rbuf(iad+1:iad+nel*l_sig)
1136 nuvar = elbuf_str%INTLAY(il)%NVAR_MAT
1137 nvartmp = elbuf_str%BUFLY(il)%NVARTMP
1140 matbuf => elbuf_str%INTLAY(il)%MAT(ir,is)
1141 ALLOCATE (matbuf%VAR(nel*nuvar), stat=err)
1142 matbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
1144 ALLOCATE (matbuf%VARTMP(nel*nvartmp), stat=err)
1145 matbuf%VARTMP(1:nel*nvartmp) = 0
1153 nfail = elbuf_str% INTLAY(il)%NFAIL
1156 ALLOCATE (elbuf_str%INTLAY(il)%FAIL(ir,is)%FLOC(nfail),
1159 floc=>elbuf_str%INTLAY(il)%FAIL(ir,is)%FLOC(k)
1160 floc%ILAWF = rbuf(iad+1)
1162 floc%IDFAIL = rbuf(iad+1)
1167 ALLOCATE(floc%VAR(nel*nuvar), stat=err)
1168 floc%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
1192 gbuf => elbuf_str%GBUF
1194 gbuf%NVAR_GLOB = nint(rbuf(iad+1))
1197 g_noff = nint(rbuf(iad+1))
1199 g_ierr = nint(rbuf(iad+1))
1201 g_off = nint(rbuf(iad+1))
1203 g_gama = nint(rbuf(iad+1))
1205 g_smstr= nint(rbuf(iad+1))
1207 g_hourg= nint(rbuf(iad+1))
1209 g_bfrac= nint(rbuf(iad+1))
1211 g_eint = nint(rbuf(iad+1))
1213 g_eins = nint(rbuf(iad+1))
1215 g_rho = nint(rbuf(iad+1))
1217 g_qvis = nint(rbuf(iad+1))
1219 g_deltax= nint(rbuf(iad+1))
1221 g_vol = nint(rbuf(iad+1))
1223 g_epsd = nint(rbuf(iad+1))
1225 g_epsq = nint(rbuf(iad+1))
1227 g_pla = nint(rbuf(iad+1))
1229 g_wpla = nint(rbuf(iad+1))
1231 g_temp = nint(rbuf(iad+1))
1233 g_tb = nint(rbuf(iad+1))
1235 g_rk = nint(rbuf(iad+1))
1237 g_re = nint(rbuf(iad+1))
1239 g_sig = nint(rbuf(iad+1))
1241 g_for = nint(rbuf(iad+1))
1243 g_mom = nint(rbuf(iad+1))
1245 g_thk = nint(rbuf(iad+1))
1247 g_tag22 = nint(rbuf(iad+1))
1249 g_stra = nint(rbuf(iad+1))
1251 g_sigi = nint(rbuf(iad+1))
1253 g_dmg = nint(rbuf(iad+1))
1255 g_forpg = nint(rbuf(iad+1))
1257 g_mompg = nint(rbuf(iad+1))
1260 g_forpgpinch = nint(rbuf(iad+1))
1262 g_mompgpinch = nint(rbuf(iad+1))
1264 g_epgpinchxz = nint(rbuf(iad+1))
1266 g_epgpinchyz = nint(rbuf(iad+1))
1268 g_epgpinchzz = nint(rbuf(iad+1))
1271 g_strpg = nint(rbuf(iad+1))
1273 g_uelr = nint(rbuf(iad+1))
1275 g_uelr1 = nint(rbuf(iad+1))
1277 g_damdl = nint(rbuf(iad+1))
1279 g_forth = nint(rbuf(iad+1))
1281 g_eintth = nint(rbuf(iad+1))
1283 g_fill = nint(rbuf(iad+1))
1285 g_seq = nint(rbuf(iad+1))
1287 g_strw = nint(rbuf(iad+1))
1289 g_strwpg = nint(rbuf(iad+1))
1291 g_thk_i = nint(rbuf(iad+1))
1293 g_jac_i = nint(rbuf(iad+1))
1295 g_dt = nint(rbuf(iad+1))
1297 g_isms = nint(rbuf(iad+1))
1299 g_bpreld = nint(rbuf(iad+1))
1301 g_aburn= nint(rbuf(iad+1))
1303 g_mu= nint(rbuf(iad+1))
1305 g_planl= nint(rbuf(iad+1))
1307 g_epsdnl= nint(rbuf(iad+1))
1309 g_dmgscl= nint(rbuf(iad+1))
1311 g_tsaiwu= nint(rbuf(iad+1))
1313 g_tempg = nint(rbuf(iad+1))
1315 g_cor_nf = nint(rbuf(iad+1))
1317 g_cor_fr = nint(rbuf(iad+1))
1319 g_cor_xr = nint(rbuf(iad+1))
1321 g_maxfrac= nint(rbuf(iad+1))
1323 g_maxeps= nint(rbuf(iad+1))
1325 g_betaorth = nint(rbuf(iad+1))
1327 g_amu= nint(rbuf(iad+1))
1329 g_sh_ioffset= nint(rbuf(iad+1))
1331 g_eint_distor= nint(rbuf(iad+1))
1333 g_for_g= nint(rbuf(iad+1))
1335 g_forpg_g= nint(rbuf(iad+1))
1340 g_area = nint(rbuf(iad+1))
1342 g_skew = nint(rbuf(iad+1))
1344 g_length = nint(rbuf(iad+1))
1346 g_totdepl = nint(rbuf(iad+1))
1348 g_totrot = nint(rbuf(iad+1))
1350 g_forep = nint(rbuf(iad+1))
1352 g_momep = nint(rbuf(iad+1))
1354 g_dep_in_tens = nint(rbuf(iad+1))
1356 g_dep_in_comp = nint(rbuf(iad+1))
1358 g_rot_in_tens = nint(rbuf(iad+1))
1360 g_rot_in_comp = nint(rbuf(iad+1))
1362 g_posx = nint(rbuf(iad+1))
1364 g_posy = nint(rbuf(iad+1))
1366 g_posz = nint(rbuf(iad+1))
1368 g_posxx = nint(rbuf(iad+1))
1370 g_posyy = nint(rbuf(iad+1))
1372 g_poszz = nint(rbuf(iad+1))
1374 g_yield = nint(rbuf(iad+1))
1376 g_length_err = nint(rbuf(iad+1))
1378 g_dv = nint(rbuf(iad+1))
1380 g_dfs = nint(rbuf(iad+1))
1382 g_skew_err = nint(rbuf(iad+1))
1384 g_e6 = nint(rbuf(iad+1))
1386 g_ruptcrit = nint(rbuf(iad+1))
1388 g_mass = nint(rbuf(iad+1))
1390 g_v_repcvt = nint(rbuf(iad+1))
1392 g_vr_repcvt = nint(rbuf(iad+1))
1394 g_nuvar = nint(rbuf(iad+1))
1396 g_nuvarn = nint(rbuf(iad+1))
1398 g_defini = nint(rbuf(iad+1))
1400 g_forini = nint(rbuf(iad+1))
1402 g_inifric = nint(rbuf(iad+1))
1404 g_strhg = nint(rbuf(iad+1))
1406 g_etotsh = nint(rbuf(iad+1))
1409 g_dt_piter_old = nint(rbuf(iad+1))
1414 IF(g_dt_piter_old/=0)
THEN
1415 g_dt_piter=g_dt_piter_old
1416 ELSEIF(idt1tet10 > 1)
THEN
1418 newlen =newlen+30*nel
1422 g_skew_id = nint(rbuf(iad+1))
1424 g_gama_r = nint(rbuf(iad+1))
1426 g_yield_in_comp = nint(rbuf(iad+1))
1428 g_xxold_in_comp = nint(rbuf(iad+1))
1433 g_slipring_id = nint(rbuf(iad+1))
1435 g_slipring_fram_id = nint(rbuf(iad+1))
1437 g_slipring_strand = nint(rbuf(iad+1))
1439 g_retractor_id = nint(rbuf(iad+1))
1441 g_ringslip = nint(rbuf(iad+1))
1443 g_add_node = nint(rbuf(iad+1))
1445 g_update = nint(rbuf(iad+1))
1447 g_fram_factor = nint(rbuf(iad+1))
1449 g_intvar = nint(rbuf(iad+1))
1451 g_idt_tsh = nint(rbuf(iad+1))
1454 g_tm_yield = nint(rbuf(iad+1))
1456 g_tm_seq = nint(rbuf(iad+1))
1458 g_tm_eint = nint(rbuf(iad+1))
1460 g_tm_dmg = nint(rbuf(iad+1))
1462 g_tm_sig = nint(rbuf(iad+1))
1464 g_tm_stra = nint(rbuf(iad+1))
1467 gbuf%G_NOFF = g_noff
1468 gbuf%G_IERR = g_ierr
1470 gbuf%G_GAMA = g_gama
1471 gbuf%G_SMSTR = g_smstr
1472 gbuf%G_HOURG = g_hourg
1473 gbuf%G_BFRAC = g_bfrac
1474 gbuf%G_EINT = g_eint
1475 gbuf%G_EINS = g_eins
1477 gbuf%G_QVIS = g_qvis
1478 gbuf%G_DELTAX = g_deltax
1480 gbuf%G_EPSD = g_epsd
1481 gbuf%G_EPSQ = g_epsq
1483 gbuf%G_WPLA = g_wpla
1484 gbuf%G_TEMP = g_temp
1492 gbuf%G_TAG22 = g_tag22
1493 gbuf%G_STRA = g_stra
1494 gbuf%G_SIGI = g_sigi
1496 gbuf%G_FORPG = g_forpg
1497 gbuf%G_MOMPG = g_mompg
1499 gbuf%G_FORPGPINCH = g_forpgpinch
1500 gbuf%G_MOMPGPINCH = g_mompgpinch
1501 gbuf%G_EPGPINCHXZ = g_epgpinchxz
1502 gbuf%G_EPGPINCHYZ = g_epgpinchyz
1503 gbuf%G_EPGPINCHZZ = g_epgpinchzz
1505 gbuf%G_STRPG = g_strpg
1506 gbuf%G_UELR = g_uelr
1507 gbuf%G_UELR1 = g_uelr1
1508 gbuf%G_DAMDL = g_damdl
!-----------------------------------------------------------------------
! Copy the per-quantity size counters (g_*) into the global element
! buffer descriptor (gbuf%G_*). Each counter gives the number of values
! stored per element for that quantity (0 => quantity not present).
! NOTE(review): gaps in the surrounding statement numbering indicate
! additional assignments exist in the full source but are not visible
! in this extract — confirm against the complete file.
!-----------------------------------------------------------------------
1509 gbuf%G_FORTH = g_forth
1510 gbuf%G_EINTTH = g_eintth
1511 gbuf%G_FILL = g_fill
1513 gbuf%G_STRW = g_strw
1514 gbuf%G_STRWPG = g_strwpg
1515 gbuf%G_THK_I = g_thk_i
1516 gbuf%G_JAC_I = g_jac_i
1518 gbuf%G_ISMS = g_isms
1519 gbuf%G_BPRELD = g_bpreld
1520 gbuf%G_ABURN = g_aburn
1522 gbuf%G_PLANL = g_planl
1523 gbuf%G_EPSDNL = g_epsdnl
1524 gbuf%G_DMGSCL = g_dmgscl
1525 gbuf%G_TSAIWU = g_tsaiwu
1526 gbuf%G_TEMPG = g_tempg
1527 gbuf%G_COR_NF = g_cor_nf
1528 gbuf%G_COR_FR = g_cor_fr
1529 gbuf%G_COR_XR = g_cor_xr
1530 gbuf%G_MAXFRAC= g_maxfrac
1531 gbuf%G_MAXEPS = g_maxeps
1532 gbuf%G_BETAORTH = g_betaorth
1534 gbuf%G_SH_IOFFSET = g_sh_ioffset
1535 gbuf%G_EINT_DISTOR = g_eint_distor
1536 gbuf%G_FOR_G = g_for_g
1537 gbuf%G_FORPG_G = g_forpg_g
! Counters below (AREA/SKEW/LENGTH/TOTDEPL/...) are presumably used by
! spring/beam element types — TODO confirm against caller.
1541 gbuf%G_AREA = g_area
1542 gbuf%G_SKEW = g_skew
1543 gbuf%G_LENGTH = g_length
1544 gbuf%G_TOTDEPL = g_totdepl
1545 gbuf%G_TOTROT = g_totrot
1546 gbuf%G_FOREP = g_forep
1547 gbuf%G_MOMEP = g_momep
1548 gbuf%G_DEP_IN_TENS = g_dep_in_tens
1549 gbuf%G_DEP_IN_COMP = g_dep_in_comp
1550 gbuf%G_ROT_IN_TENS = g_rot_in_tens
1551 gbuf%G_ROT_IN_COMP = g_rot_in_comp
1552 gbuf%G_POSX = g_posx
1553 gbuf%G_POSY = g_posy
1554 gbuf%G_POSZ = g_posz
1555 gbuf%G_POSXX = g_posxx
1556 gbuf%G_POSYY = g_posyy
1558 gbuf%G_YIELD = g_yield
1559 gbuf%G_LENGTH_ERR = g_length_err
1562 gbuf%G_SKEW_ERR = g_skew_err
1564 gbuf%G_RUPTCRIT = g_ruptcrit
1565 gbuf%G_MASS = g_mass
1566 gbuf%G_V_REPCVT = g_v_repcvt
1567 gbuf%G_VR_REPCVT = g_vr_repcvt
1568 gbuf%G_NUVAR = g_nuvar
1569 gbuf%G_NUVARN = g_nuvarn
1570 gbuf%G_DEFINI = g_defini
1571 gbuf%G_FORINI = g_forini
1572 gbuf%G_INIFRIC = g_inifric
1573 gbuf%G_STRHG = g_strhg
1574 gbuf%G_ETOTSH = g_etotsh
1575 gbuf%G_DT_PITER = g_dt_piter
1576 gbuf%G_SKEW_ID = g_skew_id
1577 gbuf%G_GAMA_R = g_gama_r
1578 gbuf%G_YIELD_IN_COMP = g_yield_in_comp
1579 gbuf%G_XXOLD_IN_COMP = g_xxold_in_comp
1583 gbuf%G_SLIPRING_ID = g_slipring_id
1584 gbuf%G_SLIPRING_FRAM_ID = g_slipring_fram_id
1585 gbuf%G_SLIPRING_STRAND = g_slipring_strand
1586 gbuf%G_RETRACTOR_ID= g_retractor_id
1587 gbuf%G_RINGSLIP = g_ringslip
1588 gbuf%G_ADD_NODE = g_add_node
1589 gbuf%G_UPDATE = g_update
1590 gbuf%G_FRAM_FACTOR = g_fram_factor
1591 gbuf%G_INTVAR = g_intvar
1592 gbuf%G_IDT_TSH = g_idt_tsh
! Thermo-mechanical (TM_*) output counters.
1594 gbuf%G_TM_YIELD = g_tm_yield
1595 gbuf%G_TM_SEQ = g_tm_seq
1596 gbuf%G_TM_EINT = g_tm_eint
1597 gbuf%G_TM_DMG = g_tm_dmg
1598 gbuf%G_TM_SIG = g_tm_sig
1599 gbuf%G_TM_STRA = g_tm_stra
!-----------------------------------------------------------------------
! Allocate the global-buffer arrays and restore their contents from the
! restart read buffer rbuf, advancing the running cursor iad after each
! array. Integer-valued arrays (NOFF, IERR) are converted with nint().
! NOTE(review): for some arrays the "iad = iad + ..." advance is not
! visible in this extract (numbering gaps) — it exists in the full
! source; do not add it here.
!-----------------------------------------------------------------------
1602 ALLOCATE(elbuf_str%GBUF%ETOTSH(nel*g_etotsh) ,stat=err)
1605 ALLOCATE(elbuf_str%GBUF%GAMA_R(nel*g_gama_r) ,stat=err)
1608 ALLOCATE (elbuf_str%GBUF%NOFF(nel*g_noff) ,stat=err)
1609 gbuf%NOFF(1:nel*g_noff) = nint(rbuf(iad+1:iad+nel*g_noff))
1610 iad = iad+nel*g_noff
1611 ALLOCATE (elbuf_str%GBUF%IERR(nel*g_ierr) ,stat=err)
1612 gbuf%IERR(1:nel*g_ierr) = nint(rbuf(iad+1:iad+nel*g_ierr))
1613 iad = iad+nel*g_ierr
1614 ALLOCATE (elbuf_str%GBUF%GAMA(nel*g_gama) ,stat=err)
1615 gbuf%GAMA(1:nel*g_gama) = rbuf(iad+1:iad+nel*g_gama)
1616 iad = iad+nel*g_gama
1620 ALLOCATE (elbuf_str%GBUF%HOURG(nel*g_hourg) ,stat=err)
1621 gbuf%HOURG(1:nel*g_hourg) = rbuf(iad+1:iad+nel*g_hourg)
1622 iad = iad+nel*g_hourg
1623 ALLOCATE (elbuf_str%GBUF%TAG22(nel*g_tag22) ,stat=err)
1624 gbuf%TAG22(1:nel*g_tag22) = rbuf(iad+1:iad+nel*g_tag22)
1625 iad = iad+nel*g_tag22
1626 ALLOCATE (elbuf_str%GBUF%STRA(nel*g_stra) ,stat=err)
1627 gbuf%STRA(1:nel*g_stra) = rbuf(iad+1:iad+nel*g_stra)
1628 iad = iad+nel*g_stra
1629 ALLOCATE (elbuf_str%GBUF%SIGI(nel*g_sigi) ,stat=err)
1630 gbuf%SIGI(1:nel*g_sigi) = rbuf(iad+1:iad+nel*g_sigi)
1631 iad = iad+nel*g_sigi
1632 ALLOCATE (elbuf_str%GBUF%DMG(nel*g_dmg) ,stat=err)
1633 gbuf%DMG(1:nel*g_dmg) = rbuf(iad+1:iad+nel*g_dmg)
1635 ALLOCATE (elbuf_str%GBUF%UELR(nel*g_uelr) ,stat=err)
1636 gbuf%UELR(1:nel*g_uelr) = rbuf(iad+1:iad+nel*g_uelr)
1637 iad = iad+nel*g_uelr
1638 ALLOCATE (elbuf_str%GBUF%UELR1(nel*g_uelr1) ,stat=err)
1639 gbuf%UELR1(1:nel*g_uelr1) = rbuf(iad+1:iad+nel*g_uelr1)
1640 iad = iad+nel*g_uelr1
1641 ALLOCATE (elbuf_str%GBUF%DAMDL(nel*g_damdl) ,stat=err)
1642 gbuf%DAMDL(1:nel*g_damdl) = rbuf(iad+1:iad+nel*g_damdl)
1643 iad = iad+nel*g_damdl
1644 ALLOCATE (elbuf_str%GBUF%FOR (nel*g_for) ,stat=err)
1645 gbuf%FOR(1:nel*g_for) = rbuf(iad+1:iad+nel*g_for)
1647 ALLOCATE (elbuf_str%GBUF%MOM (nel*g_mom) ,stat=err)
1648 gbuf%MOM(1:nel*g_mom) = rbuf(iad+1:iad+nel*g_mom)
1650 ALLOCATE (elbuf_str%GBUF%THK (nel*g_thk) ,stat=err)
1651 gbuf%THK(1:nel*g_thk) = rbuf(iad+1:iad+nel*g_thk)
1653 ALLOCATE (elbuf_str%GBUF%STRW(nel*g_strw),stat=err)
1654 gbuf%STRW(1:nel*g_strw)=rbuf(iad+1:iad+nel*g_strw)
1655 iad = iad+nel*g_strw
1656 ALLOCATE (elbuf_str%GBUF%THK_I(nel*g_thk_i),stat=err)
1657 gbuf%THK_I(1:nel*g_thk_i)=rbuf(iad+1:iad+nel*g_thk_i)
1658 iad = iad+nel*g_thk_i
1659 ALLOCATE (elbuf_str%GBUF%JAC_I(nel*g_jac_i),stat=err)
1660 gbuf%JAC_I(1:nel*g_jac_i)=rbuf(iad+1:iad+nel*g_jac_i)
1661 iad = iad+nel*g_jac_i
1662 ALLOCATE (elbuf_str%GBUF%DT(nel*g_dt),stat=err)
1663 gbuf%DT(1:nel*g_dt)=rbuf(iad+1:iad+nel*g_dt)
1665 ALLOCATE (elbuf_str%GBUF%ISMS(nel*g_isms),stat=err)
1666 gbuf%ISMS(1:nel*g_isms)=rbuf(iad+1:iad+nel*g_isms)
1667 iad = iad+nel*g_isms
1668 ALLOCATE (elbuf_str%GBUF%BPRELD(nel*g_bpreld),stat=err)
1669 gbuf%BPRELD(1:nel*g_bpreld)=rbuf(iad+1:iad+nel*g_bpreld)
1670 iad = iad+nel*g_bpreld
1671 ALLOCATE (elbuf_str%GBUF%COR_NF(nel*g_cor_nf),stat=err)
1672 gbuf%COR_NF(1:nel*g_cor_nf)=rbuf(iad+1:iad+nel*g_cor_nf)
1673 iad = iad+nel*g_cor_nf
1674 ALLOCATE (elbuf_str%GBUF%COR_FR(nel*g_cor_fr),stat=err)
1675 gbuf%COR_FR(1:nel*g_cor_fr)=rbuf(iad+1:iad+nel*g_cor_fr)
1676 iad = iad+nel*g_cor_fr
1677 ALLOCATE (elbuf_str%GBUF%COR_XR(nel*g_cor_xr),stat=err)
1678 gbuf%COR_XR(1:nel*g_cor_xr)=rbuf(iad+1:iad+nel*g_cor_xr)
1679 iad = iad+nel*g_cor_xr
! Restore MAXFRAC and MAXEPS from the read buffer, advancing iad.
! FIX(review): the MAXEPS assignment was truncated mid-statement
! ("gbuf%MAXEPS(1:nel*g_maxeps"); reconstructed following the uniform
! alloc/assign/advance pattern used by every neighbouring array.
1680 ALLOCATE (elbuf_str%GBUF%MAXFRAC(nel*g_maxfrac),stat=err)
1681 gbuf%MAXFRAC(1:nel*g_maxfrac)=rbuf(iad+1:iad+nel*g_maxfrac)
1682 iad = iad+nel*g_maxfrac
1683 ALLOCATE (elbuf_str%GBUF%MAXEPS(nel*g_maxeps),stat=err)
1684 gbuf%MAXEPS(1:nel*g_maxeps)=rbuf(iad+1:iad+nel*g_maxeps)
1685 iad = iad+nel*g_maxeps
! Allocate and restore BETAORTH..WPLA from rbuf; iad advances after each
! array (some advance lines are outside this extract — see full source).
1686 ALLOCATE (elbuf_str%GBUF%BETAORTH (nel*g_betaorth) ,stat=err)
1687 gbuf%BETAORTH(1:nel*g_betaorth) = rbuf(iad+1:iad+nel*g_betaorth)
1688 iad = iad+nel*g_betaorth
1689 ALLOCATE (elbuf_str%GBUF%AMU(nel*g_amu),stat=err)
1690 gbuf%AMU(1:nel*g_amu)=rbuf(iad+1:iad+nel*g_amu)
1692 ALLOCATE (elbuf_str%GBUF%SH_IOFFSET(nel*g_sh_ioffset),stat=err)
1693 gbuf%SH_IOFFSET(1:nel*g_sh_ioffset)=rbuf(iad+1:iad+nel*g_sh_ioffset)
1694 iad = iad+nel*g_sh_ioffset
1695 ALLOCATE (elbuf_str%GBUF%EINT_DISTOR(nel*g_eint_distor),stat=err)
1696 gbuf%EINT_DISTOR(1:nel*g_eint_distor)=rbuf(iad+1:iad+nel*g_eint_distor)
1697 iad = iad+nel*g_eint_distor
1698 ALLOCATE (elbuf_str%GBUF%FOR_G(nel*g_for_g),stat=err)
1699 gbuf%FOR_G(1:nel*g_for_g)=rbuf(iad+1:iad+nel*g_for_g)
1700 iad = iad+nel*g_for_g
1701 ALLOCATE (elbuf_str%GBUF%WPLA (nel*g_wpla) ,stat=err)
1702 gbuf%WPLA(1:nel*g_wpla) = rbuf(iad+1:iad+nel*g_wpla)
1703 iad = iad+nel*g_wpla
! Per-Gauss-point force/moment/strain storage: either alias the plain
! arrays (single integration point) or allocate dedicated PG arrays.
! FIX(review): the "IF (...)"/"THEN" and "ELSEIF (...)"/"THEN" pairs had
! been split onto separate physical lines (invalid Fortran without a
! continuation marker); rejoined. The ELSE branch between statements
! 1716 and 1718 is not visible in this extract — confirm in full source.
1706 elbuf_str%GBUF%FORPG => gbuf%FOR
1707 elbuf_str%GBUF%MOMPG => gbuf%MOM
1715 IF (g_strpg<=g_stra) THEN
1716 elbuf_str%GBUF%STRPG => gbuf%STRA
1718 ALLOCATE (elbuf_str%GBUF%STRPG(nel*g_strpg) ,stat=err)
1719 gbuf%STRPG(1:nel*g_strpg) = rbuf(iad+1:iad+nel*g_strpg)
1720 iad = iad+nel*g_strpg
1722 elbuf_str%GBUF%FORPG_G => gbuf%FOR_G
1723 ELSEIF (npg > 1) THEN
1724 ALLOCATE (elbuf_str%GBUF%FORPG(nel*g_forpg) ,stat=err)
1725 gbuf%FORPG(1:nel*g_forpg) = rbuf(iad+1:iad+nel*g_forpg)
1726 iad = iad+nel*g_forpg
1727 ALLOCATE (elbuf_str%GBUF%MOMPG(nel*g_mompg) ,stat=err)
1728 gbuf%MOMPG(1:nel*g_mompg) = rbuf(iad+1:iad+nel*g_mompg)
1729 iad = iad+nel*g_mompg
! Restore pinch-shell per-Gauss-point arrays and the PG strain arrays.
! FIX(review): statement 1735 wrote into gbuf%MOMPG although the array
! just allocated (and whose extent sizes the copy) is MOMPGPINCH —
! classic copy-paste defect; corrected to write gbuf%MOMPGPINCH.
1731 ALLOCATE (elbuf_str%GBUF%FORPGPINCH(nel*g_forpgpinch) ,stat=err)
1732 gbuf%FORPGPINCH(1:nel*g_forpgpinch) = rbuf(iad+1:iad+nel*g_forpgpinch)
1733 iad = iad+nel*g_forpgpinch
1734 ALLOCATE (elbuf_str%GBUF%MOMPGPINCH(nel*g_mompgpinch) ,stat=err)
1735 gbuf%MOMPGPINCH(1:nel*g_mompgpinch) = rbuf(iad+1:iad+nel*g_mompgpinch)
1736 iad = iad+nel*g_mompgpinch
1737 ALLOCATE (elbuf_str%GBUF%EPGPINCHXZ(nel*g_epgpinchxz) ,stat=err)
1738 gbuf%EPGPINCHXZ(1:nel*g_epgpinchxz) = rbuf(iad+1:iad+nel*g_epgpinchxz)
1739 iad = iad+nel*g_epgpinchxz
1740 ALLOCATE (elbuf_str%GBUF%EPGPINCHYZ(nel*g_epgpinchyz) ,stat=err)
1741 gbuf%EPGPINCHYZ(1:nel*g_epgpinchyz) = rbuf(iad+1:iad+nel*g_epgpinchyz)
1742 iad = iad+nel*g_epgpinchyz
1743 ALLOCATE (elbuf_str%GBUF%EPGPINCHZZ(nel*g_epgpinchzz) ,stat=err)
1744 gbuf%EPGPINCHZZ(1:nel*g_epgpinchzz) = rbuf(iad+1:iad+nel*g_epgpinchzz)
1745 iad = iad+nel*g_epgpinchzz
1747 ALLOCATE (elbuf_str%GBUF%STRPG(nel*g_strpg) ,stat=err)
1748 gbuf%STRPG(1:nel*g_strpg) = rbuf(iad+1:iad+nel*g_strpg)
1749 iad = iad+nel*g_strpg
1750 ALLOCATE (elbuf_str%GBUF%STRWPG(nel*g_strwpg),stat=err)
1751 gbuf%STRWPG(1:nel*g_strwpg)=rbuf(iad+1:iad+nel*g_strwpg)
1752 iad = iad+nel*g_strwpg
1753 ALLOCATE (elbuf_str%GBUF%FORPG_G(nel*g_forpg_g),stat=err)
1754 gbuf%FORPG_G(1:nel*g_forpg_g)=rbuf(iad+1:iad+nel*g_forpg_g)
1755 iad = iad+nel*g_forpg_g
! Single-integration-point case: the global buffer fields simply alias
! the (unique) local layer buffer LBUF(1,1,1) instead of owning storage.
! FIX(review): "IF (npttot == 1)" and "THEN" had been split onto two
! physical lines (invalid without a continuation marker); rejoined.
1758 IF (npttot == 1) THEN
1759 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,1)
1760 elbuf_str%GBUF%BFRAC => lbuf%BFRAC
1761 elbuf_str%GBUF%OFF => lbuf%OFF
1762 elbuf_str%GBUF%EINT => lbuf%EINT
1763 elbuf_str%GBUF%EINS => lbuf%EINS
1764 elbuf_str%GBUF%RHO => lbuf%RHO
1765 elbuf_str%GBUF%QVIS => lbuf%QVIS
1766 elbuf_str%GBUF%DELTAX => lbuf%DELTAX
1767 elbuf_str%GBUF%VOL => lbuf%VOL
1768 elbuf_str%GBUF%EPSD => lbuf%EPSD
1769 elbuf_str%GBUF%EPSQ => lbuf%EPSQ
1770 elbuf_str%GBUF%PLA => lbuf%PLA
1771 elbuf_str%GBUF%WPLA => lbuf%WPLA
1772 elbuf_str%GBUF%TEMP => lbuf%TEMP
1773 elbuf_str%GBUF%TB => lbuf%TB
1774 elbuf_str%GBUF%RK => lbuf%RK
1775 elbuf_str%GBUF%RE => lbuf%RE
1776 elbuf_str%GBUF%SIG => lbuf%SIG
1777 elbuf_str%GBUF%FORTH => lbuf%FORTH
1778 elbuf_str%GBUF%EINTTH => lbuf%EINTTH
1779 elbuf_str%GBUF%SEQ => lbuf%SEQ
1780 elbuf_str%GBUF%ABURN => lbuf%ABURN
1781 elbuf_str%GBUF%MU => lbuf%MU
! Multi-point case: the global buffer owns its arrays; allocate and
! restore each from rbuf (cursor advance lines for OFF/RHO are outside
! this extract — see full source).
1784 ALLOCATE (elbuf_str%GBUF%BFRAC(nel*g_bfrac) ,stat=err)
1785 gbuf%BFRAC(1:nel*g_bfrac) = rbuf(iad+1:iad+nel*g_bfrac)
1786 iad = iad+nel*g_bfrac
1787 ALLOCATE (elbuf_str%GBUF%OFF (nel*g_off) ,stat=err)
1788 gbuf%OFF(1:nel*g_off) = rbuf(iad+1:iad+nel*g_off)
1790 ALLOCATE (elbuf_str%GBUF%EINT(nel*g_eint) ,stat=err)
1791 gbuf%EINT(1:nel*g_eint) = rbuf(iad+1:iad+nel*g_eint)
1792 iad = iad+nel*g_eint
1793 ALLOCATE (elbuf_str%GBUF%EINS(nel*g_eins) ,stat=err)
1794 gbuf%EINS(1:nel*g_eins) = rbuf(iad+1:iad+nel*g_eins)
1795 iad = iad+nel*g_eins
1796 ALLOCATE (elbuf_str%GBUF%RHO (nel*g_rho) ,stat=err)
1797 gbuf%RHO(1:nel*g_rho) = rbuf(iad+1:iad+nel*g_rho)
! Restore artificial-viscosity array QVIS.
! FIX(review): the ALLOCATE statement was truncated mid-line
! ("...QVIS(nel*g_qvis"); closing paren and stat= specifier restored per
! the pattern used by every neighbouring allocation.
1799 ALLOCATE (elbuf_str%GBUF%QVIS(nel*g_qvis) ,stat=err)
1800 gbuf%QVIS(1:nel*g_qvis) = rbuf(iad+1:iad+nel*g_qvis)
1801 iad = iad+nel*g_qvis
! Continue allocate/restore of owned global-buffer arrays (DELTAX..FILL).
! Several cursor-advance lines fall in numbering gaps of this extract.
1802 ALLOCATE (elbuf_str%GBUF%DELTAX(nel*g_deltax),stat=err)
1803 gbuf%DELTAX(1:nel*g_deltax) = rbuf(iad+1:iad+nel*g_deltax)
1804 iad = iad+nel*g_deltax
1805 ALLOCATE (elbuf_str%GBUF%VOL (nel*g_vol) ,stat=err)
1806 gbuf%VOL(1:nel*g_vol) = rbuf(iad+1:iad+nel*g_vol)
1808 ALLOCATE (elbuf_str%GBUF%EPSD(nel*g_epsd) ,stat=err)
1809 gbuf%EPSD(1:nel*g_epsd) = rbuf(iad+1:iad+nel*g_epsd)
1810 iad = iad+nel*g_epsd
1811 ALLOCATE (elbuf_str%GBUF%EPSQ(nel*g_epsq) ,stat=err)
1812 gbuf%EPSQ(1:nel*g_epsq) = rbuf(iad+1:iad+nel*g_epsq)
1813 iad = iad+nel*g_epsq
1814 ALLOCATE (elbuf_str%GBUF%PLA (nel*g_pla) ,stat=err)
1815 gbuf%PLA(1:nel*g_pla) = rbuf(iad+1:iad+nel*g_pla)
1817 ALLOCATE (elbuf_str%GBUF%TEMP(nel*g_temp) ,stat=err)
1818 gbuf%TEMP(1:nel*g_temp) = rbuf(iad+1:iad+nel*g_temp)
1819 iad = iad+nel*g_temp
1820 ALLOCATE (elbuf_str%GBUF%TB(nel*g_tb) ,stat=err)
1821 gbuf%TB(1:nel*g_tb) = rbuf(iad+1:iad+nel*g_tb)
1823 ALLOCATE (elbuf_str%GBUF%RK(nel*g_rk) ,stat=err)
1824 gbuf%RK(1:nel*g_rk) = rbuf(iad+1:iad+nel*g_rk)
1826 ALLOCATE (elbuf_str%GBUF%RE(nel*g_re) ,stat=err)
1827 gbuf%RE(1:nel*g_re) = rbuf(iad+1:iad+nel*g_re)
1829 ALLOCATE (elbuf_str%GBUF%SIG (nel*g_sig) ,stat=err)
1830 gbuf%SIG(1:nel*g_sig) = rbuf(iad+1:iad+nel*g_sig)
1832 ALLOCATE (elbuf_str%GBUF%FORTH (nel*g_forth) ,stat=err)
1833 gbuf%FORTH(1:nel*g_forth) = rbuf(iad+1:iad+nel*g_forth)
1834 iad = iad+nel*g_forth
1835 ALLOCATE (elbuf_str%GBUF%EINTTH (nel*g_eintth) ,stat=err)
1836 gbuf%EINTTH(1:nel*g_eintth) = rbuf(iad+1:iad+nel*g_eintth)
1837 iad = iad+nel*g_eintth
1838 ALLOCATE (elbuf_str%GBUF%SEQ (nel*g_seq) ,stat=err)
1839 gbuf%SEQ(1:nel*g_seq) = rbuf(iad+1:iad+nel*g_seq)
1841 ALLOCATE (elbuf_str%GBUF%ABURN(nel*g_aburn) ,stat=err)
1842 gbuf%ABURN(1:nel*g_aburn) = rbuf(iad+1:iad+nel*g_aburn)
1843 iad = iad+nel*g_aburn
1844 ALLOCATE (elbuf_str%GBUF%MU(nel*g_mu) ,stat=err)
1845 gbuf%MU(1:nel*g_mu) = rbuf(iad+1:iad+nel*g_mu)
1850 ALLOCATE (elbuf_str%GBUF%FILL (nel*g_fill) ,stat=err)
1851 gbuf%FILL(1:nel*g_fill) = rbuf(iad+1:iad+nel*g_fill)
1852 iad = iad+nel*g_fill
! Damage scaling array, only for property type 3 with element type 5
! (meaning of igtyp/ity codes to be confirmed against full source).
! FIX(review): "IF(...)"/"THEN" had been split onto two physical lines
! (invalid without a continuation marker); rejoined.
1857 IF(igtyp == 3 .and. ity == 5) THEN
1858 ALLOCATE (elbuf_str%GBUF%DMGSCL (nel*g_dmgscl) ,stat=err)
1859 gbuf%DMGSCL(1:nel*g_dmgscl) = rbuf(iad+1:iad+nel*g_dmgscl)
1860 iad = iad+nel*g_dmgscl
! Allocate/restore AREA..FOREP (presumably spring/beam quantities —
! TODO confirm), advancing iad after each array.
1862 ALLOCATE (elbuf_str%GBUF%AREA(nel*g_area) ,stat=err)
1863 gbuf%AREA(1:nel*g_area) = rbuf(iad+1:iad+nel*g_area)
1864 iad = iad+nel*g_area
1865 ALLOCATE (elbuf_str%GBUF%SKEW(nel*g_skew) ,stat=err)
1866 gbuf%SKEW(1:nel*g_skew) = rbuf(iad+1:iad+nel*g_skew)
1867 iad = iad+nel*g_skew
1868 ALLOCATE (elbuf_str%GBUF%LENGTH(nel*g_length) ,stat=err)
1869 gbuf%LENGTH(1:nel*g_length) = rbuf(iad+1:iad+nel*g_length)
1870 iad = iad+nel*g_length
1871 ALLOCATE (elbuf_str%GBUF%TOTDEPL(nel*g_totdepl) ,stat=err)
1872 gbuf%TOTDEPL(1:nel*g_totdepl) = rbuf(iad+1:iad+nel*g_totdepl)
1873 iad = iad+nel*g_totdepl
1874 ALLOCATE (elbuf_str%GBUF%TOTROT(nel*g_totrot) ,stat=err)
1875 gbuf%TOTROT(1:nel*g_totrot) = rbuf(iad+1:iad+nel*g_totrot)
1876 iad = iad+nel*g_totrot
1877 ALLOCATE (elbuf_str%GBUF%FOREP(nel*g_forep) ,stat=err)
1878 gbuf%FOREP(1:nel*g_forep) = rbuf(iad+1:iad+nel*g_forep)
1879 iad = iad+nel*g_forep
! Restore plastic moment array MOMEP.
! FIX(review): ALLOCATE lacked the closing parenthesis after "stat=err"
! (truncated line); restored to match every neighbouring allocation.
1880 ALLOCATE (elbuf_str%GBUF%MOMEP(nel*g_momep) ,stat=err)
1881 gbuf%MOMEP(1:nel*g_momep) = rbuf(iad+1:iad+nel*g_momep)
1882 iad = iad+nel*g_momep
! Allocate/restore the remaining per-element arrays
! (DEP_IN_TENS..DT_PITER) from rbuf, advancing iad. Cursor-advance
! lines for DV/DFS/E6 fall in numbering gaps of this extract.
1883 ALLOCATE (elbuf_str%GBUF%DEP_IN_TENS(nel*g_dep_in_tens) ,stat=err)
1884 gbuf%DEP_IN_TENS(1:nel*g_dep_in_tens) = rbuf(iad+1:iad+nel*g_dep_in_tens)
1885 iad = iad+nel*g_dep_in_tens
1886 ALLOCATE (elbuf_str%GBUF%DEP_IN_COMP(nel*g_dep_in_comp) ,stat=err)
1887 gbuf%DEP_IN_COMP(1:nel*g_dep_in_comp) = rbuf(iad+1:iad+nel*g_dep_in_comp)
1888 iad = iad+nel*g_dep_in_comp
1889 ALLOCATE (elbuf_str%GBUF%ROT_IN_TENS(nel*g_rot_in_tens) ,stat=err)
1890 gbuf%ROT_IN_TENS(1:nel*g_rot_in_tens) = rbuf(iad+1:iad+nel*g_rot_in_tens)
1891 iad = iad+nel*g_rot_in_tens
1892 ALLOCATE (elbuf_str%GBUF%ROT_IN_COMP(nel*g_rot_in_comp) ,stat=err)
1893 gbuf%ROT_IN_COMP(1:nel*g_rot_in_comp) = rbuf(iad+1:iad+nel*g_rot_in_comp)
1894 iad = iad+nel*g_rot_in_comp
1895 ALLOCATE (elbuf_str%GBUF%POSX(nel*g_posx) ,stat=err)
1896 gbuf%POSX(1:nel*g_posx) = rbuf(iad+1:iad+nel*g_posx)
1897 iad = iad+nel*g_posx
1898 ALLOCATE (elbuf_str%GBUF%POSY(nel*g_posy) ,stat=err)
1899 gbuf%POSY(1:nel*g_posy) = rbuf(iad+1:iad+nel*g_posy)
1900 iad = iad+nel*g_posy
1901 ALLOCATE (elbuf_str%GBUF%POSZ(nel*g_posz) ,stat=err)
1902 gbuf%POSZ(1:nel*g_posz) = rbuf(iad+1:iad+nel*g_posz)
1903 iad = iad+nel*g_posz
1904 ALLOCATE (elbuf_str%GBUF%POSXX(nel*g_posxx) ,stat=err)
1905 gbuf%POSXX(1:nel*g_posxx) = rbuf(iad+1:iad+nel*g_posxx)
1906 iad = iad+nel*g_posxx
1907 ALLOCATE (elbuf_str%GBUF%POSYY(nel*g_posyy) ,stat=err)
1908 gbuf%POSYY(1:nel*g_posyy) = rbuf(iad+1:iad+nel*g_posyy)
1909 iad = iad+nel*g_posyy
1910 ALLOCATE (elbuf_str%GBUF%POSZZ(nel*g_poszz) ,stat=err)
1911 gbuf%POSZZ(1:nel*g_poszz) = rbuf(iad+1:iad+nel*g_poszz)
1912 iad = iad+nel*g_poszz
1913 ALLOCATE (elbuf_str%GBUF%YIELD(nel*g_yield) ,stat=err)
1914 gbuf%YIELD(1:nel*g_yield) = rbuf(iad+1:iad+nel*g_yield)
1915 iad = iad+nel*g_yield
1916 ALLOCATE (elbuf_str%GBUF%LENGTH_ERR(nel*g_length_err) ,stat=err)
1917 gbuf%LENGTH_ERR(1:nel*g_length_err) = rbuf(iad+1:iad+nel*g_length_err)
1918 iad = iad+nel*g_length_err
1919 ALLOCATE (elbuf_str%GBUF%DV(nel*g_dv) ,stat=err)
1920 gbuf%DV(1:nel*g_dv) = rbuf(iad+1:iad+nel*g_dv)
1922 ALLOCATE (elbuf_str%GBUF%DFS(nel*g_dfs) ,stat=err)
1923 gbuf%DFS(1:nel*g_dfs) = rbuf(iad+1:iad+nel*g_dfs)
1925 ALLOCATE (elbuf_str%GBUF%SKEW_ERR(nel*g_skew_err) ,stat=err)
1926 gbuf%SKEW_ERR(1:nel*g_skew_err) = rbuf(iad+1:iad+nel*g_skew_err)
1927 iad = iad+nel*g_skew_err
1928 ALLOCATE (elbuf_str%GBUF%E6(nel*g_e6) ,stat=err)
1929 gbuf%E6(1:nel*g_e6) = rbuf(iad+1:iad+nel*g_e6)
1931 ALLOCATE (elbuf_str%GBUF%RUPTCRIT(nel*g_ruptcrit) ,stat=err)
1932 gbuf%RUPTCRIT(1:nel*g_ruptcrit) = rbuf(iad+1:iad+nel*g_ruptcrit)
1933 iad = iad+nel*g_ruptcrit
1934 ALLOCATE (elbuf_str%GBUF%MASS(nel*g_mass) ,stat=err)
1935 gbuf%MASS(1:nel*g_mass) = rbuf(iad+1:iad+nel*g_mass)
1936 iad = iad+nel*g_mass
1937 ALLOCATE (elbuf_str%GBUF%V_REPCVT(nel*g_v_repcvt) ,stat=err)
1938 gbuf%V_REPCVT(1:nel*g_v_repcvt) = rbuf(iad+1:iad+nel*g_v_repcvt)
1939 iad = iad+nel*g_v_repcvt
1940 ALLOCATE (elbuf_str%GBUF%VR_REPCVT(nel*g_vr_repcvt) ,stat=err)
1941 gbuf%VR_REPCVT(1:nel*g_vr_repcvt) = rbuf(iad+1:iad+nel*g_vr_repcvt)
1942 iad = iad+nel*g_vr_repcvt
1943 ALLOCATE (elbuf_str%GBUF%VAR(nel*g_nuvar) ,stat=err)
1944 gbuf%VAR(1:nel*g_nuvar) = rbuf(iad+1:iad+nel*g_nuvar)
1945 iad = iad+nel*g_nuvar
1946 ALLOCATE (elbuf_str%GBUF%VARN(nel*g_nuvarn) ,stat=err)
1947 gbuf%VARN(1:nel*g_nuvarn) = rbuf(iad+1:iad+nel*g_nuvarn)
1948 iad = iad+nel*g_nuvarn
1949 ALLOCATE (elbuf_str%GBUF%DEFINI(nel*g_defini) ,stat=err)
1950 gbuf%DEFINI(1:nel*g_defini) = rbuf(iad+1:iad+nel*g_defini)
1951 iad = iad+nel*g_defini
1952 ALLOCATE (elbuf_str%GBUF%FORINI(nel*g_forini) ,stat=err)
1953 gbuf%FORINI(1:nel*g_forini) = rbuf(iad+1:iad+nel*g_forini)
1954 iad = iad+nel*g_forini
1955 ALLOCATE (elbuf_str%GBUF%INIFRIC(nel*g_inifric) ,stat=err)
1956 gbuf%INIFRIC(1:nel*g_inifric) = rbuf(iad+1:iad+nel*g_inifric)
1957 iad = iad+nel*g_inifric
1958 ALLOCATE (elbuf_str%GBUF%STRHG(nel*g_strhg) ,stat=err)
1959 gbuf%STRHG(1:nel*g_strhg) = rbuf(iad+1:iad+nel*g_strhg)
1960 iad = iad+nel*g_strhg
1961 ALLOCATE (elbuf_str%GBUF%DT_PITER(nel*g_dt_piter) ,stat=err)
! DT_PITER: initialise to zero when the quantity did not exist in the
! old restart file (g_dt_piter_old == 0), otherwise read it back; the
! cursor advances by the OLD size so the read position stays correct.
! FIX(review): two "IF(...)"/"THEN" pairs had been split onto separate
! physical lines (invalid without a continuation marker); rejoined.
! NOTE(review): the matching ELSE/ENDIF lines between statements 1965
! and 1971 are not visible in this extract — confirm in full source.
1963 IF(idt1tet10 > 1) THEN
1964 IF(g_dt_piter_old==0) THEN
1965 gbuf%DT_PITER(1:nel*g_dt_piter) = zero
1967 gbuf%DT_PITER(1:nel*g_dt_piter) = rbuf(iad+1:iad+nel*g_dt_piter)
1971 iad = iad+nel*g_dt_piter_old
! TEMPG aliases TEMP in this branch; integer-valued ID/flag arrays are
! restored with nint() conversion from the real-valued read buffer.
1974 elbuf_str%GBUF%TEMPG => gbuf%TEMP
1977 ALLOCATE (elbuf_str%GBUF%SKEW_ID(nel*g_skew_id) ,stat=err)
1978 gbuf%SKEW_ID(1:nel*g_skew_id) = nint(rbuf(iad+1:iad+nel*g_skew_id))
1979 iad = iad+nel*g_skew_id
1980 ALLOCATE (elbuf_str%GBUF%YIELD_IN_COMP(nel*g_yield_in_comp),stat=err)
1981 gbuf%YIELD_IN_COMP(1:nel*g_yield_in_comp) = nint(rbuf(iad+1:iad+nel*g_yield_in_comp))
1982 iad = iad+nel*g_yield_in_comp
1983 ALLOCATE (elbuf_str%GBUF%XXOLD_IN_COMP(nel*g_xxold_in_comp),stat=err)
1984 gbuf%XXOLD_IN_COMP(1:nel*g_xxold_in_comp) = nint(rbuf(iad+1:iad+nel*g_xxold_in_comp))
1985 iad = iad+nel*g_xxold_in_comp
! Restore the failure-model container FAIL(1): first its scalar header
! (law id, failure id, variable counts), then each per-element array.
! FIX(review): "IF (...)"/"THEN" had been split onto two physical lines
! (invalid without a continuation marker); rejoined.
! NOTE(review): each scalar reads rbuf(iad+1); the "iad = iad + 1"
! advances between them fall in numbering gaps of this extract —
! confirm against the full source before editing this region further.
1990 IF (igtyp == 3 .and. g_noff > 0) THEN
1991 ALLOCATE(elbuf_str%GBUF%FAIL(1) ,stat=err)
1992 gbuf%FAIL(1)%ILAWF = nint(rbuf(iad+1))
1994 gbuf%FAIL(1)%IDFAIL = nint(rbuf(iad+1))
1996 gbuf%FAIL(1)%NVAR = nint(rbuf(iad+1))
1998 gbuf%FAIL(1)%LF_DAM = nint(rbuf(iad+1))
2000 gbuf%FAIL(1)%LF_DAMMX = nint(rbuf(iad+1))
2002 gbuf%FAIL(1)%LF_DAMINI = nint(rbuf(iad+1))
2004 gbuf%FAIL(1)%LF_TDEL = nint(rbuf(iad+1))
2006 gbuf%FAIL(1)%LF_INDX = nint(rbuf(iad+1))
2008 gbuf%FAIL(1)%LF_OFF = nint(rbuf(iad+1))
2011 ALLOCATE(elbuf_str%GBUF%FAIL(1)%VAR(gbuf%FAIL(1)%NVAR*nel) ,stat=err)
2012 gbuf%FAIL(1)%VAR(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%NVAR)
2013 iad = iad + nel*gbuf%FAIL(1)%NVAR
2014 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAM(gbuf%FAIL(1)%LF_DAM*nel) ,stat=err)
2015 gbuf%FAIL(1)%DAM(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_DAM)
2016 iad = iad + nel*gbuf%FAIL(1)%LF_DAM
2017 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAMMX(gbuf%FAIL(1)%LF_DAMMX*nel) ,stat=err)
2018 gbuf%FAIL(1)%DAMMX(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_DAMMX)
2019 iad = iad + nel*gbuf%FAIL(1)%LF_DAMMX
2020 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAMINI(gbuf%FAIL(1)%LF_DAMINI*nel) ,stat=err)
2021 gbuf%FAIL(1)%DAMINI(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_DAMINI)
2022 iad = iad + nel*gbuf%FAIL(1)%LF_DAMINI
2023 ALLOCATE(elbuf_str%GBUF%FAIL(1)%TDEL(gbuf%FAIL(1)%LF_TDEL*nel) ,stat=err)
2024 gbuf%FAIL(1)%TDEL(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_TDEL)
2025 iad = iad + nel*gbuf%FAIL(1)%LF_TDEL
2026 ALLOCATE(elbuf_str%GBUF%FAIL(1)%INDX(gbuf%FAIL(1)%LF_INDX*nel) ,stat=err)
2027 gbuf%FAIL(1)%INDX(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_INDX)
2028 iad = iad + nel*gbuf%FAIL(1)%LF_INDX
2029 ALLOCATE(elbuf_str%GBUF%FAIL(1)%OFF(gbuf%FAIL(1)%LF_OFF*nel) ,stat=err)
2030 gbuf%FAIL(1)%OFF(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_OFF)
! Seatbelt-related integer ID arrays (slipring/retractor), restored with
! nint() conversion; iad advances after each array.
2036 ALLOCATE (elbuf_str%GBUF%SLIPRING_ID(nel*g_slipring_id) ,stat=err)
2037 gbuf%SLIPRING_ID(1:nel*g_slipring_id) = nint(rbuf(iad+1:iad+nel*g_slipring_id))
2038 iad = iad+nel*g_slipring_id
2039 ALLOCATE (elbuf_str%GBUF%SLIPRING_FRAM_ID(nel*g_slipring_fram_id) ,stat=err)
2040 gbuf%SLIPRING_FRAM_ID(1:nel*g_slipring_fram_id) = nint(rbuf(iad+1:iad+nel*g_slipring_fram_id))
2041 iad = iad+nel*g_slipring_fram_id
2042 ALLOCATE (elbuf_str%GBUF%SLIPRING_STRAND(nel*g_slipring_strand) ,stat=err)
2043 gbuf%SLIPRING_STRAND(1:nel*g_slipring_strand) = nint(rbuf(iad+1:iad+nel*g_slipring_strand))
2044 iad = iad+nel*g_slipring_strand
2045 ALLOCATE (elbuf_str%GBUF%RETRACTOR_ID(nel*g_retractor_id) ,stat=err)
2046 gbuf%RETRACTOR_ID(1:nel*g_retractor_id) = nint(rbuf(iad+1:iad+nel*g_retractor_id))
2047 iad = iad+nel*g_retractor_id
2048 ALLOCATE (elbuf_str%GBUF%RINGSLIP(nel*g_ringslip) ,stat=err)
! FIX(review): the RINGSLIP restore was truncated mid-statement
! ("= rbuf(iad"); reconstructed per the uniform read-buffer pattern.
2049 gbuf%RINGSLIP(1:nel*g_ringslip) = rbuf(iad+1:iad+nel*g_ringslip)
2050 iad = iad+nel*g_ringslip
! Remaining integer/real per-element arrays, then the thermo-mechanical
! (TM_*) output arrays; integer arrays use nint() conversion.
2051 ALLOCATE (elbuf_str%GBUF%ADD_NODE(nel*g_add_node) ,stat=err)
2052 gbuf%ADD_NODE(1:nel*g_add_node) = nint(rbuf(iad+1:iad+nel*g_add_node))
2053 iad = iad+nel*g_add_node
2054 ALLOCATE (elbuf_str%GBUF%UPDATE(nel*g_update) ,stat=err)
2055 gbuf%UPDATE(1:nel*g_update) = nint(rbuf(iad+1:iad+nel*g_update))
2056 iad = iad+nel*g_update
2057 ALLOCATE (elbuf_str%GBUF%FRAM_FACTOR(nel*g_fram_factor) ,stat=err)
2058 gbuf%FRAM_FACTOR(1:nel*g_fram_factor) = rbuf(iad+1:iad+nel*g_fram_factor)
2059 iad = iad+nel*g_fram_factor
2060 ALLOCATE (elbuf_str%GBUF%INTVAR(nel*g_intvar) ,stat=err)
2061 gbuf%INTVAR(1:nel*g_intvar) = rbuf(iad+1:iad+nel*g_intvar)
2062 iad = iad+nel*g_intvar
2063 ALLOCATE (elbuf_str%GBUF%IDT_TSH(nel*g_idt_tsh) ,stat=err)
2064 gbuf%IDT_TSH(1:nel*g_idt_tsh) = nint(rbuf(iad+1:iad+nel*g_idt_tsh))
2065 iad = iad+nel*g_idt_tsh
! TEMPG sized from the already-copied descriptor counter gbuf%G_TEMPG.
2067 ALLOCATE (elbuf_str%GBUF%TEMPG(nel*gbuf%G_TEMPG),stat=err)
2068 gbuf%TEMPG(1:nel*gbuf%G_TEMPG)=rbuf(iad+1:iad+nel*gbuf%G_TEMPG)
2069 iad = iad + nel*gbuf%G_TEMPG
2074 ALLOCATE (elbuf_str%GBUF%TM_YIELD(nel*g_tm_yield) ,stat=err)
2075 gbuf%TM_YIELD(1:nel*g_tm_yield) = rbuf(iad+1:iad+nel*g_tm_yield)
2076 iad = iad+nel*g_tm_yield
2077 ALLOCATE (elbuf_str%GBUF%TM_SEQ(nel*g_tm_seq) ,stat=err)
2078 gbuf%TM_SEQ(1:nel*g_tm_seq) = rbuf(iad+1:iad+nel*g_tm_seq)
2079 iad = iad+nel*g_tm_seq
2080 ALLOCATE (elbuf_str%GBUF%TM_EINT(nel*g_tm_eint) ,stat=err)
2081 gbuf%TM_EINT(1:nel*g_tm_eint) = rbuf(iad+1:iad+nel*g_tm_eint)
2082 iad = iad+nel*g_tm_eint
2083 ALLOCATE (elbuf_str%GBUF%TM_DMG(nel*g_tm_dmg) ,stat=err)
2084 gbuf%TM_DMG(1:nel*g_tm_dmg) = rbuf(iad+1:iad+nel*g_tm_dmg)
2085 iad = iad+nel*g_tm_dmg
2086 ALLOCATE (elbuf_str%GBUF%TM_SIG1(nel*g_tm_sig) ,stat=err)
2087 gbuf%TM_SIG1(1:nel*g_tm_sig) = rbuf(iad+1:iad+nel*g_tm_sig)
2088 iad = iad+nel*g_tm_sig
2089 ALLOCATE (elbuf_str%GBUF%TM_SIG3(nel*g_tm_sig) ,stat=err)
2090 gbuf%TM_SIG3(1:nel*g_tm_sig) = rbuf(iad+1:iad+nel*g_tm_sig)
2091 iad = iad+nel*g_tm_sig
2092 ALLOCATE (elbuf_str%GBUF%TM_STRA1(nel*g_tm_stra) ,stat=err)
2093 gbuf%TM_STRA1(1:nel*g_tm_stra) = rbuf(iad+1:iad+nel*g_tm_stra)
2094 iad = iad+nel*g_tm_stra
2095 ALLOCATE (elbuf_str%GBUF%TM_STRA3(nel*g_tm_stra) ,stat=err)
! FIX(review): the TM_STRA3 restore was truncated mid-statement
! ("rbuf(iad+1:iad+nel"); completed per the TM_STRA1 pattern two lines
! above, which uses the same g_tm_stra extent.
2096 gbuf%TM_STRA3(1:nel*g_tm_stra) = rbuf(iad+1:iad+nel*g_tm_stra)
2097 iad = iad+nel*g_tm_stra
! Principal stress/strain work arrays (2 values per element), allocated
! only when the corresponding TM quantity is present.
! FIX(review): both one-line IF statements had their ALLOCATE split onto
! the following physical line (invalid without a continuation marker);
! rejoined.
2099 IF (g_tm_sig>0)  ALLOCATE (elbuf_str%GBUF%TM_PSIG(nel*2),stat=err)
2100 IF (g_tm_stra>0) ALLOCATE (elbuf_str%GBUF%TM_PSTRA(nel*2),stat=err)
! Release the single-precision read buffer now that all rbuf-sourced
! arrays have been restored.
! FIX(review): the one-line guarded DEALLOCATE had been split across
! three physical lines (invalid without continuation markers); rejoined.
2103 IF (ALLOCATED(rbuf)) DEALLOCATE (rbuf)
! Double-precision restart data: size the dp buffer (SMSTR contribution
! added here), allocate it, then walk layers to restore VOL0DP.
! FIX(review): three "IF (...)"/"THEN" pairs had been split onto
! separate physical lines (invalid without continuation markers);
! rejoined. The loop bodies/ENDIFs around statements 2114-2125 are only
! partially visible in this extract (numbering gaps) — confirm the
! surrounding DO/ELSE/ENDIF structure in the full source.
2104 sdp_rbuf = sdp_rbuf + gbuf%G_SMSTR * nel
2105 ALLOCATE (elbuf_str%GBUF%SMSTR(nel*g_smstr) ,stat=err)
2106 IF (sdp_rbuf > 0) THEN
2107 ALLOCATE (dp_rbuf(sdp_rbuf))
2113 IF (l_vol0dp > 0) THEN
2114 DO il = 1,elbuf_str%NLAY
2116 bufly => elbuf_str%BUFLY(il)
! Property types 51/52 carry a per-layer integration-point count.
2117 IF (igtyp == 51 .OR. igtyp == 52) THEN
2118 nptt = elbuf_str%BUFLY(il)%NPTT
2120 nptt = elbuf_str%NPTT
2125 lbuf => elbuf_str%BUFLY(il)%LBUF(ir,is,it)
! Restore double-precision initial volumes per local buffer, then the
! double-precision small-strain array SMSTR, advancing the dp cursor.
! FIX(review): statements 2126 and 2134 were truncated mid-line;
! reconstructed per the iadp-cursor pattern (the advance on 2127 uses
! nel*l_vol0dp, fixing the extent of the 2126 read).
2125 lbuf => elbuf_str%BUFLY(il)%LBUF(ir,is,it)
2126 lbuf%VOL0DP(1:nel*l_vol0dp) = dp_rbuf(iadp+1:iadp+nel*l_vol0dp)
2127 iadp = iadp+nel*l_vol0dp
2134 gbuf%SMSTR(1:nel*g_smstr) = dp_rbuf(iadp+1:iadp+nel*g_smstr)