34 SUBROUTINE allocbuf_auto(ELBUF_STR,BUFLEN,NPT,ITY,ISNOD,NEWLEN,INLOC,ISOLNOD,
44#include "implicit_f.inc"
48#include "com_xfem1.inc"
53 INTEGER BUFLEN,NPT,ITY,ISNOD,NEWLEN,INLOC
54 INTEGER,
INTENT(IN) ::
55 INTEGER,
INTENT(IN) ::
56 TYPE(elbuf_struct_) ,
TARGET :: ELBUF_STR
60 INTEGER I,K,IL,IR,IS,IT,IAD,ERR,,NVARTMP,NVARTMP_EOS,IGTYP,IXFEM,NLAY,
61 . nintlay,nptr,npts,nptt,nel,npg,ipt,len1,len2,ilaw,imat,ieos,nonl,
65 . g_gama,g_sig,g_off,g_noff,g_eint,g_eins,g_temp,
66 . g_rho,g_pla,g_vol,g_epsd,g_qvis,g_deltax,g_tb,g_rk,g_re,g_wpla,
67 . g_smstr,g_hourg,g_bfrac,g_thk,g_for,g_mom,g_tag22,g_stra,g_sigi,
68 . g_dmg,g_forpg,g_mompg,g_strpg,g_uelr,g_uelr1,g_epsq,g_ierr,
69 . g_damdl,g_forth,g_eintth,g_fill,g_seq,g_strw,g_strwpg,g_thk_i,
70 . g_jac_i,g_area,g_skew,g_length,g_totdepl,g_totrot,g_forep,g_momep,
71 . g_dep_in_tens,g_dep_in_comp,g_rot_in_tens,g_rot_in_comp,
72 . g_posx,g_posy,g_posz,g_posxx,g_posyy,g_poszz,g_yield,g_length_err,
73 . g_dv,g_dfs,g_skew_err,g_e6,g_ruptcrit,g_mass,g_v_repcvt,
74 . g_vr_repcvt,g_nuvar,g_nuvarn,g_inifric,g_dt,g_isms,g_strhg,g_etotsh,
75 . g_bpreld,g_aburn,g_mu,g_planl,g_epsdnl,g_dt_piter_old,g_dt_piter,g_tempg,
76 . g_cor_nf,g_cor_fr,g_cor_xr,g_defini,g_forini,g_idt_tsh,
77 . g_forpgpinch,g_mompgpinch,g_epgpinchxz,g_epgpinchyz,g_epgpinchzz,
78 . g_skew_id,g_maxfrac,g_maxeps,g_gama_r,g_slipring_id,g_slipring_strand,g_retractor_id,
79 . g_ringslip,g_add_node,g_update,g_intvar,g_betaorth,g_amu,g_slipring_fram_id,g_fram_factor,
80 . g_tm_yield,g_tm_seq,g_tm_eint,g_tm_dmg,g_tm_sig,g_tm_stra,g_tsaiwu,g_dmgscl,
81 . g_sh_ioffset,g_eint_distor,g_for_g,g_forpg_g,g_yield_in_comp,g_xxold_in_comp
83 . ly_dmg,ly_gama,ly_dira,ly_dirb,ly_crkdir,ly_plapt,ly_sigpt,ly_smstr,
84 . ly_hourg,ly_uelr,ly_uelr1,ly_offpg,ly_off,len_plapt,len_sigpt
88 . l_off,l_eint,l_eins,l_rho,l_dp_drho,l_vol,l_pla,l_tb,l_temp,
89 . l_rk,l_re,l_vk,l_rob,l_sig,l_sigv,l_sigl,l_siga,l_sigd,l_sigb,
90 . l_sigc,l_sigf,l_stra,l_epsa,l_epsd,l_epsq,l_epsf,l_crak,l_wpla,
91 . l_ang,l_sf,l_qvis,l_deltax,l_gama,
92 . l_dam,l_dsum,l_dglo,l_epe,l_epc,l_xst,l_ssp,l_z,l_frac,l_visc,
93 . l_thk,l_for,l_mom,l_eps,l_smstr,l_bfrac,l_dmg,l_forth,l_eintth,
94 . l_seq,l_sigply,l_jac_i,l_fac_yld,l_aburn,l_mu,l_planl,l_epsdnl,
95 . l_dmgscl,l_pij,l_vol0dp,l_tsaiwu
97 . lf_dam,lf_dammx,lf_damini,lf_tdel,lf_indx,lf_off
99 .
ALLOCATABLE,
DIMENSION(:) :: rbuf
101 DOUBLE PRECISION,
DIMENSION(:),
ALLOCATABLE :: DP_RBUF
105 TYPE(buf_mat_) ,
POINTER :: MATBUF
106 TYPE(buf_lay_) ,
POINTER :: BUFLY
107 TYPE(buf_intlay_) ,
POINTER :: INTLAY
108 TYPE(buf_visc_) ,
POINTER :: VISCBUF
109 TYPE(g_bufel_) ,
POINTER :: GBUF
110 TYPE(l_bufel_) ,
POINTER :: LBUF
111 TYPE(buf_intloc_) ,
POINTER :: ILBUF
112 TYPE(fail_loc_) ,
POINTER :: FLOC
113 TYPE(buf_nloc_) ,
POINTER :: BUFNL
114 TYPE(buf_nlocts_) ,
POINTER :: BUFNLTS
115 TYPE(l_bufel_dir_) ,
POINTER :: LBUF_DIR
127 ALLOCATE (rbuf(buflen) ,stat=err)
136 igtyp = nint(rbuf(iad+1))
138 nel = nint(rbuf(iad+1))
140 nlay = nint(rbuf(iad+1))
142 nintlay= nint(rbuf(iad+1))
144 nptr = nint(rbuf(iad+1))
146 npts = nint(rbuf(iad+1))
148 nptt = nint(rbuf(iad+1))
150 ixfem = nint(rbuf(iad+1))
152 nxel = nint(rbuf(iad+1))
154 idrape = nint(rbuf(iad+1))
157 elbuf_str%IGTYP = igtyp
159 elbuf_str%NLAY = nlay
160 elbuf_str%NINTLAY= nintlay
161 elbuf_str%NPTR = nptr
162 elbuf_str%NPTS = npts
163 elbuf_str%NPTT = nptt
164 elbuf_str%IXFEM = ixfem
165 elbuf_str%NXEL = nxel
166 elbuf_str%IDRAPE = idrape
167 npt = nlay*nptr*npts*nptt
169 IF (ity == 1 .OR. ity == 2 .OR. ity == 51 .OR. ity == 101)
THEN
172 ELSEIF (ity == 3 .OR. ity == 7)
THEN
175 ELSEIF (ity == 4 .OR. ity == 5 .OR. ity == 6 .OR. ity == 100)
THEN
182 IF (idamp_freq_range > 0)
THEN
183 ALLOCATE (elbuf_str%DAMP_RANGE%ALPHA(3) ,stat=err
184 ALLOCATE (elbuf_str%DAMP_RANGE%TAU(3) ,stat=err)
185 elbuf_str%DAMP_RANGE%ALPHA(1:3) = rbuf(iad+1:iad+3)
186 elbuf_str%DAMP_RANGE%TAU(1:3) = rbuf(iad+4:iad+6)
190 ALLOCATE (elbuf_str%BUFLY(nlay) ,stat=err)
204 ALLOCATE (elbuf_str%INTLAY(nintlay) ,stat=err)
206 ALLOCATE(elbuf_str%INTLAY(il)%ILBUF(nptr,npts),stat=err)
207 ALLOCATE(elbuf_str%INTLAY(il)%MAT (nptr,npts),stat=err)
208 ALLOCATE(elbuf_str%INTLAY(il)%FAIL (nptr,npts),stat=err)
213 DO il = 1,elbuf_str%NLAY
216 bufly => elbuf_str%BUFLY(il)
218 ilaw = nint(rbuf(iad+1))
220 imat = nint(rbuf(iad+1))
222 ieos = nint(rbuf(iad+1))
224 ivisc = nint(rbuf(iad+1))
226 iporo = nint(rbuf(iad+1))
228 nfail = nint(rbuf(iad+1))
230 nvar_mat = nint(rbuf(iad+1))
232 nvar_eos = nint(rbuf(iad+1))
234 nvartmp = nint(rbuf(iad+1))
236 nvartmp_eos = nint(rbuf(iad+1))
238 nvar_visc= nint(rbuf(iad+1))
240 nvar_loc = nint(rbuf(iad+1))
242 nvar_lay = nint(rbuf(iad+1))
244 nptt = nint(rbuf(iad+1))
249 ly_dmg = nint(rbuf(iad+1))
251 ly_gama = nint(rbuf(iad+1))
253 ly_dira = nint(rbuf(iad+1))
255 ly_dirb = nint(rbuf(iad
257 ly_crkdir= nint(rbuf(iad+1))
259 ly_plapt = nint(rbuf(iad+1))
261 ly_sigpt = nint(rbuf(iad+1))
263 ly_hourg = nint(rbuf(iad+1))
265 ly_uelr = nint(rbuf(iad+1))
267 ly_uelr1 = nint(rbuf(iad+1))
269 ly_offpg = nint(rbuf(iad+1))
271 ly_off = nint(rbuf(iad+1))
276 l_off = nint(rbuf(iad+1))
278 l_gama = nint(rbuf(iad+1))
280 l_stra = nint(rbuf(iad+1))
282 l_frac = nint(rbuf(iad+1))
284 l_bfrac = nint(rbuf(iad+1))
286 l_eint = nint(rbuf(iad+1))
288 l_eins = nint(rbuf(iad+1))
290 l_rho = nint(rbuf(iad+1))
292 l_dp_drho= nint(rbuf(iad+1))
294 l_qvis = nint(rbuf(iad+1))
296 l_deltax = nint(rbuf(iad+1))
298 l_vol = nint(rbuf(iad+1))
300 l_epsa = nint(rbuf(iad+1))
302 l_epsd = nint(rbuf(iad+1))
304 l_epsq = nint(rbuf(iad+1))
306 l_epsf = nint(rbuf(iad+1))
308 l_pla = nint(rbuf(iad+1))
310 l_wpla = nint(rbuf(iad+1))
312 l_temp = nint(rbuf(iad+1))
314 l_tb = nint(rbuf(iad+1))
316 l_rk = nint(rbuf(iad+1))
318 l_re = nint(rbuf(iad+1))
320 l_vk = nint(rbuf(iad+1))
322 l_sf = nint(rbuf(iad+1))
324 l_rob = nint(rbuf(iad+1))
326 l_dam = nint(rbuf(iad+1))
328 l_dsum = nint(rbuf(iad+1))
330 l_dglo = nint(rbuf(iad+1))
332 l_crak = nint(rbuf(iad+1))
334 l_ang = nint(rbuf(iad+1))
336 l_epe = nint(rbuf(iad+1))
338 l_epc = nint(rbuf(iad+1))
340 l_xst = nint(rbuf(iad+1))
342 l_ssp = nint(rbuf(iad+1))
344 l_z = nint(rbuf(iad+1))
346 l_visc = nint(rbuf(iad+1))
348 l_sigl = nint(rbuf(iad+1))
350 l_sigv = nint(rbuf(iad+1))
352 l_siga = nint(rbuf(iad+1))
354 l_sigb = nint(rbuf(iad+1))
356 l_sigc = nint(rbuf(iad+1))
358 l_sigd = nint(rbuf(iad+1))
360 l_sigf = nint(rbuf(iad+1))
362 l_sig = nint(rbuf(iad+1))
364 l_sigply = nint(rbuf(iad+1))
366 l_for = nint(rbuf(iad+1))
368 l_mom = nint(rbuf(iad+1))
370 l_thk = nint(rbuf(iad+1))
372 l_smstr = nint(rbuf(iad+1))
374 l_dmg = nint(rbuf(iad+1))
376 l_forth = nint(rbuf(iad+1))
378 l_eintth = nint(rbuf(iad+1))
380 l_seq = nint(rbuf(iad+1))
382 l_jac_i = nint(rbuf(iad+1))
384 l_fac_yld = nint(rbuf(iad+1))
386 l_aburn = nint(rbuf(iad+1))
388 l_mu = nint(rbuf(iad+1))
390 l_planl = nint(rbuf(iad+1))
392 l_epsdnl = nint(rbuf(iad+1))
394 l_dmgscl = nint(rbuf(iad+1))
396 l_tsaiwu = nint(rbuf(iad+1))
398 l_pij = nint(rbuf(iad+1))
400 l_vol0dp = nint(rbuf(iad+1))
410 bufly%NVAR_MAT = nvar_mat
411 bufly%NVAR_EOS = nvar_eos
412 bufly%NVARTMP = nvartmp
413 bufly%NVARTMP_EOS = nvartmp_eos
414 bufly%NVAR_VISC = nvar_visc
415 bufly%NVAR_LOC = nvar_loc
416 bufly%NVAR_LAY = nvar_lay
419 bufly%LY_DMG = ly_dmg
420 bufly%LY_GAMA = ly_gama
421 bufly%LY_DIRA = ly_dira
422 bufly%LY_DIRB = ly_dirb
423 bufly%LY_CRKDIR = ly_crkdir
424 bufly%LY_PLAPT = ly_plapt
425 bufly%LY_SIGPT = ly_sigpt
426 bufly%LY_HOURG = ly_hourg
427 bufly%LY_UELR = ly_uelr
428 bufly%LY_UELR1 = ly_uelr1
429 bufly%LY_OFFPG = ly_offpg
430 bufly%LY_OFF = ly_off
433 bufly%L_GAMA = l_gama
434 bufly%L_STRA = l_stra
435 bufly%L_FRAC = l_frac
436 bufly%L_BFRAC = l_bfrac
437 bufly%L_EINT = l_eint
438 bufly%L_EINS = l_eins
440 bufly%L_DP_DRHO = l_dp_drho
441 bufly%L_QVIS = l_qvis
442 bufly%L_DELTAX = l_deltax
444 bufly%L_EPSA = l_epsa
445 bufly%L_EPSD = l_epsd
446 bufly%L_EPSQ = l_epsq
447 bufly%L_EPSF = l_epsf
449 bufly%L_WPLA = l_wpla
450 bufly%L_TEMP = l_temp
458 bufly%L_DSUM = l_dsum
459 bufly%L_DGLO = l_dglo
460 bufly%L_CRAK = l_crak
467 bufly%L_VISC = l_visc
468 bufly%L_SIGL = l_sigl
469 bufly%L_SIGV = l_sigv
470 bufly%L_SIGA = l_siga
471 bufly%L_SIGB = l_sigb
472 bufly%L_SIGC = l_sigc
473 bufly%L_SIGD = l_sigd
474 bufly%L_SIGF = l_sigf
476 bufly%L_SIGPLY = l_sigply
480 bufly%L_SMSTR = l_smstr
482 bufly%L_FORTH = l_forth
483 bufly%L_EINTTH = l_eintth
485 bufly%L_JAC_I = l_jac_i
486 bufly%L_FAC_YLD = l_fac_yld
487 bufly%L_ABURN = l_aburn
489 bufly%L_PLANL = l_planl
490 bufly%L_EPSDNL = l_epsdnl
491 bufly%L_DMGSCL = l_dmgscl
492 bufly%L_TSAIWU = l_tsaiwu
494 bufly%L_VOL0DP = l_vol0dp
496 IF (igtyp == 51 .OR. igtyp == 52)
THEN
497 nptt = elbuf_str%BUFLY(il)%NPTT
499 nptt = elbuf_str%NPTT
501 ALLOCATE(elbuf_str%BUFLY(il)%LBUF(nptr,npts,nptt),stat=err)
502 ALLOCATE(elbuf_str%BUFLY(il)%MAT (nptr,npts,nptt),stat=err)
503 ALLOCATE(elbuf_str%BUFLY(il)%FAIL(nptr,npts,nptt),stat=err)
504 ALLOCATE(elbuf_str%BUFLY(il)%PROP(nptr,npts,nptt),stat=err)
505 ALLOCATE(elbuf_str%BUFLY(il)%EOS (nptr,npts,nptt),stat=err)
506 ALLOCATE(elbuf_str%BUFLY(il)%VISC(nptr,npts,nptt),stat=err)
507 ALLOCATE(elbuf_str%BUFLY(il)%PORO(nptr,npts,nptt),stat=err)
508 IF(idrape > 0 .AND. (igtyp == 51 .OR. igtyp ==52))
509 .
ALLOCATE(elbuf_str%BUFLY(il)%LBUF_DIR(nptt),stat=err)
514 ALLOCATE(bufly%DMG (nel*ly_dmg) ,stat=err)
515 bufly%DMG(1:nel*ly_dmg) = rbuf(iad+1:iad+nel*ly_dmg)
517 ALLOCATE(bufly%GAMA (nel*ly_gama) ,stat=err)
518 bufly%GAMA(1:nel*ly_gama) = rbuf(iad+1:iad+nel*ly_gama)
519 iad = iad+nel*ly_gama
520 IF(idrape == 0 .OR. (idrape > 0 .AND. igtyp == 17))
THEN
521 ALLOCATE(bufly%DIRA (nel*ly_dira) ,stat=err)
522 bufly%DIRA(1:nel*ly_dira) = rbuf(iad+1:iad+nel*ly_dira)
523 iad = iad+nel*ly_dira
524 ALLOCATE(bufly%DIRB (nel*ly_dirb) ,stat=err)
525 bufly%DIRB(1:nel*ly_dirb) = rbuf(iad+1:iad+nel*ly_dirb)
526 iad = iad+nel*ly_dirb
528 ALLOCATE(bufly%CRKDIR(nel*ly_crkdir) ,stat=err)
529 bufly%CRKDIR(1:nel*ly_crkdir) = rbuf(iad+1:iad+nel*ly_crkdir)
530 iad = iad+nel*ly_crkdir
534 len_plapt = nel*ly_plapt
535 len_sigpt = nel*ly_sigpt
537 IF (igtyp /= 51 .AND. igtyp /= 52)
THEN
538 len_plapt = nel*ly_plapt*npt
539 len_sigpt = nel*ly_sigpt*npt
541 len_plapt = nel*ly_plapt*bufly%NPTT
542 len_sigpt = nel*ly_sigpt*bufly%NPTT
545 ALLOCATE(bufly%PLAPT(len_plapt) ,stat=err)
546 bufly%PLAPT(1:len_plapt) = rbuf(iad+1:iad+len_plapt)
547 iad = iad + len_plapt
548 ALLOCATE(bufly%SIGPT (len_sigpt) ,stat=err)
549 bufly%SIGPT(1:len_sigpt) = rbuf(iad+1:iad+len_sigpt)
551 ELSEIF (npg == 1)
THEN
556 ALLOCATE(bufly%HOURG(nel*ly_hourg) ,stat=err)
557 bufly%HOURG(1:nel*ly_hourg) = rbuf(iad+1:iad+nel*ly_hourg)
558 iad = iad+nel*ly_hourg
559 ALLOCATE(bufly%UELR(nel*ly_uelr) ,stat=err)
560 bufly%UELR(1:nel*ly_uelr) = rbuf(iad+1:iad+nel*ly_uelr)
561 iad = iad+nel*ly_uelr
562 ALLOCATE(bufly%UELR1(nel*ly_uelr1) ,stat=err)
563 bufly%UELR1(1:nel*ly_uelr1) = rbuf(iad+1:iad+nel*ly_uelr1)
564 iad = iad+nel*ly_uelr1
565 ALLOCATE(bufly%OFFPG(nel*ly_offpg) ,stat=err)
566 bufly%OFFPG(1:nel*ly_offpg) = rbuf(iad+1:iad+nel*ly_offpg)
567 iad = iad+nel*ly_offpg
568 ALLOCATE(bufly%OFF(nel*ly_off) ,stat=err)
569 bufly%OFF(1:nel*ly_off) = rbuf(iad+1:iad+nel*ly_off)
577 lbuf => elbuf_str%BUFLY(il)%LBUF(ir,is,it)
579 lbuf%MLAW = nint(rbuf(iad+1))
581 lbuf%lawID = nint(rbuf(iad+1))
584 ALLOCATE(lbuf%OFF (nel*l_off) ,stat=err)
585 lbuf%OFF(1:nel*l_off) = rbuf(iad+1:iad+nel*l_off)
587 ALLOCATE (lbuf%GAMA(nel*l_gama) ,stat=err)
588 lbuf%GAMA(1:nel*l_gama) = rbuf(iad+1:iad+nel*l_gama)
590 ALLOCATE(lbuf%STRA (nel*l_stra) ,stat=err)
591 lbuf%STRA(1:nel*l_stra) = rbuf(iad+1:iad+nel*l_stra)
593 ALLOCATE(lbuf%FRAC (nel*l_frac) ,stat=err)
594 lbuf%FRAC(1:nel*l_frac) = rbuf(iad+1:iad+nel*l_frac)
596 ALLOCATE(lbuf%BFRAC(nel*l_bfrac) ,stat=err)
597 lbuf%BFRAC(1:nel*l_bfrac) = rbuf(iad+1:iad+nel*l_bfrac)
598 iad = iad+nel*l_bfrac
599 ALLOCATE(lbuf%EINT(nel*l_eint) ,stat=err)
600 lbuf%EINT(1:nel*l_eint) = rbuf(iad+1:iad+nel*l_eint)
602 ALLOCATE(lbuf%EINS(nel*l_eins) ,stat=err)
603 lbuf%EINS(1:nel*l_eins) = rbuf(iad+1:iad+nel*l_eins)
605 ALLOCATE(lbuf%RHO(nel*l_rho) ,stat=err)
606 lbuf%RHO(1:nel*l_rho) = rbuf(iad+1:iad+nel*l_rho)
608 ALLOCATE(lbuf%DP_DRHO(nel*l_dp_drho) ,stat=err)
609 lbuf%DP_DRHO(1:nel*l_dp_drho)=rbuf(iad+1:iad+nel*l_dp_drho)
610 iad = iad+nel*l_dp_drho
611 ALLOCATE(lbuf%QVIS(nel*l_qvis) ,stat=err)
612 lbuf%QVIS(1:nel*l_qvis) = rbuf(iad+1:iad+nel*l_qvis)
614 ALLOCATE(lbuf%DELTAX(nel*l_deltax),stat=err)
615 lbuf%DELTAX(1:nel*l_deltax)=rbuf(iad+1:iad+nel*l_deltax)
616 iad = iad+nel*l_deltax
617 ALLOCATE(lbuf%VOL (nel*l_vol) ,stat=err)
618 lbuf%VOL(1:nel*l_vol) = rbuf(iad+1:iad+nel*l_vol)
620 ALLOCATE(lbuf%EPSA (nel*l_epsa) ,stat=err)
621 lbuf%EPSA(1:nel*l_epsa) = rbuf(iad+1:iad+nel*l_epsa)
623 ALLOCATE(lbuf%EPSD (nel*l_epsd) ,stat=err)
624 lbuf%EPSD(1:nel*l_epsd) = rbuf(iad+1:iad+nel*l_epsd)
626 ALLOCATE(lbuf%EPSQ (nel*l_epsq) ,stat=err)
627 lbuf%EPSQ(1:nel*l_epsq) = rbuf(iad+1:iad+nel*l_epsq)
629 ALLOCATE(lbuf%EPSF (nel*l_epsf) ,stat=err)
630 lbuf%EPSF(1:nel*l_epsf) = rbuf(iad+1:iad+nel*l_epsf)
632 ALLOCATE(lbuf%PLA (nel*l_pla) ,stat=err)
633 lbuf%PLA(1:nel*l_pla) = rbuf(iad+1:iad+nel*l_pla)
635 ALLOCATE(lbuf%WPLA (nel*l_wpla) ,stat=err)
636 lbuf%WPLA(1:nel*l_wpla) = rbuf(iad+1:iad+nel*l_wpla)
638 ALLOCATE(lbuf%TEMP(nel*l_temp), stat=err)
639 lbuf%TEMP(1:nel*l_temp) = rbuf(iad+1:iad+nel*l_temp)
641 ALLOCATE(lbuf%TB(nel*l_tb), stat=err)
642 lbuf%TB(1:nel*l_tb) = rbuf(iad+1:iad+nel*l_tb)
644 ALLOCATE(lbuf%RK(nel*l_rk), stat=err)
645 lbuf%RK(1:nel*l_rk) = rbuf(iad+1:iad+nel*l_rk)
647 ALLOCATE(lbuf%RE(nel*l_re), stat=err)
648 lbuf%RE(1:nel*l_re) = rbuf(iad+1:iad+nel*l_re)
650 ALLOCATE(lbuf%VK(nel*l_vk), stat=err)
651 lbuf%VK(1:nel*l_vk) = rbuf(iad+1:iad+nel*l_vk)
653 ALLOCATE(lbuf%SF(nel*l_sf), stat=err)
654 lbuf%SF(1:nel*l_sf) = rbuf(iad+1:iad+nel*l_sf)
656 ALLOCATE(lbuf%ROB(nel*l_rob), stat=err)
657 lbuf%ROB(1:nel*l_rob) = rbuf(iad+1:iad+nel*l_rob)
659 ALLOCATE(lbuf%DAM (nel*l_dam) ,stat=err)
660 lbuf%DAM(1:nel*l_dam) = rbuf(iad+1:iad+nel*l_dam)
662 ALLOCATE(lbuf%DSUM (nel*l_dsum) ,stat=err)
663 lbuf%DSUM(1:nel*l_dsum) = rbuf(iad+1:iad+nel*l_dsum)
665 ALLOCATE(lbuf%DGLO (nel*l_dglo) ,stat=err)
666 lbuf%DGLO(1:nel*l_dglo) = rbuf(iad+1:iad+nel*l_dglo)
668 ALLOCATE(lbuf%CRAK (nel*l_crak) ,stat=err)
669 lbuf%CRAK(1:nel*l_crak) = rbuf(iad+1:iad+nel*l_crak)
671 ALLOCATE(lbuf%ANG (nel*l_ang) ,stat=err)
672 lbuf%ANG(1:nel*l_ang) = rbuf(iad+1:iad+nel*l_ang)
674 ALLOCATE(lbuf%EPE (nel*l_epe) ,stat=err)
675 lbuf%EPE(1:nel*l_epe) = rbuf(iad+1:iad+nel*l_epe)
677 ALLOCATE(lbuf%EPC (nel*l_epc) ,stat=err)
678 lbuf%EPC(1:nel*l_epc) = rbuf(iad+1:iad+nel*l_epc)
680 ALLOCATE(lbuf%XST (nel*l_xst) ,stat=err)
681 lbuf%XST(1:nel*l_xst) = rbuf(iad+1:iad+nel*l_xst)
683 ALLOCATE(lbuf%SSP (nel*l_ssp) ,stat=err)
684 lbuf%SSP(1:nel*l_ssp) = rbuf(iad+1:iad+nel*l_ssp)
686 ALLOCATE(lbuf%Z (nel*l_z) ,stat=err)
687 lbuf%Z(1:nel*l_z) = rbuf(iad+1:iad+nel*l_z)
689 ALLOCATE(lbuf%VISC (nel*l_visc) ,stat=err)
690 lbuf%VISC(1:nel*l_visc) = rbuf(iad+1:iad+nel*l_visc)
692 ALLOCATE(lbuf%SIGL (nel*l_sigl) ,stat=err)
693 lbuf%SIGL(1:nel*l_sigl) = rbuf(iad+1:iad+nel*l_sigl)
695 ALLOCATE(lbuf%SIGV (nel*l_sigv) ,stat=err)
696 lbuf%SIGV(1:nel*l_sigv) = rbuf(iad+1:iad+nel*l_sigv)
698 ALLOCATE(lbuf%SIGA (nel*l_siga) ,stat=err)
699 lbuf%SIGA(1:nel*l_siga) = rbuf(iad+1:iad+nel*l_siga)
701 ALLOCATE(lbuf%SIGB (nel*l_sigb) ,stat=err)
702 lbuf%SIGB(1:nel*l_sigb) = rbuf(iad+1:iad+nel*l_sigb)
704 ALLOCATE(lbuf%SIGC (nel*l_sigc) ,stat=err)
705 lbuf%SIGC(1:nel*l_sigc) = rbuf(iad+1:iad+nel*l_sigc)
707 ALLOCATE(lbuf%SIGD (nel*l_sigd) ,stat=err)
708 lbuf%SIGD(1:nel*l_sigd) = rbuf(iad+1:iad+nel*l_sigd)
710 ALLOCATE(lbuf%SIGF (nel*l_sigf) ,stat=err)
711 lbuf%SIGF(1:nel*l_sigf) = rbuf(iad+1:iad+nel*l_sigf)
713 ALLOCATE(lbuf%SIG(nel*l_sig) ,stat=err)
714 lbuf%SIG(1:nel*l_sig) = rbuf(iad+1:iad+nel*l_sig)
716 ALLOCATE(lbuf%SIGPLY(nel*l_sigply) ,stat=err)
717 lbuf%SIGPLY(1:nel*l_sigply) = rbuf(iad+1:iad+nel*l_sigply)
718 iad = iad+nel*l_sigply
719 ALLOCATE(lbuf%FOR (nel*l_for) ,stat=err)
720 lbuf%FOR(1:nel*l_for) = rbuf(iad+1:iad+nel*l_for)
722 ALLOCATE(lbuf%MOM (nel*l_mom) ,stat=err)
723 lbuf%MOM(1:nel*l_mom) = rbuf(iad+1:iad+nel*l_mom)
725 ALLOCATE(lbuf%THK (nel*l_thk) ,stat=err)
726 lbuf%THK(1:nel*l_thk) = rbuf(iad+1:iad+nel*l_thk)
728 ALLOCATE(lbuf%SMSTR (nel*l_smstr) ,stat=err)
729 lbuf%SMSTR(1:nel*l_smstr) = rbuf(iad+1:iad+nel*l_smstr)
730 iad = iad+nel*l_smstr
731 ALLOCATE(lbuf%DMG (nel*l_dmg) ,stat=err)
732 lbuf%DMG(1:nel*l_dmg) = rbuf(iad+1:iad+nel*l_dmg)
734 ALLOCATE(lbuf%FORTH (nel*l_forth) ,stat=err)
735 lbuf%FORTH(1:nel*l_forth) = rbuf(iad+1:iad+nel*l_forth)
736 iad = iad+nel*l_forth
737 ALLOCATE(lbuf%EINTTH (nel*l_eintth) ,stat=err)
738 lbuf%EINTTH(1:nel*l_eintth) = rbuf(iad+1:iad+nel*l_eintth)
739 iad = iad+nel*l_eintth
740 ALLOCATE(lbuf%SEQ (nel*l_seq) ,stat=err)
741 lbuf%SEQ(1:nel*l_seq) = rbuf(iad+1:iad+nel*l_seq)
743 ALLOCATE(lbuf%JAC_I (nel*l_jac_i) ,stat=err)
744 lbuf%JAC_I(1:nel*l_jac_i) = rbuf(iad+1:iad+nel*l_jac_i)
745 iad = iad+nel*l_jac_i
746 ALLOCATE(lbuf%FAC_YLD (nel*l_fac_yld) ,stat=err)
747 lbuf%FAC_YLD(1:nel*l_fac_yld) = rbuf(iad+1:iad+nel*l_fac_yld)
748 iad = iad+nel*l_fac_yld
749 ALLOCATE(lbuf%ABURN(nel*l_aburn) ,stat=err)
750 lbuf%ABURN(1:nel*l_aburn) = rbuf(iad+1:iad+nel*l_aburn)
751 iad = iad+nel*l_aburn
752 ALLOCATE(lbuf%MU(nel*l_mu) ,stat=err)
753 lbuf%MU(1:nel*l_mu) = rbuf(iad+1:iad+nel*l_mu)
755 ALLOCATE(lbuf%PLANL(nel*l_planl) ,stat=err)
756 lbuf%PLANL(1:nel*l_planl) = rbuf(iad+1:iad+nel*l_planl)
757 iad = iad+nel*l_planl
758 ALLOCATE(lbuf%EPSDNL(nel*l_epsdnl) ,stat=err)
759 lbuf%EPSDNL(1:nel*l_epsdnl) = rbuf(iad+1:iad+nel*l_epsdnl)
760 iad = iad+nel*l_epsdnl
761 ALLOCATE(lbuf%DMGSCL(nel*l_dmgscl) ,stat=err)
762 lbuf%DMGSCL(1:nel*l_dmgscl) = rbuf(iad+1:iad+nel*l_dmgscl)
763 iad = iad+nel*l_dmgscl
764 ALLOCATE(lbuf%TSAIWU(nel*l_tsaiwu) ,stat=err)
765 lbuf%TSAIWU(1:nel*l_tsaiwu) = rbuf(iad+1:iad+nel*l_tsaiwu)
766 iad = iad+nel*l_tsaiwu
767 ALLOCATE(lbuf%PIJ(nel*l_pij) ,stat=err)
768 lbuf%PIJ(1:nel*l_pij) = rbuf(iad+1:iad+nel*l_pij)
771 ALLOCATE(lbuf%VOL0DP(nel*l_vol0dp) ,stat=err)
772 sdp_rbuf = sdp_rbuf + nel*l_vol0dp
773!----------------------------------------------------------
777 IF(idrape > 0 .AND. (igtyp == 51 .OR. igtyp == 52))
THEN
779 lbuf_dir => elbuf_str%BUFLY(il)%LBUF_DIR(it)
780 ALLOCATE(lbuf_dir%DIRA(nel*ly_dira))
781 lbuf_dir%DIRA(1:nel*ly_dira) = rbuf(iad+1:iad+nel*ly_dira)
782 iad = iad + nel*ly_dira
783 ALLOCATE(lbuf_dir%DIRB(nel*ly_dirb))
784 lbuf_dir%DIRB(1:nel*ly_dirb) = rbuf(iad+1:iad+nel*ly_dirb)
785 iad = iad + nel*ly_dirb
790 IF (ity == 3 .OR. ity == 7)
THEN
792 DO il=1,elbuf_str%NLAY
793 npttot = npttot + npg*elbuf_str%BUFLY(il)%NPTT
795 IF (npt == 0) npttot = npt
803 bufly => elbuf_str%BUFLY(il)
804 IF (igtyp == 51 .OR. igtyp == 52)
THEN
807 lbuf => bufly%LBUF(1,1,it)
808 bufly%PLAPT(1:nel*ly_plapt) => lbuf%PLA(1:nel*l_pla)
809 bufly%SIGPT(1:nel*ly_sigpt) => lbuf%SIG(1:nel*l_sig)
812 lbuf => bufly%LBUF(1,1,1)
813 bufly%PLAPT(1:nel*ly_plapt) => lbuf%PLA(1:nel*l_pla)
814 bufly%SIGPT(1:nel*ly_sigpt) => lbuf%SIG(1:nel*l_sig)
818 bufly => elbuf_str%BUFLY(1)
819 IF (igtyp == 51 .OR. igtyp == 52)
THEN
822 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,ipt)
823 len1 = 1+(ipt-1)*nel*ly_plapt
824 len2 = ipt*nel*ly_plapt
825 bufly%PLAPT(len1:len2) => lbuf%PLA(1:nel*l_pla)
826 len1 = 1+(ipt-1)*nel*ly_sigpt
827 len2 = ipt*nel*ly_sigpt
828 bufly%SIGPT(len1:len2) => lbuf%SIG(1:nel*l_sig)
832 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,ipt)
833 len1 = 1+(ipt-1)*nel*ly_plapt
834 len2 = ipt*nel*ly_plapt
835 bufly%PLAPT(len1:len2) => lbuf%PLA(1:nel*l_pla)
836 len1 = 1+(ipt-1)*nel*ly_sigpt
837 len2 = ipt*nel*ly_sigpt
838 bufly%SIGPT(len1:len2) => lbuf%SIG(1:nel*l_sig)
847 nuvar = elbuf_str%BUFLY(il)%NVAR_MAT
848 nvartmp = elbuf_str%BUFLY(il)%NVARTMP
849 IF (igtyp == 51 .OR. igtyp == 52)
THEN
850 nptt = elbuf_str%BUFLY(il)%NPTT
852 nptt = elbuf_str%NPTT
857 matbuf => elbuf_str%BUFLY(il)%MAT(ir,is,it)
858 ALLOCATE (matbuf%VAR(nel*nuvar), stat=err)
859 matbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
861 ALLOCATE (matbuf%VARTMP(nel*nvartmp), stat=err)
862 matbuf%VARTMP(1:nel*nvartmp) = 0
872 nfail = elbuf_str% BUFLY(il)%NFAIL
873 IF (igtyp == 51 .OR. igtyp == 52)
THEN
874 nptt = elbuf_str%BUFLY(il)%NPTT
876 nptt = elbuf_str%NPTT
881 ALLOCATE (elbuf_str%BUFLY(il)%FAIL(ir,is,it)%FLOC(nfail),
884 floc=>elbuf_str%BUFLY(il)%FAIL(ir,is,it)%FLOC(k)
886 floc%ILAWF = rbuf(iad+1)
889 floc%IDFAIL = rbuf(iad+1)
900 lf_dammx = rbuf(iad+1)
902 floc%LF_DAMMX = lf_dammx
904 lf_damini = rbuf(iad+1)
906 floc%LF_DAMINI = lf_damini
908 lf_tdel = rbuf(iad+1)
910 floc%LF_TDEL = lf_tdel
912 lf_indx = rbuf(iad+1)
914 floc%LF_INDX = lf_indx
920 ALLOCATE(floc%VAR(nel*nuvar), stat=err
922 iad = iad + nel*nuvar
924 ALLOCATE(floc%DAM(nel*lf_dam), stat=err)
925 floc%DAM(1:nel*lf_dam) = rbuf(iad+1:iad+nel*lf_dam)
926 iad = iad + nel*lf_dam
928 ALLOCATE(floc%DAMMX(nel*lf_dammx), stat=err)
929 floc%DAMMX(1:nel*lf_dammx) = rbuf(iad+1:iad+nel*lf_dammx)
930 iad = iad + nel*lf_dammx
932 ALLOCATE(floc%DAMINI(nel*lf_damini), stat=err)
933 floc%DAMINI(1:nel*lf_damini) = rbuf
934 iad = iad + nel*lf_damini
936 ALLOCATE(floc%TDEL(nel*lf_tdel), stat=err)
937 floc%TDEL(1:nel*lf_tdel) = rbuf(iad+1:iad+nel*lf_tdel)
938 iad = iad + nel*lf_tdel
940 ALLOCATE(floc%INDX(nel*lf_indx), stat=err)
941 floc%INDX(1:nel*lf_indx) = rbuf(iad+1:iad+nel*lf_indx)
942 iad = iad + nel*lf_indx
944 ALLOCATE(floc%OFF(nel*lf_off), stat=err)
945 floc%OFF(1:nel*lf_off) = rbuf(iad+1:iad+nel*lf_off)
946 iad = iad + nel*lf_off
956 nuvar = elbuf_str% BUFLY(il)%NVAR_VISC
957 IF (igtyp == 51 .OR. igtyp == 52)
THEN
958 nptt = elbuf_str%BUFLY(il)%NPTT
960 nptt = elbuf_str%NPTT
965 viscbuf => elbuf_str%BUFLY(il)%VISC(ir,is,it)
966 ALLOCATE (viscbuf%VAR(nel*nuvar), stat=err)
967 viscbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
977 IF ((ity==3).OR.(ity==7))
THEN
979 nptt = elbuf_str%NPTT
980 IF ((inloc>0).AND.(nptt>1))
THEN
982 ALLOCATE(elbuf_str%NLOC(nptr,npts), stat=err)
993 bufnl => elbuf_str%NLOC(ir,is)
995 ALLOCATE(bufnl%MASSTH(nel,nonl), stat=err)
997 ALLOCATE(bufnl%UNLTH(nel,nonl) , stat=err)
999 ALLOCATE(bufnl%VNLTH(nel,nonl) , stat=err)
1001 ALLOCATE(bufnl%FNLTH(nel,nonl) , stat=err)
1005 bufnl%MASSTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1008 bufnl%UNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1011 bufnl%VNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1014 bufnl%FNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1023 ELSEIF (ity==1)
THEN
1025 IF ((inloc>0).AND.(isolnod == 8))
THEN
1026 ALLOCATE(elbuf_str%NLOCS%NL_ISOLNOD(nel))
1027 elbuf_str%NLOCS%NL_ISOLNOD(1:nel) = 0
1028 ALLOCATE(elbuf_str%NLOCS%NL_SOLNOD(8,nel))
1029 elbuf_str%NLOCS%NL_SOLNOD(1:8,1:nel) = 0
1031 ALLOCATE(elbuf_str%NLOCS%NL_ISOLNOD(0))
1032 ALLOCATE(elbuf_str%NLOCS%NL_SOLNOD(0,0))
1035 nptt = elbuf_str%NLAY
1036 IF ((inloc>0).AND.(nptt>1))
THEN
1038 ALLOCATE(elbuf_str%NLOCTS(nptr,npts), stat=err)
1045 bufnlts => elbuf_str%NLOCTS(ir,is)
1047 ALLOCATE(bufnlts%MASSTH(nel,nonl), stat=err)
1049 ALLOCATE(bufnlts%UNLTH(nel,nonl) , stat=err
1051 ALLOCATE(bufnlts%VNLTH(nel,nonl) , stat=err)
1053 ALLOCATE(bufnlts%FNLTH(nel,nonl) , stat=err)
1057 bufnlts%MASSTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1060 bufnlts%UNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1063 bufnlts%VNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1066 bufnlts%FNLTH(i,1:nonl) = rbuf(iad+1:iad+nonl)
1077 nvar_eos = elbuf_str%BUFLY(il)%NVAR_EOS
1078 nvartmp_eos = elbuf_str%BUFLY(il)%NVARTMP_EOS
1079 IF (igtyp == 51 .OR. igtyp == 52)
THEN
1080 nptt = elbuf_str%BUFLY(il)%NPTT
1082 nptt = elbuf_str%NPTT
1087 ALLOCATE( elbuf_str%BUFLY(il)%EOS(ir,is,it)%VAR(nel*nvar_eos), stat=err)
1088 ALLOCATE( elbuf_str%BUFLY(il)%EOS(ir,is,it)%VARTMP(nel*nvartmp_eos), stat=err)
1089 elbuf_str%BUFLY(il)%EOS(ir,is,it)%VAR(1:nel*nvar_eos) = rbuf(iad+1:iad+nel*nvar_eos)
1090 elbuf_str%BUFLY(il)%EOS(ir,is,it)%VARTMP(1:nel*nvartmp_eos) = 0
1091 iad = iad+nel*nvar_eos
1105 DO il = 1,elbuf_str%NINTLAY
1106 intlay => elbuf_str%INTLAY(il)
1109 intlay%ILAW = nint(rbuf(iad+1))
1111 intlay%IMAT = nint(rbuf(iad+1))
1113 intlay%NFAIL = nint(rbuf(iad+1))
1115 intlay%NVAR_MAT = nint(rbuf(iad+1))
1117 intlay%NVARTMP = nint(rbuf(iad+1))
1122 ALLOCATE(intlay%EINT (nel*ly_eint) ,stat=err)
1123 intlay%EINT(1:nel*ly_eint) = rbuf(iad+1:iad+nel*ly_eint)
1124 iad = iad+nel*ly_eint
1125 ALLOCATE(intlay%COUNT (nel*ly_count
1126 intlay%COUNT(1:nel*ly_count) = rbuf(iad+1:iad+nel*ly_count)
1127 iad = iad+nel*ly_count
1131 ilbuf => elbuf_str%INTLAY(il)%ILBUF(ir,is)
1133 ALLOCATE(ilbuf%EPS (nel*l_eps) ,stat=err)
1134 ilbuf%EPS(1:nel*l_eps) = rbuf(iad+1:iad+nel
1136 ALLOCATE(ilbuf%SIG (nel*l_sig) ,stat=err)
1137 ilbuf%SIG(1:nel*l_sig) = rbuf(iad+1:iad+nel*l_sig)
1147 nuvar = elbuf_str%INTLAY(il)%NVAR_MAT
1148 nvartmp = elbuf_str%BUFLY(il)%NVARTMP
1151 matbuf => elbuf_str%INTLAY(il)%MAT(ir,is)
1152 ALLOCATE (matbuf%VAR(nel*nuvar), stat=err)
1153 matbuf%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
1155 ALLOCATE (matbuf%VARTMP(nel*nvartmp), stat=err)
1156 matbuf%VARTMP(1:nel*nvartmp) = 0
1164 nfail = elbuf_str% INTLAY(il)%NFAIL
1167 ALLOCATE (elbuf_str%INTLAY(il)%FAIL(ir,is)%FLOC(nfail),
1170 floc=>elbuf_str%INTLAY(il)%FAIL(ir,is)%FLOC(k)
1171 floc%ILAWF = rbuf(iad+1)
1173 floc%IDFAIL = rbuf(iad+1)
1178 ALLOCATE(floc%VAR(nel*nuvar), stat=err)
1179 floc%VAR(1:nel*nuvar) = rbuf(iad+1:iad+nel*nuvar)
1195! iad = iad+nel*nuvar
1203 gbuf => elbuf_str%GBUF
1205 gbuf%NVAR_GLOB = nint(rbuf(iad+1))
1208 g_noff = nint(rbuf(iad+1))
1210 g_ierr = nint(rbuf(iad+1))
1212 g_off = nint(rbuf(iad+1))
1214 g_gama = nint(rbuf(iad+1))
1216 g_smstr= nint(rbuf(iad+1))
1218 g_hourg= nint(rbuf(iad+1))
1220 g_bfrac= nint(rbuf(iad+1))
1222 g_eint = nint(rbuf(iad+1))
1224 g_eins = nint(rbuf(iad+1))
1226 g_rho = nint(rbuf(iad+1))
1228 g_qvis = nint(rbuf(iad+1))
1230 g_deltax= nint(rbuf(iad+1))
1232 g_vol = nint(rbuf(iad+1))
1234 g_epsd = nint(rbuf(iad+1))
1236 g_epsq = nint(rbuf(iad+1))
1238 g_pla = nint(rbuf(iad+1))
1240 g_wpla = nint(rbuf(iad+1))
1242 g_temp = nint(rbuf(iad+1))
1244 g_tb = nint(rbuf(iad+1))
1246 g_rk = nint(rbuf(iad+1))
1248 g_re = nint(rbuf(iad+1))
1250 g_sig = nint(rbuf(iad+1))
1252 g_for = nint(rbuf(iad+1))
1254 g_mom = nint(rbuf(iad+1))
1256 g_thk = nint(rbuf(iad+1))
1258 g_tag22 = nint(rbuf(iad+1))
1260 g_stra = nint(rbuf(iad+1))
1262 g_sigi = nint(rbuf(iad+1))
1264 g_dmg = nint(rbuf(iad+1))
1266 g_forpg = nint(rbuf(iad+1))
1268 g_mompg = nint(rbuf(iad+1))
1271 g_forpgpinch = nint(rbuf(iad+1))
1273 g_mompgpinch = nint(rbuf(iad+1))
1275 g_epgpinchxz = nint(rbuf(iad+1))
1277 g_epgpinchyz = nint(rbuf(iad+1))
1279 g_epgpinchzz = nint(rbuf(iad+1))
1282 g_strpg = nint(rbuf(iad+1))
1284 g_uelr = nint(rbuf(iad+1))
1286 g_uelr1 = nint(rbuf(iad+1))
1288 g_damdl = nint(rbuf(iad+1))
1290 g_forth = nint(rbuf(iad+1))
1292 g_eintth = nint(rbuf(iad+1))
1294 g_fill = nint(rbuf(iad+1))
1296 g_seq = nint(rbuf(iad+1))
1298 g_strw = nint(rbuf(iad+1))
1300 g_strwpg = nint(rbuf(iad+1))
1302 g_thk_i = nint(rbuf(iad+1))
1304 g_jac_i = nint(rbuf(iad+1))
1306 g_dt = nint(rbuf(iad+
1308 g_isms = nint(rbuf(iad+1))
1310 g_bpreld = nint(rbuf(iad+1))
1312 g_aburn= nint(rbuf(iad+1))
1314 g_mu= nint(rbuf(iad+1))
1316 g_planl= nint(rbuf(iad+1))
1318 g_epsdnl= nint(rbuf(iad+1))
1320 g_dmgscl= nint(rbuf(iad+1))
1322 g_tsaiwu= nint(rbuf(iad+1))
1324 g_tempg = nint(rbuf(iad+1))
1326 g_cor_nf = nint(rbuf(iad+1))
1328 g_cor_fr = nint(rbuf(iad+1))
1330 g_cor_xr = nint(rbuf(iad+1))
1332 g_maxfrac= nint(rbuf(iad+1))
1334 g_maxeps= nint(rbuf(iad+1))
1336 g_betaorth = nint(rbuf(iad+1))
1338 g_amu= nint(rbuf(iad+1))
1340 g_sh_ioffset= nint(rbuf(iad+1))
1342 g_eint_distor= nint(rbuf(iad+1))
1344 g_for_g= nint(rbuf(iad+1))
1346 g_forpg_g= nint(rbuf(iad+1))
1351 g_area = nint(rbuf(iad+1))
1353 g_skew = nint(rbuf(iad+1))
1355 g_length = nint(rbuf(iad+1))
1357 g_totdepl = nint(rbuf(iad+1))
1359 g_totrot = nint(rbuf(iad+1))
1361 g_forep = nint(rbuf(iad+1))
1363 g_momep = nint(rbuf(iad+1))
1365 g_dep_in_tens = nint(rbuf(iad+1))
1367 g_dep_in_comp = nint(rbuf(iad+1))
1369 g_rot_in_tens = nint(rbuf(iad+1))
1371 g_rot_in_comp = nint(rbuf(iad+1))
1373 g_posx = nint(rbuf(iad+1))
1375 g_posy = nint(rbuf(iad+1))
1377 g_posz = nint(rbuf(iad+1))
1379 g_posxx = nint(rbuf(iad+1))
1381 g_posyy = nint(rbuf(iad+1))
1383 g_poszz = nint(rbuf(iad+1))
1385 g_yield = nint(rbuf(iad+1))
1387 g_length_err = nint(rbuf(iad+1))
1389 g_dv = nint(rbuf(iad+1))
1391 g_dfs = nint(rbuf(iad+1))
1393 g_skew_err = nint(rbuf(iad+1))
1395 g_e6 = nint(rbuf(iad+1))
1397 g_ruptcrit = nint(rbuf(iad+1))
1399 g_mass = nint(rbuf(iad+1))
1401 g_v_repcvt = nint(rbuf(iad+1))
1403 g_vr_repcvt = nint(rbuf(iad+1))
1405 g_nuvar = nint(rbuf(iad+1))
1407 g_nuvarn = nint(rbuf(iad
1411 g_forini = nint(rbuf(iad+1))
1415 g_strhg = nint(rbuf(iad+1))
1417 g_etotsh = nint(rbuf(iad+1))
1420 g_dt_piter_old = nint(rbuf(iad+1))
1425 IF(g_dt_piter_old/=
THEN
1426 g_dt_piter=g_dt_piter_old
1427 ELSEIF(idt1tet10 > 1)
THEN
1429 newlen =newlen+30*nel
1433 g_skew_id = nint(rbuf(iad+1))
1435 g_gama_r = nint(rbuf(iad+1))
1437 g_yield_in_comp = nint(rbuf(iad+1))
1439 g_xxold_in_comp = nint(rbuf(iad+1))
1444 g_slipring_id = nint(rbuf(iad+1))
1446 g_slipring_fram_id = nint(rbuf(iad+1))
1448 g_slipring_strand = nint(rbuf(iad+1))
1450 g_retractor_id = nint(rbuf(iad+1))
1452 g_ringslip = nint(rbuf(iad+1))
1454 g_add_node = nint(rbuf(iad+1))
1456 g_update = nint(rbuf(iad+1))
1458 g_fram_factor = nint(rbuf(iad+1))
1460 g_intvar = nint(rbuf(iad+1))
1462 g_idt_tsh = nint(rbuf(iad+1))
1465 g_tm_yield = nint(rbuf(iad+1))
1467 g_tm_seq = nint(rbuf(iad+1))
1469 g_tm_eint = nint(rbuf(iad+1))
1471 g_tm_dmg = nint(rbuf(iad+1))
1473 g_tm_sig = nint(rbuf(iad+1))
1475 g_tm_stra = nint(rbuf(iad+1))
1478 gbuf%G_NOFF = g_noff
1479 gbuf%G_IERR = g_ierr
1481 gbuf%G_GAMA = g_gama
1482 gbuf%G_SMSTR = g_smstr
1483 gbuf%G_HOURG = g_hourg
1484 gbuf%G_BFRAC = g_bfrac
1485 gbuf%G_EINT = g_eint
1486 gbuf%G_EINS = g_eins
1488 gbuf%G_QVIS = g_qvis
1489 gbuf%G_DELTAX = g_deltax
1491 gbuf%G_EPSD = g_epsd
1492 gbuf%G_EPSQ = g_epsq
1494 gbuf%G_WPLA = g_wpla
1495 gbuf%G_TEMP = g_temp
! ---------------------------------------------------------------------
! NOTE(review): this is the interior of SUBROUTINE allocbuf_auto; the
! header and trailer lie outside this fragment.  The leading 4-digit
! numbers fused onto each statement are original source line numbers
! left behind by a text-extraction step; gaps in that numbering
! (e.g. 1644 -> 1646) mark statements dropped during extraction, and
! several statements below are visibly truncated mid-expression.
! Restore this region from the original file before compiling -- the
! missing text is NOT reconstructed here.
! ---------------------------------------------------------------------
! Copy the per-element variable counts (local g_* sizes) into the
! global-buffer descriptor fields gbuf%G_* so each array's length
! travels with the buffer structure.
1503 gbuf%G_TAG22 = g_tag22
1504 gbuf%G_STRA = g_stra
1505 gbuf%G_SIGI = g_sigi
1507 gbuf%G_FORPG = g_forpg
1508 gbuf%G_MOMPG = g_mompg
1510 gbuf%G_FORPGPINCH = g_forpgpinch
1511 gbuf%G_MOMPGPINCH = g_mompgpinch
1512 gbuf%G_EPGPINCHXZ = g_epgpinchxz
1513 gbuf%G_EPGPINCHYZ = g_epgpinchyz
1514 gbuf%G_EPGPINCHZZ = g_epgpinchzz
1516 gbuf%G_STRPG = g_strpg
1517 gbuf%G_UELR = g_uelr
1518 gbuf%G_UELR1 = g_uelr1
1519 gbuf%G_DAMDL = g_damdl
1520 gbuf%G_FORTH = g_forth
1521 gbuf%G_EINTTH = g_eintth
1522 gbuf%G_FILL = g_fill
1524 gbuf%G_STRW = g_strw
1525 gbuf%G_STRWPG = g_strwpg
1526 gbuf%G_THK_I = g_thk_i
1527 gbuf%G_JAC_I = g_jac_i
1529 gbuf%G_ISMS = g_isms
1530 gbuf%G_BPRELD = g_bpreld
1531 gbuf%G_ABURN = g_aburn
1533 gbuf%G_PLANL = g_planl
1534 gbuf%G_EPSDNL = g_epsdnl
1535 gbuf%G_DMGSCL = g_dmgscl
1536 gbuf%G_TSAIWU = g_tsaiwu
1537 gbuf%G_TEMPG = g_tempg
1538 gbuf%G_COR_NF = g_cor_nf
1539 gbuf%G_COR_FR = g_cor_fr
1540 gbuf%G_COR_XR = g_cor_xr
1541 gbuf%G_MAXFRAC= g_maxfrac
1542 gbuf%G_MAXEPS = g_maxeps
1543 gbuf%G_BETAORTH = g_betaorth
1545 gbuf%G_SH_IOFFSET = g_sh_ioffset
1546 gbuf%G_EINT_DISTOR = g_eint_distor
1547 gbuf%G_FOR_G = g_for_g
1548 gbuf%G_FORPG_G = g_forpg_g
1552 gbuf%G_AREA = g_area
1553 gbuf%G_SKEW = g_skew
1554 gbuf%G_LENGTH = g_length
1555 gbuf%G_TOTDEPL = g_totdepl
1556 gbuf%G_TOTROT = g_totrot
1557 gbuf%G_FOREP = g_forep
1558 gbuf%G_MOMEP = g_momep
1559 gbuf%G_DEP_IN_TENS = g_dep_in_tens
1560 gbuf%G_DEP_IN_COMP = g_dep_in_comp
1561 gbuf%G_ROT_IN_TENS = g_rot_in_tens
1562 gbuf%G_ROT_IN_COMP = g_rot_in_comp
1563 gbuf%G_POSX = g_posx
1564 gbuf%G_POSY = g_posy
1565 gbuf%G_POSZ = g_posz
1566 gbuf%G_POSXX = g_posxx
1567 gbuf%G_POSYY = g_posyy
1568 gbuf%G_POSZZ = g_poszz
1569 gbuf%G_YIELD = g_yield
1570 gbuf%G_LENGTH_ERR = g_length_err
1573 gbuf%G_SKEW_ERR = g_skew_err
1575 gbuf%G_RUPTCRIT = g_ruptcrit
1576 gbuf%G_MASS = g_mass
1577 gbuf%G_V_REPCVT = g_v_repcvt
1578 gbuf%G_VR_REPCVT = g_vr_repcvt
1579 gbuf%G_NUVAR = g_nuvar
1580 gbuf%G_NUVARN = g_nuvarn
1581 gbuf%G_DEFINI = g_defini
1582 gbuf%G_FORINI = g_forini
1583 gbuf%G_INIFRIC = g_inifric
1584 gbuf%G_STRHG = g_strhg
1585 gbuf%G_ETOTSH = g_etotsh
1586 gbuf%G_DT_PITER = g_dt_piter
1587 gbuf%G_SKEW_ID = g_skew_id
1588 gbuf%G_GAMA_R = g_gama_r
1589 gbuf%G_YIELD_IN_COMP = g_yield_in_comp
1590 gbuf%G_XXOLD_IN_COMP = g_xxold_in_comp
1594 gbuf%G_SLIPRING_ID = g_slipring_id
1595 gbuf%G_SLIPRING_FRAM_ID = g_slipring_fram_id
1596 gbuf%G_SLIPRING_STRAND = g_slipring_strand
1597 gbuf%G_RETRACTOR_ID= g_retractor_id
1598 gbuf%G_RINGSLIP = g_ringslip
1599 gbuf%G_ADD_NODE = g_add_node
1600 gbuf%G_UPDATE = g_update
1601 gbuf%G_FRAM_FACTOR = g_fram_factor
1602 gbuf%G_INTVAR = g_intvar
1603 gbuf%G_IDT_TSH = g_idt_tsh
1605 gbuf%G_TM_YIELD = g_tm_yield
1606 gbuf%G_TM_SEQ = g_tm_seq
1608 gbuf%G_TM_DMG = g_tm_dmg
1609 gbuf%G_TM_SIG = g_tm_sig
1610 gbuf%G_TM_STRA = g_tm_stra
! Allocate each global-buffer array with length nel*g_<var> and restore
! its contents from the flat transfer buffer rbuf, advancing the running
! read address iad after each restore.  Integer-valued fields (NOFF,
! IERR, ...) are restored through nint().  NOTE(review): several
! "iad = iad + nel*g_*" advances expected from the original numbering
! (e.g. after DMG, FOR, MOM, THK, DT, AMU) are absent here -- presumably
! dropped by the extraction, not by the author; confirm against the
! original source.
1613 ALLOCATE(elbuf_str%GBUF%ETOTSH(nel*g_etotsh) ,stat=err)
1616 ALLOCATE(elbuf_str%GBUF%GAMA_R(nel*g_gama_r) ,stat=err)
1619 ALLOCATE (elbuf_str%GBUF%NOFF(nel*g_noff) ,stat=err)
1620 gbuf%NOFF(1:nel*g_noff) = nint(rbuf(iad+1:iad+nel*g_noff))
1621 iad = iad+nel*g_noff
1622 ALLOCATE (elbuf_str%GBUF%IERR(nel*g_ierr) ,stat=err)
1623 gbuf%IERR(1:nel*g_ierr) = nint(rbuf(iad+1:iad+nel*g_ierr))
1624 iad = iad+nel*g_ierr
1625 ALLOCATE (elbuf_str%GBUF%GAMA(nel*g_gama) ,stat=err)
1626 gbuf%GAMA(1:nel*g_gama) = rbuf(iad+1:iad+nel*g_gama)
1627 iad = iad+nel*g_gama
1631 ALLOCATE (elbuf_str%GBUF%HOURG(nel*g_hourg) ,stat=err)
1632 gbuf%HOURG(1:nel*g_hourg) = rbuf(iad+1:iad+nel*g_hourg)
1633 iad = iad+nel*g_hourg
1634 ALLOCATE (elbuf_str%GBUF%TAG22(nel*g_tag22) ,stat=err)
1635 gbuf%TAG22(1:nel*g_tag22) = rbuf(iad+1:iad+nel*g_tag22)
1636 iad = iad+nel*g_tag22
1637 ALLOCATE (elbuf_str%GBUF%STRA(nel*g_stra) ,stat=err)
1638 gbuf%STRA(1:nel*g_stra) = rbuf(iad+1:iad+nel*g_stra)
1639 iad = iad+nel*g_stra
1640 ALLOCATE (elbuf_str%GBUF%SIGI(nel*g_sigi) ,stat=err)
1641 gbuf%SIGI(1:nel*g_sigi) = rbuf(iad+1:iad+nel*g_sigi)
1642 iad = iad+nel*g_sigi
1643 ALLOCATE (elbuf_str%GBUF%DMG(nel*g_dmg) ,stat=err)
1644 gbuf%DMG(1:nel*g_dmg) = rbuf(iad+1:iad+nel*g_dmg)
1646 ALLOCATE (elbuf_str%GBUF%UELR(nel*g_uelr) ,stat=err)
1647 gbuf%UELR(1:nel*g_uelr) = rbuf(iad+1:iad+nel*g_uelr)
1648 iad = iad+nel*g_uelr
1649 ALLOCATE (elbuf_str%GBUF%UELR1(nel*g_uelr1) ,stat=err)
1650 gbuf%UELR1(1:nel*g_uelr1) = rbuf(iad+1:iad+nel*g_uelr1)
1651 iad = iad+nel*g_uelr1
1652 ALLOCATE (elbuf_str%GBUF%DAMDL(nel*g_damdl) ,stat=err)
1653 gbuf%DAMDL(1:nel*g_damdl) = rbuf(iad+1:iad+nel*g_damdl)
1654 iad = iad+nel*g_damdl
1655 ALLOCATE (elbuf_str%GBUF%FOR (nel*g_for) ,stat=err)
1656 gbuf%FOR(1:nel*g_for) = rbuf(iad+1:iad+nel*g_for)
1658 ALLOCATE (elbuf_str%GBUF%MOM (nel*g_mom) ,stat=err)
1659 gbuf%MOM(1:nel*g_mom) = rbuf(iad+1:iad+nel*g_mom)
1661 ALLOCATE (elbuf_str%GBUF%THK (nel*g_thk) ,stat=err)
1662 gbuf%THK(1:nel*g_thk) = rbuf(iad+1:iad+nel*g_thk)
1664 ALLOCATE (elbuf_str%GBUF%STRW(nel*g_strw),stat=err)
1665 gbuf%STRW(1:nel*g_strw)=rbuf(iad+1:iad+nel*g_strw)
1666 iad = iad+nel*g_strw
1667 ALLOCATE (elbuf_str%GBUF%THK_I(nel*g_thk_i),stat=err)
1668 gbuf%THK_I(1:nel*g_thk_i)=rbuf(iad+1:iad+nel*g_thk_i)
1669 iad = iad+nel*g_thk_i
1670 ALLOCATE (elbuf_str%GBUF%JAC_I(nel*g_jac_i),stat=err)
1671 gbuf%JAC_I(1:nel*g_jac_i)=rbuf(iad+1:iad+nel*g_jac_i)
1672 iad = iad+nel*g_jac_i
1673 ALLOCATE (elbuf_str%GBUF%DT(nel*g_dt),stat=err)
1674 gbuf%DT(1:nel*g_dt)=rbuf(iad+1:iad+nel*g_dt)
1676 ALLOCATE (elbuf_str%GBUF%ISMS(nel*g_isms),stat=err)
1677 gbuf%ISMS(1:nel*g_isms)=rbuf(iad+1:iad+nel*g_isms)
1678 iad = iad+nel*g_isms
1679 ALLOCATE (elbuf_str%GBUF%BPRELD(nel*g_bpreld),stat=err)
1680 gbuf%BPRELD(1:nel*g_bpreld)=rbuf(iad+1:iad+nel*g_bpreld)
1681 iad = iad+nel*g_bpreld
1682 ALLOCATE (elbuf_str%GBUF%COR_NF(nel*g_cor_nf),stat=err)
1683 gbuf%COR_NF(1:nel*g_cor_nf)=rbuf(iad+1:iad+nel*g_cor_nf)
1684 iad = iad+nel*g_cor_nf
1685 ALLOCATE (elbuf_str%GBUF%COR_FR(nel*g_cor_fr),stat=err)
1686 gbuf%COR_FR(1:nel*g_cor_fr)=rbuf(iad+1:iad+nel*g_cor_fr)
1687 iad = iad+nel*g_cor_fr
1688 ALLOCATE (elbuf_str%GBUF%COR_XR(nel*g_cor_xr),stat=err)
1689 gbuf%COR_XR(1:nel*g_cor_xr)=rbuf(iad+1:iad+nel*g_cor_xr)
1690 iad = iad+nel*g_cor_xr
1691 ALLOCATE (elbuf_str%GBUF%MAXFRAC(nel*g_maxfrac),stat=err)
1692 gbuf%MAXFRAC(1:nel*g_maxfrac)=rbuf(iad+1:iad+nel*g_maxfrac)
1693 iad = iad+nel*g_maxfrac
1694 ALLOCATE (elbuf_str%GBUF%MAXEPS(nel*g_maxeps),stat=err)
! NOTE(review): next statement truncated mid-expression by extraction.
1695 gbuf%MAXEPS(1:nel*g_maxeps)=rbuf(iad+1:iad+nel
1697 ALLOCATE (elbuf_str%GBUF%BETAORTH (nel*g_betaorth) ,stat=err)
1698 gbuf%BETAORTH(1:nel*g_betaorth) = rbuf(iad+1:iad+nel*g_betaorth)
1699 iad = iad+nel*g_betaorth
1701 gbuf%AMU(1:nel*g_amu)=rbuf(iad+1:iad+nel*g_amu)
1703 ALLOCATE (elbuf_str%GBUF%SH_IOFFSET(nel*g_sh_ioffset),stat=err)
1704 gbuf%SH_IOFFSET(1:nel*g_sh_ioffset)=rbuf(iad+1:iad+nel*g_sh_ioffset)
1705 iad = iad+nel*g_sh_ioffset
1706 ALLOCATE (elbuf_str%GBUF%EINT_DISTOR(nel*g_eint_distor),stat=err)
1707 gbuf%EINT_DISTOR(1:nel*g_eint_distor)=rbuf(iad+1:iad+nel*g_eint_distor)
1708 iad = iad+nel*g_eint_distor
1709 ALLOCATE (elbuf_str%GBUF%FOR_G(nel*g_for_g),stat=err)
1710 gbuf%FOR_G(1:nel*g_for_g)=rbuf(iad+1:iad+nel*g_for_g)
1711 iad = iad+nel*g_for_g
1712 ALLOCATE (elbuf_str%GBUF%WPLA (nel*g_wpla) ,stat=err)
1713 gbuf%WPLA(1:nel*g_wpla) = rbuf(iad+1:iad+nel*g_wpla)
1714 iad = iad+nel*g_wpla
! Per-Gauss-point (PG) arrays: in one branch the PG fields simply alias
! the whole-element arrays via pointer association; in the npg > 1
! branch they are allocated and restored separately from rbuf.
! NOTE(review): the IF (...) / THEN pairs below were split across lines
! by the extraction step; they were single statements in the original.
1717 elbuf_str%GBUF%FORPG => gbuf%FOR
1718 elbuf_str%GBUF%MOMPG => gbuf%MOM
1726 IF (g_strpg<=g_stra)
THEN
1727 elbuf_str%GBUF%STRPG => gbuf%STRA
1729 ALLOCATE (elbuf_str%GBUF%STRPG(nel*g_strpg) ,stat=err)
1730 gbuf%STRPG(1:nel*g_strpg) = rbuf(iad+1:iad+nel*g_strpg)
1731 iad = iad+nel*g_strpg
1733 elbuf_str%GBUF%FORPG_G => gbuf%FOR_G
1734 ELSEIF (npg > 1)
THEN
1735 ALLOCATE (elbuf_str%GBUF%FORPG(nel*g_forpg) ,stat=err)
1736 gbuf%FORPG(1:nel*g_forpg) = rbuf(iad+1:iad+nel*g_forpg)
1737 iad = iad+nel*g_forpg
1738 ALLOCATE (elbuf_str%GBUF%MOMPG(nel*g_mompg) ,stat=err)
1739 gbuf%MOMPG(1:nel*g_mompg) = rbuf(iad+1:iad+nel*g_mompg)
1740 iad = iad+nel*g_mompg
1742 ALLOCATE (elbuf_str%GBUF%FORPGPINCH(nel*g_forpgpinch) ,stat=err)
1743 gbuf%FORPGPINCH(1:nel*g_forpgpinch) = rbuf(iad+1:iad+nel*g_forpgpinch)
1744 iad = iad+nel*g_forpgpinch
1745 ALLOCATE (elbuf_str%GBUF%MOMPGPINCH(nel*g_mompgpinch) ,stat=err)
! NOTE(review): the next statement writes gbuf%MOMPG immediately after
! allocating gbuf%MOMPGPINCH, using MOMPGPINCH's size -- presumably
! MOMPGPINCH was intended.  Confirm against the original source before
! changing anything.
1746 gbuf%MOMPG(1:nel*g_mompgpinch) = rbuf(iad+1:iad+nel*g_mompgpinch)
1747 iad = iad+nel*g_mompgpinch
1748 ALLOCATE (elbuf_str%GBUF%EPGPINCHXZ(nel*g_epgpinchxz) ,stat=err)
1749 gbuf%EPGPINCHXZ(1:nel*g_epgpinchxz) = rbuf(iad+1:iad+nel*g_epgpinchxz)
1750 iad = iad+nel*g_epgpinchxz
1751 ALLOCATE (elbuf_str%GBUF%EPGPINCHYZ(nel*g_epgpinchyz) ,stat=err)
1752 gbuf%EPGPINCHYZ(1:nel*g_epgpinchyz) = rbuf(iad+1:iad+nel*g_epgpinchyz)
1753 iad = iad+nel*g_epgpinchyz
1754 ALLOCATE (elbuf_str%GBUF%EPGPINCHZZ(nel*g_epgpinchzz) ,stat=err)
1755 gbuf%EPGPINCHZZ(1:nel*g_epgpinchzz) = rbuf(iad+1:iad+nel*g_epgpinchzz)
1756 iad = iad+nel*g_epgpinchzz
1758 ALLOCATE (elbuf_str%GBUF%STRPG(nel*g_strpg) ,stat=err)
1759 gbuf%STRPG(1:nel*g_strpg) = rbuf(iad+1:iad+nel*g_strpg)
1760 iad = iad+nel*g_strpg
1761 ALLOCATE (elbuf_str%GBUF%STRWPG(nel*g_strwpg),stat=err)
1762 gbuf%STRWPG(1:nel*g_strwpg)=rbuf(iad+1:iad+nel*g_strwpg)
1763 iad = iad+nel*g_strwpg
1764 ALLOCATE (elbuf_str%GBUF%FORPG_G(nel*g_forpg_g),stat=err)
1765 gbuf%FORPG_G(1:nel*g_forpg_g)=rbuf(iad+1:iad+nel*g_forpg_g)
1766 iad = iad+nel*g_forpg_g
! Single-integration-point case (npttot == 1): the global buffer fields
! alias the arrays of the unique layer/integration-point local buffer
! (BUFLY(1)%LBUF(1,1,1)) instead of owning their own storage; otherwise
! they are allocated and restored from rbuf below.
1769 IF (npttot == 1)
THEN
1770 lbuf => elbuf_str%BUFLY(1)%LBUF(1,1,1)
1771 elbuf_str%GBUF%BFRAC => lbuf%BFRAC
1772 elbuf_str%GBUF%OFF => lbuf%OFF
1773 elbuf_str%GBUF%EINT => lbuf%EINT
1774 elbuf_str%GBUF%EINS => lbuf%EINS
1775 elbuf_str%GBUF%RHO => lbuf%RHO
1776 elbuf_str%GBUF%QVIS => lbuf%QVIS
1777 elbuf_str%GBUF%DELTAX => lbuf%DELTAX
1778 elbuf_str%GBUF%VOL => lbuf%VOL
1779 elbuf_str%GBUF%EPSD => lbuf%EPSD
1780 elbuf_str%GBUF%EPSQ => lbuf%EPSQ
1781 elbuf_str%GBUF%PLA => lbuf%PLA
1782 elbuf_str%GBUF%WPLA => lbuf%WPLA
1783 elbuf_str%GBUF%TEMP => lbuf%TEMP
1784 elbuf_str%GBUF%TB => lbuf%TB
1785 elbuf_str%GBUF%RK => lbuf%RK
1786 elbuf_str%GBUF%RE => lbuf%RE
1787 elbuf_str%GBUF%SIG => lbuf%SIG
! NOTE(review): next statement truncated -- target of the pointer
! assignment (presumably lbuf%FORTH) lost during extraction.
1788 elbuf_str%GBUF%FORTH
1789 elbuf_str%GBUF%EINTTH => lbuf%EINTTH
1790 elbuf_str%GBUF%SEQ => lbuf%SEQ
1791 elbuf_str%GBUF%ABURN => lbuf%ABURN
1792 elbuf_str%GBUF%MU => lbuf%MU
1795 ALLOCATE (elbuf_str%GBUF%BFRAC(nel*g_bfrac) ,stat=err)
1796 gbuf%BFRAC(1:nel*g_bfrac) = rbuf(iad+1:iad+nel*g_bfrac)
1797 iad = iad+nel*g_bfrac
1798 ALLOCATE (elbuf_str%GBUF%OFF (nel*g_off) ,stat=err)
1799 gbuf%OFF(1:nel*g_off) = rbuf(iad+1:iad+nel*g_off)
1801 ALLOCATE (elbuf_str%GBUF%EINT(nel*g_eint) ,stat=err)
1802 gbuf%EINT(1:nel*g_eint) = rbuf(iad+1:iad+nel*g_eint)
1803 iad = iad+nel*g_eint
1804 ALLOCATE (elbuf_str%GBUF%EINS(nel*g_eins) ,stat=err)
1805 gbuf%EINS(1:nel*g_eins) = rbuf(iad+1:iad+nel*g_eins)
1806 iad = iad+nel*g_eins
1807 ALLOCATE (elbuf_str%GBUF%RHO (nel*g_rho) ,stat=err)
1808 gbuf%RHO(1:nel*g_rho) = rbuf(iad+1:iad+nel*g_rho)
1810 ALLOCATE (elbuf_str%GBUF%QVIS(nel*g_qvis) ,stat=err)
1811 gbuf%QVIS(1:nel*g_qvis) = rbuf(iad+1:iad+nel*g_qvis)
1812 iad = iad+nel*g_qvis
1813 ALLOCATE (elbuf_str%GBUF%DELTAX(nel*g_deltax),stat=err)
1814 gbuf%DELTAX(1:nel*g_deltax) = rbuf(iad+1:iad+nel*g_deltax)
1815 iad = iad+nel*g_deltax
1816 ALLOCATE (elbuf_str%GBUF%VOL (nel*g_vol) ,stat=err)
! NOTE(review): next statement truncated mid-expression by extraction.
1817 gbuf%VOL(1:nel*g_vol) = rbuf(iad+1:iad
1819 ALLOCATE (elbuf_str%GBUF%EPSD(nel*g_epsd) ,stat=err)
1820 gbuf%EPSD(1:nel*g_epsd) = rbuf(iad+1:iad+nel*g_epsd)
1821 iad = iad+nel*g_epsd
1822 ALLOCATE (elbuf_str%GBUF%EPSQ(nel*g_epsq) ,stat=err)
1823 gbuf%EPSQ(1:nel*g_epsq) = rbuf(iad+1:iad+nel*g_epsq)
1824 iad = iad+nel*g_epsq
1825 ALLOCATE (elbuf_str%GBUF%PLA (nel*g_pla) ,stat=err)
1826 gbuf%PLA(1:nel*g_pla) = rbuf(iad+1:iad+nel*g_pla)
1828 ALLOCATE (elbuf_str%GBUF%TEMP(nel*g_temp) ,stat=err)
1829 gbuf%TEMP(1:nel*g_temp) = rbuf(iad+1:iad+nel*g_temp)
1830 iad = iad+nel*g_temp
1831 ALLOCATE (elbuf_str%GBUF%TB(nel*g_tb) ,stat=err)
1832 gbuf%TB(1:nel*g_tb) = rbuf(iad+1:iad+nel*g_tb)
1834 ALLOCATE (elbuf_str%GBUF%RK(nel*g_rk) ,stat=err)
1835 gbuf%RK(1:nel*g_rk) = rbuf(iad+1:iad+nel*g_rk)
1837 ALLOCATE (elbuf_str%GBUF%RE(nel*g_re) ,stat=err)
1838 gbuf%RE(1:nel*g_re) = rbuf(iad+1:iad+nel*g_re)
1840 ALLOCATE (elbuf_str%GBUF%SIG (nel*g_sig) ,stat=err)
1841 gbuf%SIG(1:nel*g_sig) = rbuf(iad+1:iad+nel*g_sig)
1843 ALLOCATE (elbuf_str%GBUF%FORTH (nel*g_forth) ,stat=err)
1844 gbuf%FORTH(1:nel*g_forth) = rbuf(iad+1:iad+nel*g_forth)
1845 iad = iad+nel*g_forth
! NOTE(review): next ALLOCATE truncated (size and stat= lost).
1846 ALLOCATE (elbuf_str%GBUF%EINTTH
1847 gbuf%EINTTH(1:nel*g_eintth) = rbuf(iad+1:iad+nel*g_eintth)
1848 iad = iad+nel*g_eintth
1849 ALLOCATE (elbuf_str%GBUF%SEQ (nel*g_seq) ,stat=err)
1850 gbuf%SEQ(1:nel*g_seq) = rbuf(iad+1:iad+nel*g_seq)
! NOTE(review): next ALLOCATE truncated (size and stat= lost).
1852 ALLOCATE (elbuf_str%GBUF%ABURN
1853 gbuf%ABURN(1:nel*g_aburn) = rbuf(iad+1:iad+nel*g_aburn)
1854 iad = iad+nel*g_aburn
1855 ALLOCATE (elbuf_str%GBUF%MU(nel*g_mu) ,stat=err)
1856 gbuf%MU(1:nel*g_mu) = rbuf(iad+1:iad+nel*g_mu)
! Property/element-type specific arrays: fill ratio, then (for property
! type 3 spring elements, ity == 5) damage scaling, followed by the
! spring/beam geometry and plasticity history arrays.
1861 ALLOCATE (elbuf_str%GBUF%FILL (nel*g_fill) ,stat=err)
1862 gbuf%FILL(1:nel*g_fill) = rbuf(iad+1:iad+nel*g_fill)
1863 iad = iad+nel*g_fill
1868 IF(igtyp == 3 .and. ity == 5)
THEN
1869 ALLOCATE (elbuf_str%GBUF%DMGSCL (nel*g_dmgscl) ,stat=err)
1870 gbuf%DMGSCL(1:nel*g_dmgscl) = rbuf(iad+1:iad+nel*g_dmgscl)
1871 iad = iad+nel*g_dmgscl
1873 ALLOCATE (elbuf_str%GBUF%AREA(nel*g_area) ,stat=err)
1874 gbuf%AREA(1:nel*g_area) = rbuf(iad+1:iad+nel*g_area)
1875 iad = iad+nel*g_area
! NOTE(review): SKEW ALLOCATE and its restore line truncated/dropped.
1876 ALLOCATE (elbuf_str%GBUF%SKEW
1878 iad = iad+nel*g_skew
1879 ALLOCATE (elbuf_str%GBUF%LENGTH(nel*g_length) ,stat
1880 gbuf%LENGTH(1:nel*g_length) = rbuf(iad+1:iad+nel*g_length)
1881 iad = iad+nel*g_length
1882 ALLOCATE (elbuf_str%GBUF%TOTDEPL(nel*g_totdepl) ,stat=err)
1883 gbuf%TOTDEPL(1:nel*g_totdepl) = rbuf(iad+1:iad+nel*g_totdepl)
1884 iad = iad+nel*g_totdepl
1885 ALLOCATE (elbuf_str%GBUF%TOTROT(nel*g_totrot) ,stat=err)
1886 gbuf%TOTROT(1:nel*g_totrot) = rbuf(iad+1:iad+nel*g_totrot)
1887 iad = iad+nel*g_totrot
1888 ALLOCATE (elbuf_str%GBUF%FOREP(nel*g_forep) ,stat=err)
1889 gbuf%FOREP(1:nel*g_forep) = rbuf(iad+1:iad+nel*g_forep)
1890 iad = iad+nel*g_forep
1891 ALLOCATE (elbuf_str%GBUF%MOMEP(nel*g_momep) ,stat=err)
1892 gbuf%MOMEP(1:nel*g_momep) = rbuf(iad+1:iad+nel*g_momep)
1893 iad = iad+nel*g_momep
1894 ALLOCATE (elbuf_str%GBUF%DEP_IN_TENS(nel*g_dep_in_tens) ,stat=err)
1895 gbuf%DEP_IN_TENS(1:nel*g_dep_in_tens) = rbuf(iad+1:iad+nel*g_dep_in_tens)
1896 iad = iad+nel*g_dep_in_tens
1897 ALLOCATE (elbuf_str%GBUF%DEP_IN_COMP(nel*g_dep_in_comp) ,stat=err)
1898 gbuf%DEP_IN_COMP(1:nel*g_dep_in_comp) = rbuf(iad+1:iad+nel*g_dep_in_comp)
1899 iad = iad+nel*g_dep_in_comp
1900 ALLOCATE (elbuf_str%GBUF%ROT_IN_TENS(nel*g_rot_in_tens) ,stat=err)
1901 gbuf%ROT_IN_TENS(1:nel*g_rot_in_tens) = rbuf(iad+1:iad+nel*g_rot_in_tens)
1902 iad = iad+nel*g_rot_in_tens
1903 ALLOCATE (elbuf_str%GBUF%ROT_IN_COMP(nel*g_rot_in_comp) ,stat=err)
1904 gbuf%ROT_IN_COMP(1:nel*g_rot_in_comp) = rbuf(iad+1:iad+nel*g_rot_in_comp)
1905 iad = iad+nel*g_rot_in_comp
1906 ALLOCATE (elbuf_str%GBUF%POSX(nel*g_posx) ,stat=err)
1907 gbuf%POSX(1:nel*g_posx) = rbuf(iad+1:iad+nel*g_posx)
1908 iad = iad+nel*g_posx
1909 ALLOCATE (elbuf_str%GBUF%POSY(nel*g_posy) ,stat=err)
1910 gbuf%POSY(1:nel*g_posy) = rbuf(iad+1:iad+nel*g_posy)
1911 iad = iad+nel*g_posy
1912 ALLOCATE (elbuf_str%GBUF%POSZ(nel*g_posz) ,stat=err)
1913 gbuf%POSZ(1:nel*g_posz) = rbuf(iad+1:iad+nel*g_posz)
1914 iad = iad+nel*g_posz
1915 ALLOCATE (elbuf_str%GBUF%POSXX(nel*g_posxx) ,stat=err)
1916 gbuf%POSXX(1:nel*g_posxx) = rbuf(iad+1:iad+nel*g_posxx)
1917 iad = iad+nel*g_posxx
1918 ALLOCATE (elbuf_str%GBUF%POSYY(nel*g_posyy) ,stat=err)
1919 gbuf%POSYY(1:nel*g_posyy) = rbuf(iad+1:iad+nel*g_posyy)
1920 iad = iad+nel*g_posyy
1921 ALLOCATE (elbuf_str%GBUF%POSZZ(nel*g_poszz) ,stat=err)
1922 gbuf%POSZZ(1:nel*g_poszz) = rbuf(iad+1:iad+nel*g_poszz)
1923 iad = iad+nel*g_poszz
1924 ALLOCATE (elbuf_str%GBUF%YIELD(nel*g_yield) ,stat=err)
1925 gbuf%YIELD(1:nel*g_yield) = rbuf(iad+1:iad+nel*g_yield)
1926 iad = iad+nel*g_yield
1927 ALLOCATE (elbuf_str%GBUF%LENGTH_ERR(nel*g_length_err) ,stat=err)
1928 gbuf%LENGTH_ERR(1:nel*g_length_err) = rbuf(iad+1:iad+nel*g_length_err)
1929 iad = iad+nel*g_length_err
1930 ALLOCATE (elbuf_str%GBUF%DV(nel*g_dv) ,stat=err)
1931 gbuf%DV(1:nel*g_dv) = rbuf(iad+1:iad+nel*g_dv)
1933 ALLOCATE (elbuf_str%GBUF%DFS(nel*g_dfs) ,stat=err)
1934 gbuf%DFS(1:nel*g_dfs) = rbuf(iad+1:iad+nel*g_dfs)
1936 ALLOCATE (elbuf_str%GBUF%SKEW_ERR(nel*g_skew_err) ,stat=err)
1937 gbuf%SKEW_ERR(1:nel*g_skew_err) = rbuf(iad+1:iad+nel*g_skew_err)
1938 iad = iad+nel*g_skew_err
1939 ALLOCATE (elbuf_str%GBUF%E6(nel*g_e6) ,stat=err)
1940 gbuf%E6(1:nel*g_e6) = rbuf(iad+1:iad+nel*g_e6)
1942 ALLOCATE (elbuf_str%GBUF%RUPTCRIT(nel*g_ruptcrit) ,stat=err)
1943 gbuf%RUPTCRIT(1:nel*g_ruptcrit) = rbuf(iad+1:iad+nel*g_ruptcrit)
1944 iad = iad+nel*g_ruptcrit
1945 ALLOCATE (elbuf_str%GBUF%MASS(nel*g_mass) ,stat=err)
1946 gbuf%MASS(1:nel*g_mass) = rbuf(iad+1:iad+nel*g_mass)
1947 iad = iad+nel*g_mass
1948 ALLOCATE (elbuf_str%GBUF%V_REPCVT(nel*g_v_repcvt) ,stat=err)
1949 gbuf%V_REPCVT(1:nel*g_v_repcvt) = rbuf(iad+1:iad+nel*g_v_repcvt)
1950 iad = iad+nel*g_v_repcvt
1951 ALLOCATE (elbuf_str%GBUF%VR_REPCVT(nel*g_vr_repcvt) ,stat=err)
1952 gbuf%VR_REPCVT(1:nel*g_vr_repcvt) = rbuf(iad+1:iad+nel*g_vr_repcvt)
1953 iad = iad+nel*g_vr_repcvt
1954 ALLOCATE (elbuf_str%GBUF%VAR(nel*g_nuvar) ,stat=err)
1955 gbuf%VAR(1:nel*g_nuvar) = rbuf(iad+1:iad+nel*g_nuvar)
1956 iad = iad+nel*g_nuvar
1957 ALLOCATE (elbuf_str%GBUF%VARN(nel*g_nuvarn) ,stat=err)
1958 gbuf%VARN(1:nel*g_nuvarn) = rbuf(iad+1:iad+nel*g_nuvarn)
1959 iad = iad+nel*g_nuvarn
1960 ALLOCATE (elbuf_str%GBUF%DEFINI(nel*g_defini) ,stat=err)
1961 gbuf%DEFINI(1:nel*g_defini) = rbuf(iad+1:iad+nel*g_defini)
1962 iad = iad+nel*g_defini
1963 ALLOCATE (elbuf_str%GBUF%FORINI(nel*g_forini)
1964 gbuf%FORINI(1:nel*g_forini) = rbuf(iad
1965 iad = iad+nel*g_forini
1966 ALLOCATE (elbuf_str%GBUF%INIFRIC(nel*g_inifric) ,stat=err)
1967 gbuf%INIFRIC(1:nel*g_inifric) = rbuf(iad+1
1968 iad = iad+nel*g_inifric
1969 ALLOCATE (elbuf_str%GBUF%STRHG(nel*g_strhg) ,stat
1970 gbuf%STRHG(1:nel*g_strhg) = rbuf(iad+1:iad+nel*g_strhg)
1971 iad = iad+nel*g_strhg
! DT_PITER: for tet10 small-step option, zero-fill when the old restart
! had no DT_PITER data (g_dt_piter_old == 0), otherwise restore it;
! the read address advances by the OLD size either way.
1972 ALLOCATE (elbuf_str%GBUF%DT_PITER(nel*g_dt_piter) ,stat=err)
1974 IF(idt1tet10 > 1)
THEN
1975 IF(g_dt_piter_old==0)
THEN
1976 gbuf%DT_PITER(1:nel*g_dt_piter) = zero
1978 gbuf%DT_PITER(1:nel*g_dt_piter) = rbuf(iad+1:iad+nel*g_dt_piter)
1982 iad = iad+nel*g_dt_piter_old
1985 elbuf_str%GBUF%TEMPG => gbuf%TEMP
1988 ALLOCATE (elbuf_str%GBUF%SKEW_ID(nel*g_skew_id) ,stat=err)
1989 gbuf%SKEW_ID(1:nel*g_skew_id) = nint(rbuf(iad+1:iad+nel*g_skew_id))
1990 iad = iad+nel*g_skew_id
1991 ALLOCATE (elbuf_str%GBUF%YIELD_IN_COMP(nel*g_yield_in_comp),stat=err)
1992 gbuf%YIELD_IN_COMP(1:nel*g_yield_in_comp) = nint(rbuf(iad+1:iad+nel*g_yield_in_comp))
1993 iad = iad+nel*g_yield_in_comp
1994 ALLOCATE (elbuf_str%GBUF%XXOLD_IN_COMP(nel*g_xxold_in_comp),stat=err)
1995 gbuf%XXOLD_IN_COMP(1:nel*g_xxold_in_comp) = nint(rbuf(iad+1:iad+nel*g_xxold_in_comp))
1996 iad = iad+nel*g_xxold_in_comp
! Failure-model buffer (property type 3 with NOFF present): restore the
! FAIL(1) integer descriptors one scalar at a time from rbuf, then
! allocate and restore its work arrays (VAR, DAM, DAMMX, DAMINI, TDEL,
! INDX, OFF).  NOTE(review): the original numbering gaps between the
! scalar reads (2003, 2005, 2007, ...) indicate the interleaved
! "iad = iad + 1" advances were dropped by the extraction.
2001 IF (igtyp == 3 .and. g_noff > 0)
THEN
2002 ALLOCATE(elbuf_str%GBUF%FAIL(1) ,stat=err)
2003 gbuf%FAIL(1)%ILAWF = nint(rbuf(iad+1))
2005 gbuf%FAIL(1)%IDFAIL = nint(rbuf(iad+1))
2007 gbuf%FAIL(1)%NVAR = nint(rbuf(iad+1))
2009 gbuf%FAIL(1)%LF_DAM = nint(rbuf(iad+1))
2011 gbuf%FAIL(1)%LF_DAMMX = nint(rbuf(iad+1))
2013 gbuf%FAIL(1)%LF_DAMINI = nint(rbuf(iad+1))
2015 gbuf%FAIL(1)%LF_TDEL = nint(rbuf(iad+1))
2019 gbuf%FAIL(1)%LF_OFF = nint(rbuf(iad+1))
2022 ALLOCATE(elbuf_str%GBUF%FAIL(1)%VAR(gbuf%FAIL(1)%NVAR*nel) ,stat=err)
2024 iad = iad + nel*gbuf%FAIL(1)%NVAR
2025 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAM(gbuf%FAIL(1)%LF_DAM*nel) ,stat=err)
2026 gbuf%FAIL(1)%DAM(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_DAM)
2027 iad = iad + nel*gbuf%FAIL(1)%LF_DAM
2028 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAMMX(gbuf%FAIL(1)%LF_DAMMX*nel
2029 gbuf%FAIL(1)%DAMMX(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_DAMMX)
2030 iad = iad + nel*gbuf%FAIL(1)%LF_DAMMX
2031 ALLOCATE(elbuf_str%GBUF%FAIL(1)%DAMINI(gbuf%FAIL(1)%LF_DAMINI*nel) ,stat=err)
2032 gbuf%FAIL(1)%DAMINI(:) =
2033 iad = iad + nel*gbuf%FAIL(1)%LF_DAMINI
2034 ALLOCATE(elbuf_str%GBUF%FAIL(1)%TDEL(gbuf%FAIL(1)%LF_TDEL*nel) ,stat=err)
2035 gbuf%FAIL(1)%TDEL(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_TDEL)
2036 iad = iad + nel*gbuf%FAIL(
2037 ALLOCATE(elbuf_str%GBUF%FAIL(1)%INDX(gbuf%FAIL(1)%LF_INDX*nel) ,stat=err)
2038 gbuf%FAIL(1)%INDX(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_INDX)
2039 iad = iad + nel*gbuf%FAIL(1)%LF_INDX
2040 ALLOCATE(elbuf_str%GBUF%FAIL(1)%OFF(gbuf%FAIL(1)%LF_OFF*nel) ,stat=err)
2041 gbuf%FAIL(1)%OFF(:) = rbuf(iad+1:iad+nel*gbuf%FAIL(1)%LF_OFF)
2042 iad = iad + nel*gbuf%FAIL(1)%LF_OFF
! Seatbelt-specific arrays: slipring / retractor ids and connectivity
! (integer fields via nint) plus the real-valued slip/update state.
2047 ALLOCATE (elbuf_str%GBUF%SLIPRING_ID(nel*g_slipring_id) ,stat=err)
2048 gbuf%SLIPRING_ID(1:nel*g_slipring_id) = nint(rbuf(iad+1:iad+nel*g_slipring_id))
2049 iad = iad+nel*g_slipring_id
2050 ALLOCATE (elbuf_str%GBUF%SLIPRING_FRAM_ID(nel*g_slipring_fram_id
2051 gbuf%SLIPRING_FRAM_ID(1:nel*g_slipring_fram_id) = nint(rbuf(iad+1:iad+nel*g_slipring_fram_id))
2052 iad = iad+nel*g_slipring_fram_id
2053 ALLOCATE (elbuf_str%GBUF%SLIPRING_STRAND(nel*g_slipring_strand) ,stat=err)
2054 gbuf%SLIPRING_STRAND(1:nel*g_slipring_strand) = nint(rbuf(iad+1:iad+nel
2055 iad = iad+nel*g_slipring_strand
2056 ALLOCATE (elbuf_str%GBUF%RETRACTOR_ID(nel*g_retractor_id) ,stat=err)
2057 gbuf%RETRACTOR_ID(1:nel*g_retractor_id) = nint(rbuf(iad+1:iad+nel*g_retractor_id))
2058 iad = iad+nel*g_retractor_id
2059 ALLOCATE (elbuf_str%GBUF%RINGSLIP(nel*g_ringslip) ,stat=err)
2060 gbuf%RINGSLIP(1:nel*g_ringslip) = rbuf(iad+1:iad+nel*g_ringslip)
2061 iad = iad+nel*g_ringslip
2062 ALLOCATE (elbuf_str%GBUF%ADD_NODE(nel*g_add_node) ,stat=err)
2063 gbuf%ADD_NODE(1:nel*g_add_node) = nint(rbuf(iad+1:iad+nel*g_add_node))
2064 iad = iad+nel*g_add_node
2065 ALLOCATE (elbuf_str%GBUF%UPDATE(nel*g_update) ,stat=err)
2066 gbuf%UPDATE(1:nel*g_update) = nint(rbuf(iad+1:iad+nel*g_update))
2067 iad = iad+nel*g_update
2068 ALLOCATE (elbuf_str%GBUF%FRAM_FACTOR(nel*g_fram_factor
2069 gbuf%FRAM_FACTOR(1:nel*g_fram_factor) = rbuf(iad+1:iad+nel*g_fram_factor)
2070 iad = iad+nel*g_fram_factor
2071 ALLOCATE (elbuf_str%GBUF%INTVAR(nel*g_intvar) ,stat=err)
2072 gbuf%INTVAR(1:nel*g_intvar) = rbuf(iad+1:iad+nel*g_intvar)
2073 iad = iad+nel*g_intvar
2074 ALLOCATE (elbuf_str%GBUF%IDT_TSH(nel*g_idt_tsh) ,stat=err)
2075 gbuf%IDT_TSH(1:nel*g_idt_tsh) = nint(rbuf(iad+1:iad+nel*g_idt_tsh))
2076 iad = iad+nel*g_idt_tsh
2078 ALLOCATE (elbuf_str%GBUF%TEMPG(nel*gbuf%G_TEMPG),stat=err)
2079 gbuf%TEMPG(1:nel*gbuf%G_TEMPG)=rbuf(iad+1:iad+nel*gbuf%G_TEMPG)
2080 iad = iad + nel*gbuf%G_TEMPG
! Thermal-material (TM_*) history arrays.  TM_SIG1/TM_SIG3 share size
! g_tm_sig and TM_STRA1/TM_STRA3 share g_tm_stra; TM_PSIG/TM_PSTRA are
! allocated (length nel*2) but not restored from rbuf here.
2085 ALLOCATE (elbuf_str%GBUF%TM_YIELD(nel*g_tm_yield) ,stat=err)
2086 gbuf%TM_YIELD(1:nel*g_tm_yield) = rbuf(iad+1:iad+nel*g_tm_yield)
2087 iad = iad+nel*g_tm_yield
2088 ALLOCATE (elbuf_str%GBUF%TM_SEQ(nel*g_tm_seq) ,stat=err)
2089 gbuf%TM_SEQ(1:nel*g_tm_seq) = rbuf(iad+1:iad+nel*g_tm_seq)
2090 iad = iad+nel*g_tm_seq
2091 ALLOCATE (elbuf_str%GBUF%TM_EINT(nel*g_tm_eint
2092 gbuf%TM_EINT(1:nel*g_tm_eint) = rbuf(iad+1:iad
2093 iad = iad+nel*g_tm_eint
2094 ALLOCATE (elbuf_str%GBUF%TM_DMG(nel*g_tm_dmg) ,stat=err)
2095 gbuf%TM_DMG(1:nel*g_tm_dmg) = rbuf(iad+1:iad+nel*g_tm_dmg)
2096 iad = iad+nel*g_tm_dmg
2097 ALLOCATE (elbuf_str%GBUF%TM_SIG1(nel*g_tm_sig) ,stat=err)
2098 gbuf%TM_SIG1(1:nel*g_tm_sig) = rbuf(iad+1:iad+nel*g_tm_sig)
2099 iad = iad+nel*g_tm_sig
2100 ALLOCATE (elbuf_str%GBUF%TM_SIG3(nel*g_tm_sig) ,stat=err)
2101 gbuf%TM_SIG3(1:nel*g_tm_sig) = rbuf(iad+1:iad+nel*g_tm_sig)
2102 iad = iad+nel*g_tm_sig
2103 ALLOCATE (elbuf_str%GBUF%TM_STRA1(nel*g_tm_stra) ,stat=err)
2104 gbuf%TM_STRA1(1:nel*g_tm_stra) = rbuf(iad+1:iad+nel*g_tm_stra)
2105 iad = iad+nel*g_tm_stra
2106 ALLOCATE (elbuf_str%GBUF%TM_STRA3(nel*g_tm_stra) ,stat=err)
2107 gbuf%TM_STRA3(1:nel*g_tm_stra) = rbuf(iad+1:iad+nel*g_tm_stra)
2108 iad = iad+nel*g_tm_stra
2110 IF (g_tm_sig>0)
ALLOCATE (elbuf_str%GBUF%TM_PSIG(nel*2),stat=err)
2111 IF (g_tm_stra>0)
ALLOCATE (elbuf_str%GBUF%TM_PSTRA(nel*2),stat=err)
! Switch to the double-precision transfer buffer: release rbuf, extend
! sdp_rbuf to also hold SMSTR, allocate dp_rbuf, then restore the
! double-precision fields (per-layer VOL0DP, then SMSTR) using the
! separate read address iadp.
2114 IF (
ALLOCATED(rbuf))
DEALLOCATE (rbuf)
2115 sdp_rbuf = sdp_rbuf + gbuf%G_SMSTR * nel
2116 ALLOCATE (elbuf_str%GBUF%SMSTR(nel*g_smstr) ,stat=err)
2117 IF (sdp_rbuf > 0)
THEN
2118 ALLOCATE (dp_rbuf(sdp_rbuf))
2124 IF (l_vol0dp > 0)
THEN
2125 DO il = 1,elbuf_str%NLAY
2127 bufly => elbuf_str%BUFLY(il)
! NOTE(review): condition truncated -- the .OR. right-hand operand of
! the igtyp test was lost during extraction.
2128 IF (igtyp == 51 .OR.
THEN
2129 nptt = elbuf_str%BUFLY(il)%NPTT
2131 nptt = elbuf_str%NPTT
2136 lbuf => elbuf_str%BUFLY(il)%LBUF(ir,is,it)
2137 lbuf%VOL0DP(1:nel*l_vol0dp) = dp_rbuf(iadp+1:iadp+nel*l_vol0dp)
2138 iadp = iadp+nel*l_vol0dp
2145 gbuf%SMSTR(1:nel*g_smstr) = dp_rbuf(iadp+1:iadp+nel*g_smstr)