added particle saving routine

parent 9f2262ca7a
commit 277e5cc037
@@ -74,6 +74,10 @@ class ibmppp:
'p': [None,None,None],
's1': [None,None,None]
} # (nxl,nyl,nzl) size of local chunk (without ghost cells)
# Particles related quantities
self.__npartg, self.__npartl, self.__ncol = None, None, None
self.particles = None
self.col = None
# Evaluate flow type and set boundary conditions
self.__setBoundaryConditions(flowType)
# Prepare the processor layout

@@ -208,7 +212,13 @@ class ibmppp:
pp,col = ucf.readParticles(file_input,step=1)
# Remove time dimension, because we are dealing with a single time step exclusively
pp = pp[:,:,0]
# Duplicate particles over periodic boundaries
self.__ncol = pp.shape[0]
self.__npartg = pp.shape[1]
# Broadcast total number of particles and columns
self.__ncol,self.__npartg = self.__comm.bcast((self.__ncol,self.__npartg),root=0)
# Distribute particles among processors
if self.__rank==0:
# Duplicate particles over periodic boundaries and assign them a negative ID
dirkey = ['x','y','z']
isPeriodic = (self.__xperiodic,self.__yperiodic,self.__zperiodic)
for idir in range(0,3):

@@ -221,11 +231,13 @@ class ibmppp:
li_part = (pp[col[dirkey[idir]],:]<=bdmin+2.0*pp[col['r'],:])
pptmp = pp[:,li_part].copy()
pptmp[col[dirkey[idir]],:] += bdmax-bdmin
pptmp[col['id'],:] = -np.abs(pptmp[col['id'],:])
pp = np.concatenate((pp,pptmp),axis=1)
# Shift and append particles which are within one diameter from the upper bound
li_part = (pp[col[dirkey[idir]],:]>=bdmax-2.0*pp[col['r'],:])
pptmp = pp[:,li_part].copy()
pptmp[col[dirkey[idir]],:] -= bdmax-bdmin
pptmp[col['id'],:] = -np.abs(pptmp[col['id'],:])
pp = np.concatenate((pp,pptmp),axis=1)
# Go through individual chunks and distribute particles
for ip in range(0,self.__nxp):
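Side note: the blocks above implement the periodic duplication by shifting every particle that sits within one diameter of a boundary and tagging the copy with a negative ID. A minimal standalone numpy sketch of the same idea (the column map, the bounds and the particle values below are invented for illustration) could look like:

    import numpy as np

    # Hypothetical layout: one row per quantity, one column per particle.
    col = {'id': 0, 'x': 1, 'r': 2}
    pp = np.array([[1.0 , 2.0 , 3.0 ],   # particle ID
                   [0.10, 0.50, 0.97],   # x position
                   [0.05, 0.05, 0.05]])  # radius
    bdmin, bdmax = 0.0, 1.0              # periodic bounds in x

    # Copy particles within one diameter of the lower bound to the upper side ...
    near_lo = pp[col['x'],:] <= bdmin + 2.0*pp[col['r'],:]
    ghost = pp[:,near_lo].copy()
    ghost[col['x'],:] += bdmax - bdmin
    # ... and flag the copies with a negative ID so they can be dropped later.
    ghost[col['id'],:] = -np.abs(ghost[col['id'],:])
    pp = np.concatenate((pp,ghost),axis=1)
    # The upper bound is handled symmetrically with a shift of -(bdmax-bdmin).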
@@ -255,8 +267,10 @@ class ibmppp:
buffsize = 32*1024*1024
reqrecv = self.__comm.irecv(buffsize,source=0)
(self.particles,self.col) = reqrecv.wait()
# Compute local number of particles (including ghost particles)
self.__npartl = self.particles.shape[1]
# Wait for everyone to finish
if self.__rank==0:
# Wait for everyone to finish
MPI.Request.waitall(reqsend)

def loadField(self,key):
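For orientation, the matching send side on rank 0 lies outside this hunk; a minimal sketch of the pickle-based isend/irecv exchange that the hunk above appears to rely on (ranks, payload and column map below are made up) might be:

    from mpi4py import MPI
    import numpy as np

    comm = MPI.COMM_WORLD
    col = {'id': 0, 'x': 1}                      # hypothetical column map
    if comm.Get_rank() == 0:
        reqsend = []
        for dest in range(1, comm.Get_size()):
            chunk = np.zeros((2,3))              # particles destined for rank `dest`
            reqsend.append(comm.isend((chunk,col), dest=dest))
        # Wait until all sends have completed
        MPI.Request.waitall(reqsend)
    else:
        # The class preallocates a 32 MiB receive buffer; the default is used here.
        reqrecv = comm.irecv(source=0)
        particles, col = reqrecv.wait()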
@@ -568,11 +582,92 @@ class ibmppp:
fid.write('</Xdmf> \n')
fid.close()

def saveParticles(self,filename,xdmf=False):
'''Writes particles to h5 file.'''
# Keep only particles with a positive ID (periodic ghost copies carry a negative ID)
li_part = (self.particles[self.col['id'],:]>0)
# Construct send buffer
sendbuf = self.particles[:,li_part].flatten('F')
recvbuf = None
# Rank 0 gathers the length of each incoming array
sendcounts = np.array(self.__comm.gather(len(sendbuf),root=0))
if self.__rank==0:
recvbuf = np.empty(sum(sendcounts))
# Communicate data
self.__comm.Gatherv(sendbuf=sendbuf,recvbuf=(recvbuf, sendcounts),root=0)
# Rank 0 now holds all particles, but some duplicates remain:
# the ghost copies have been removed, yet particles that overlap several processor chunks appear once per chunk.
# Remove these via their duplicate IDs.
if self.__rank==0:
# Reshape to (ncol, number of particles), with the particle count inferred
pp = recvbuf.reshape((self.__ncol,-1),order='F')
# Get the index of the first occurrence of each unique ID
idx = np.unique(pp[self.col['id'],:],return_index=True)[1]
# Keep only those occurrences
pp = pp[:,idx]
# Validate shape
if pp.shape!=(self.__ncol,self.__npartg):
raise ValueError('Expected to have {} particles with {} columns, but got {} particles with {} columns.'.format(self.__npartg,self.__ncol,pp.shape[1],pp.shape[0]))
# Rank 0 now has the complete particle set: write data to HDF5 file
if self.__rank==0:
# Construct file name
file_h5 = filename+'.h5'
# Open file and write general information
fid = h5py.File(file_h5,'w')
fid.create_dataset('npart',data=self.__npartg)
fid.create_dataset('ncol',data=self.__ncol)
# Loop through quantities and write one dataset per quantity
for key in self.col:
did = fid.create_dataset(key,data=pp[self.col[key],:].flatten('K'))
# Close h5 file
fid.close()
# If XDMF output is requested, write the descriptor as well
if xdmf:
# Construct XDMF filename
file_xdmf = filename+'.xdmf'
# Open file
fid = open(file_xdmf,'w')
# Write header
fid.write('<?xml version="1.0"?>\n')
fid.write('<Xdmf Version="2.0">\n')
fid.write(' <Domain>\n')
# Write grid, i.e. particle positions
fid.write(' <Grid Name="particles">\n')
fid.write(' <Topology TopologyType="Polyvertex" NumberOfElements="{:d}" />\n'.format(self.__npartg))
fid.write(' <Geometry GeometryType="X_Y_Z">\n')
fid.write(' <DataItem Format="HDF5" Dimensions="{:d}">\n'.format(self.__npartg))
fid.write(' {}:/x\n'.format(os.path.basename(file_h5)))
fid.write(' </DataItem>\n')
fid.write(' <DataItem Format="HDF5" Dimensions="{:d}">\n'.format(self.__npartg))
fid.write(' {}:/y\n'.format(os.path.basename(file_h5)))
fid.write(' </DataItem>\n')
fid.write(' <DataItem Format="HDF5" Dimensions="{:d}">\n'.format(self.__npartg))
fid.write(' {}:/z\n'.format(os.path.basename(file_h5)))
fid.write(' </DataItem>\n')
fid.write(' </Geometry>\n')
# Write data
for key in self.col:
if key in ('x','y','z'):
continue
fid.write(' <Attribute Name="{}" Center="Node">\n'.format(key))
fid.write(' <DataItem Format="HDF5" Dimensions="{:d}">\n'.format(self.__npartg))
fid.write(' {}:/{}\n'.format(os.path.basename(file_h5),key))
fid.write(' </DataItem>\n')
fid.write(' </Attribute>\n')
# Write footer
fid.write(' </Grid>\n')
fid.write(' </Domain>\n')
fid.write('</Xdmf>\n')
# Close file
fid.close()
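Aside: the variable-length gather in saveParticles follows the common mpi4py recipe of exchanging the per-rank element counts first and then calling Gatherv with those counts. A self-contained sketch of that recipe (array contents invented for illustration):

    from mpi4py import MPI
    import numpy as np

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # Each rank contributes a flat float64 array of a different length.
    sendbuf = np.full(rank+1, float(rank))
    # Rank 0 first learns how many elements every rank will send ...
    sendcounts = np.array(comm.gather(len(sendbuf), root=0))
    if rank == 0:
        recvbuf = np.empty(sum(sendcounts), dtype=np.float64)
    else:
        recvbuf = None
    # ... and then collects everything with a single variable-count gather.
    comm.Gatherv(sendbuf=sendbuf, recvbuf=(recvbuf,sendcounts), root=0)
    if rank == 0:
        print(recvbuf)   # concatenation of all per-rank arrays, ordered by rank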

def saveGrid(self,filename):
'''Writes the full grid to a h5 file.'''
if self.__rank==0:
# Construct file name
file_h5 = filename+'.h5'
# Open the file and write data
fid = h5py.File(file_h5,'w')
for key in self.grid:
gid = fid.create_group('/'+key)
gid.create_dataset('x',data=self.grid[key][0])
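Assuming a file produced by saveParticles above (the file name and the 'x' key below are just examples), the per-quantity datasets can be read back with plain h5py, which is also a quick way to sanity-check the output:

    import h5py

    # 'particles.h5' stands for any file written by saveParticles.
    with h5py.File('particles.h5','r') as fid:
        npart = fid['npart'][()]    # total number of particles
        x = fid['x'][:]             # one flat array per quantity in self.col
        print(npart, x.shape)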
@@ -584,8 +679,10 @@ class ibmppp:
def saveProcGrid(self,filename,original=False):
'''Writes the processor grid to a h5 file.'''
if self.__rank==0:
# Construct file name
file_h5 = filename+'.h5'
# Open the file and write data
fid = h5py.File(file_h5,'w')
for key in self.grid:
gid = fid.create_group('/'+key)
if original:

@@ -840,10 +937,8 @@ class ibmppp:

def maskParticles(self,key,reconstruct=False,cval=np.nan,padding=0.0):
'''Fills grid points which lie inside the solid phase with values.'''
# Loop through local particles
for ipart in range(0,self.__npartl):
# Slice a box from the field around the particle
xp,yp,zp,rp = (self.particles[self.col['x'],ipart],
self.particles[self.col['y'],ipart],