implemented single-file field saving with downsampling, and field manipulation using statistics

Michael Krayer 2021-02-12 02:02:19 +01:00
parent f92bd36a76
commit 43a3ab56f6
1 changed file with 143 additions and 6 deletions


@@ -621,6 +621,108 @@ class ibmppp:
            fid.write('</Xdmf> \n')
            fid.close()
    def saveFieldSingleFile(self,filename,key,append=False,downsample=False,xdmf=False,verbose=False):
        '''Writes field data into a single h5 file.'''
        # Determine filename of output file
        file_out = filename+'.h5'
        # Compute global nx,ny,nz
        nxg = self.__procGrid[key][1][-1]-self.__procGrid[key][0][0]+1
        nyg = self.__procGrid[key][3][-1]-self.__procGrid[key][2][0]+1
        nzg = self.__procGrid[key][5][-1]-self.__procGrid[key][4][0]+1
        # Adjust these values if downsampling is requested
        if downsample:
            nxg = np.ceil(nxg/2).astype('int')
            nyg = np.ceil(nyg/2).astype('int')
            nzg = np.ceil(nzg/2).astype('int')
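            # Illustrative example (assumption: downsampling keeps every other
            # point, starting from global 1-based index 1): nxg=257 -> ceil(257/2)=129.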
        # Set up the correct slicing
        if not downsample:
            ishift = 0
            jshift = 0
            kshift = 0
            ifb = self.__localChunkBounds[key][0]
            jfb = self.__localChunkBounds[key][2]
            kfb = self.__localChunkBounds[key][4]
            nxl = self.__localChunkSize[key][0]
            nyl = self.__localChunkSize[key][1]
            nzl = self.__localChunkSize[key][2]
            step = 1
        else:
            ishift = 1 if self.__localChunkBounds[key][0]%2==0 else 0
            jshift = 1 if self.__localChunkBounds[key][2]%2==0 else 0
            kshift = 1 if self.__localChunkBounds[key][4]%2==0 else 0
            ifb = np.ceil((self.__localChunkBounds[key][0]+ishift)/2).astype('int')
            jfb = np.ceil((self.__localChunkBounds[key][2]+jshift)/2).astype('int')
            kfb = np.ceil((self.__localChunkBounds[key][4]+kshift)/2).astype('int')
            nxl = np.ceil(self.__localChunkSize[key][0]/2).astype('int')
            nyl = np.ceil(self.__localChunkSize[key][1]/2).astype('int')
            nzl = np.ceil(self.__localChunkSize[key][2]/2).astype('int')
            step = 2
        isl_ch = slice(self.__nghx+ishift,self.__nghx+self.__localChunkSize[key][0],step)
        jsl_ch = slice(self.__nghy+jshift,self.__nghy+self.__localChunkSize[key][1],step)
        ksl_ch = slice(self.__nghz+kshift,self.__nghz+self.__localChunkSize[key][2],step)
        isl_h5 = slice(ifb-1,ifb+nxl-1)
        jsl_h5 = slice(jfb-1,jfb+nyl-1)
        ksl_h5 = slice(kfb-1,kfb+nzl-1)
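        # Illustrative example of the downsampled slicing (worked through here,
        # not part of the algorithm): a chunk whose 1-based lower bound is 4 gets
        # ishift=1, so the first kept point is global index 5 (odd) and it lands
        # at coarse index ifb=ceil((4+1)/2)=3, i.e. the third point of the
        # downsampled grid; the parity shift keeps neighbouring chunks aligned.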
        # If append flag is set, we add a dataset to the existing file
        if append:
            ioflag = 'a'
        else:
            ioflag = 'w'
        # I avoid parallel IO because h5py is not available everywhere with
        # MPI support. So rank 0 does all the writing and we communicate manually.
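        # The write order is enforced with a simple token: each rank waits for a
        # message from the previous rank, writes its chunk, then notifies the
        # next rank.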
        if self.__rank==0:
            if verbose:
                print('[saveFieldSingleFile] rank {} / {} writing to file'.format(self.__rank,self.__nproc-1),end='\r')
            fid = h5py.File(file_out,ioflag)
            gid = fid.create_group('/'+key)
            gid.create_dataset('nx',data=nxg)
            gid.create_dataset('ny',data=nyg)
            gid.create_dataset('nz',data=nzg)
            did = gid.create_dataset('data',(nzg,nyg,nxg),dtype=self.__precision)
            did[ksl_h5,jsl_h5,isl_h5] = np.transpose(self.field[key][isl_ch,jsl_ch,ksl_ch])
            fid.close()
            # Pass the token to the next rank (skipped when running on a single rank)
            if self.__nproc>1:
                self.__comm.send(True, dest=self.__rank+1, tag=1)
        else:
            msg = self.__comm.recv(source=self.__rank-1,tag=1)
            if verbose:
                print('[saveFieldSingleFile] rank {} / {} writing to file'.format(self.__rank,self.__nproc-1),end='\r')
            fid = h5py.File(file_out,'a')
            fid['/'+key+'/data'][ksl_h5,jsl_h5,isl_h5] = np.transpose(self.field[key][isl_ch,jsl_ch,ksl_ch])
            fid.close()
            if self.__rank!=self.__nproc-1:
                self.__comm.send(True, dest=self.__rank+1, tag=1)
        # Create an XDMF file for the field
        if xdmf and self.__rank==0:
            if verbose:
                print('[saveFieldSingleFile] generating XDMF',end='\r')
            # Construct XDMF filename
            file_xdmf = filename+'_'+key+'.xdmf'
            filename_h5 = os.path.basename(file_out)
            # Open file and write header
            fid = open(file_xdmf,'w')
            fid.write('<?xml version="1.0"?>\n')
            fid.write('<Xdmf Version="2.0">\n')
            fid.write(' <Domain>\n')
            fid.write(' <Grid Name="{}">\n'.format(key))
            #fid.write(' <Time Value="%.2f" />\n',params.general.simtime)
            dx = self.__dx[key]*step
            x0 = self.grid[key][0][0]
            y0 = self.grid[key][1][0]
            z0 = self.grid[key][2][0]
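            # XDMF lists extents, origin and spacing slowest-to-fastest,
            # hence the Z Y X ordering in the entries below.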
            fid.write(' <Topology TopologyType="3DCORECTMesh" NumberOfElements="{:d} {:d} {:d}"/>\n'.format(nzg,nyg,nxg))
            fid.write(' <Geometry Origin="" Type="ORIGIN_DXDYDZ">\n')
            fid.write(' <DataItem DataType="Float" Dimensions="3" Format="XML" Precision="8"> {:12f} {:12f} {:12f}</DataItem>\n'.format(z0,y0,x0))
            fid.write(' <DataItem DataType="Float" Dimensions="3" Format="XML" Precision="8"> {:12f} {:12f} {:12f}</DataItem>\n'.format(dx,dx,dx))
            fid.write(' </Geometry>\n')
            fid.write(' <Attribute Name="{}" Dimensions="{:d} {:d} {:d}">\n'.format(key,nzg,nyg,nxg))
            fid.write(' <DataItem Format="HDF5" DataType="Float" Dimensions="{:d} {:d} {:d}">\n'.format(nzg,nyg,nxg))
            fid.write(' {}:/{}/data </DataItem>\n'.format(filename_h5,key))
            fid.write(' </Attribute>\n')
            fid.write(' </Grid>\n')
            fid.write(' </Domain>\n')
            fid.write('</Xdmf> \n')
            fid.close()
    def saveStatistics(self,filename):
        '''Writes all gathered statistics to a h5 file.'''
        if self.__rank==0:
@@ -1201,17 +1303,52 @@ class ibmppp:
        '''Subtract mean from field.'''
        # Check if this operation can be executed
        if self.__rank==0:
            if not key in self.spatialMean or self.spatialMean[key] is None:
                raise ValueError('Statistics not available')
        # Broadcast statistics
        self.spatialMean[key] = self.__comm.bcast(self.spatialMean[key],root=0)
        # Get dimensions
        dims = self.spatialMean[key].shape
        # Slice statistics for each processor
        ib,ie = (0,1) if dims[0]==1 else (self.__localChunkBounds[key][0]-1,self.__localChunkBounds[key][1])
        jb,je = (0,1) if dims[1]==1 else (self.__localChunkBounds[key][2]-1,self.__localChunkBounds[key][3])
        kb,ke = (0,1) if dims[2]==1 else (self.__localChunkBounds[key][4]-1,self.__localChunkBounds[key][5])
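        # Where the statistics have a singleton dimension (e.g. a mean profile
        # of shape (1,ny,1)), the (0,1) slice keeps that axis so that NumPy
        # broadcasting expands it over the full local chunk.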
        # Subtract mean from field
        self.field[key][
            self.__nghx:-self.__nghx,
            self.__nghy:-self.__nghy,
            self.__nghz:-self.__nghz] -= self.spatialMean[key][ib:ie,jb:je,kb:ke]
        # Exchange ghost cells and set boundary conditions
        self.exchangeGhostCells(key)
        self.imposeBoundaryConditions(key)
    def normalizeByStandardDeviation(self,key):
        '''Normalize field by its standard deviation.'''
        # Check if this operation can be executed
        if self.__rank==0:
            if (not key in self.spatialMean or self.spatialMean[key] is None or
                    not key in self.spatialMsqr or self.spatialMsqr[key] is None):
                raise ValueError('Statistics not available')
        # Broadcast statistics
        self.spatialMean[key] = self.__comm.bcast(self.spatialMean[key],root=0)
        self.spatialMsqr[key] = self.__comm.bcast(self.spatialMsqr[key],root=0)
        # Compute standard deviation
        assert(self.spatialMean[key].shape==self.spatialMsqr[key].shape)
        spatialStdDev = np.sqrt(self.spatialMsqr[key]-np.square(self.spatialMean[key]))
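        # std = sqrt(E[x^2] - E[x]^2); this assumes spatialMsqr holds the
        # spatial mean of the squared field (inferred from the expression above).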
        # Get dimensions
        dims = spatialStdDev.shape
        # Slice statistics for each processor
        ib,ie = (0,1) if dims[0]==1 else (self.__localChunkBounds[key][0]-1,self.__localChunkBounds[key][1])
        jb,je = (0,1) if dims[1]==1 else (self.__localChunkBounds[key][2]-1,self.__localChunkBounds[key][3])
        kb,ke = (0,1) if dims[2]==1 else (self.__localChunkBounds[key][4]-1,self.__localChunkBounds[key][5])
        # Divide field by its standard deviation
        self.field[key][
            self.__nghx:-self.__nghx,
            self.__nghy:-self.__nghy,
            self.__nghz:-self.__nghz] /= spatialStdDev[ib:ie,jb:je,kb:ke]
        # Exchange ghost cells and set boundary conditions
        self.exchangeGhostCells(key)
        self.imposeBoundaryConditions(key)
    def collocateVelocity(self):
        '''Moves velocity components onto the pressure grid.'''
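
For reference, a minimal usage sketch of the methods added in this commit. The object name pp, the field keys 'u' and 'v', and the output filename are placeholders, it assumes the relevant spatial statistics have already been gathered, and every call is collective, i.e. must be issued on all MPI ranks:

    # hypothetical post-processing driver (not part of this commit)
    pp.subtractMean('u')                     # remove the spatial mean
    pp.normalizeByStandardDeviation('u')     # scale to unit standard deviation
    # write the downsampled field into a single HDF5 file plus an XDMF descriptor
    pp.saveFieldSingleFile('ufluct', 'u', append=False, downsample=True, xdmf=True, verbose=True)
    # a second field can be appended to the same file
    pp.saveFieldSingleFile('ufluct', 'v', append=True, downsample=True)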