verbose mode for some functions

Michael Krayer 2021-06-15 15:55:45 +02:00
parent cddc34722a
commit 1ca85cd7c2
1 changed file with 21 additions and 12 deletions
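
The change adds a verbose keyword (default True) to load_field, gaussian_filter, save_state, and save_for_vtk, and replaces the remaining parprint calls with prints guarded so that only rank 0 writes. A minimal sketch of that pattern in plain mpi4py (self.rank and self.comm come from the PPP class, which is not shown in this diff):

    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    verbose = True
    # Guarding on the rank keeps the message from appearing once per process.
    if verbose and comm.rank == 0:
        print('[example] running on {} ranks'.format(comm.size))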


@@ -128,13 +128,13 @@ class PPP:
                 proc_grid_ext,nxp_ext,nyp_ext,nzp_ext,
                 nghbr,field,symmetries)
 
-    def load_field(self,key,io_limit=None,dtype=np.float64,verbose=False):
+    def load_field(self,key,io_limit=None,dtype=np.float64,verbose=True):
         '''Loads the required chunks from file'''
         from .field import Field3d
         import numpy as np
         # Verbose output
         if verbose and self.rank==0:
-            print('[load_field] loading {} (io_limit={})'.format(key,io_limit))
+            print('[load_field] key={}, io_limit={}'.format(key,io_limit))
         # Block execution of some processors if IO is limited
         sb = SequentialBlock(io_limit,comm=self.comm,per_node=True)
         # Determine which chunks are to be loaded by the current processor
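
SequentialBlock is defined outside this diff. Assuming its job is simply to let at most io_limit ranks perform IO at a time (counted per node when per_node=True), the core idea can be sketched with point-to-point messages; the names here are illustrative only:

    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    io_limit = 4
    # Ranks proceed in waves of io_limit: rank r waits for rank r-io_limit to finish.
    if comm.rank >= io_limit:
        comm.recv(source=comm.rank-io_limit, tag=0)
    # ... perform file IO here ...
    if comm.rank+io_limit < comm.size:
        comm.send(None, dest=comm.rank+io_limit, tag=0)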
@@ -305,7 +305,7 @@ class PPP:
         self.field[key_out] *= self.field[key2]
         return
 
-    def gaussian_filter(self,key,sigma,truncate=4.0,key_out=None,iterate=False):
+    def gaussian_filter(self,key,sigma,truncate=4.0,key_out=None,iterate=False,verbose=True):
         '''Applies a Gaussian filter to a field as an in-place operation. Sigma is the std of the filter in terms of grid width.'''
         import numpy as np
         from mpi4py import MPI
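
gaussian_filter_radius is likewise defined outside this diff. A common convention for truncating a Gaussian kernel, the one used by scipy.ndimage, is to cut it off at truncate standard deviations per axis, which is presumably how the stencil radius printed in the next hunk is obtained:

    import numpy as np

    sigma = np.array([2.0, 2.0, 0.0])  # std per axis, in units of the grid width
    truncate = 4.0                     # cut the kernel off at 4 sigma
    radius = (truncate*sigma + 0.5).astype(int)  # -> array([8, 8, 0])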
@@ -315,6 +315,8 @@ class PPP:
             self.copy(key,key_out,skip_data=True)
         # Compute radius of Gaussian filter
         radius = self.field[key].gaussian_filter_radius(sigma,truncate=truncate)
+        if verbose and self.rank==0:
+            print('[gaussian_filter] key={}, stencil radius={}'.format(key,radius))
         if not iterate:
             # Assert that we have sufficient amount of ghost cells
             assert all([self.num_ghost[ii]>=radius[ii] for ii in range(3)]),\
@@ -331,9 +333,11 @@ class PPP:
             assert all([radius[ii]>0 if sigma[ii]>0.0 else True for ii in range(3)]),\
                 "Iterative procedure leads to invalid stencil radius: "\
                 "increase number of ghost cells. {}".format(radius)
-            parprint('Gaussian filter: iterations={}, stencil radius={}'.format(niter,radius))
+            if verbose and self.rank==0:
+                print('[gaussian_filter] iterations={}'.format(niter))
             for iiter in range(niter):
-                parprint('Iter #{:d}: '.format(iiter),end='')
+                if verbose and self.rank==0:
+                    print('[gaussian_filter] iter #{:d}: '.format(iiter),end='')
                 tbeg = MPI.Wtime()
                 # Filter field: if key_out is None, perform operation inplace
                 self.field[key_out] = self.field[key].gaussian_filter(sigma,
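
The iterative branch relies on the fact that Gaussian filters compose: n passes with standard deviation s are equivalent to one pass with sqrt(n)*s, so a filter whose stencil would not fit into the ghost layer can be split into niter narrower passes (niter itself is computed outside the lines shown here). The identity, as a quick check:

    import numpy as np

    sigma_total = 6.0                          # requested filter width
    niter = 4                                  # hypothetical number of passes
    sigma_step = sigma_total/np.sqrt(niter)    # width of each individual pass
    assert np.isclose(np.sqrt(niter)*sigma_step, sigma_total)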
@@ -343,7 +347,8 @@ class PPP:
                 self.impose_boundary_conditions(key_out)
                 # Iterate inplace from now on
                 key = key_out
-                parprint('{:g} sec'.format(MPI.Wtime()-tbeg))
+                if verbose and self.rank==0:
+                    print('{:g} sec'.format(MPI.Wtime()-tbeg))
 
     def broadcast(self,key,operation,arg,key_out=None):
         '''Broadcasts an operation involving a scalar or matrix on
@@ -541,9 +546,11 @@ class PPP:
         gc.collect()
         return
 
-    def save_state(self,file,parallel=False):
+    def save_state(self,file,parallel=False,verbose=True):
         import h5py
         from mpi4py import MPI
+        if verbose and self.rank==0:
+            print('[save_state] file={}'.format(file))
         tbeg = MPI.Wtime()
         ascii_type = h5py.string_dtype('ascii',32)
         # Only use parallel IO if flag is set and h5py has MPIIO support
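
The comment above refers to h5py's optional MPI build. h5py reports that capability via h5py.get_config().mpi, and a parallel file is opened with the 'mpio' driver; a sketch with a hypothetical file name:

    import h5py
    from mpi4py import MPI

    parallel = True
    # Fall back to sequential IO when h5py was built without MPIIO support.
    if parallel and h5py.get_config().mpi:
        f = h5py.File('state.h5', 'w', driver='mpio', comm=MPI.COMM_WORLD)
    else:
        f = h5py.File('state.h5', 'w')
    f.close()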
@@ -624,11 +631,11 @@ class PPP:
         if not parallel: sb.proceed()
         self.comm.Barrier()
         tend = MPI.Wtime()
-        if self.rank==0:
-            print("[save_state] Elapsed time: {:f}".format(tend-tbeg))
+        if verbose and self.rank==0:
+            print("[save_state] elapsed time: {:f}".format(tend-tbeg))
         return
 
-    def save_for_vtk(self,file,key,stride=(1,1,1),truncate=True,merge_at_root=True,on_pressure_grid=True):
+    def save_for_vtk(self,file,key,stride=(1,1,1),truncate=True,merge_at_root=True,on_pressure_grid=True,verbose=True):
         '''Saves a field for visualization purposes. This means it will only have a single
         lower ghost cell if there is an upper neighbor, and both a lower and an upper
         ghost cell if there is no upper neighbor (or merged).'''
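
Assuming the stride keyword has its usual meaning, values larger than one subsample the field before it is written, e.g.:

    import numpy as np

    field = np.arange(64, dtype=np.float64).reshape(4, 4, 4)
    stride = (2, 2, 2)
    # Keep every stride-th point along each axis -> shape (2, 2, 2).
    sub = field[::stride[0], ::stride[1], ::stride[2]]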
@@ -643,6 +650,8 @@ class PPP:
             return
         # Since the data is usually much smaller than a full 'save_state', I only
         # implement sequential IO for now.
+        if verbose and self.rank==0:
+            print('[save_for_vtk] key={}, file={}'.format(key,file))
         tbeg = MPI.Wtime()
         # If flag is set, shift data onto pressure grid first. Use a temporary field for this.
         name = key
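
On a staggered grid, pressure values sit at cell centers while velocity components sit on cell faces, so shifting a component onto the pressure grid amounts to averaging the two adjacent face values. This is an assumption about what the temporary field does here, illustrated in one dimension:

    import numpy as np

    u_face = np.array([0.0, 1.0, 2.0, 3.0])      # values on cell faces
    u_center = 0.5*(u_face[:-1] + u_face[1:])    # averaged onto cell centers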
@@ -666,8 +675,8 @@ class PPP:
         self.comm.Barrier()
         # Print timing
         tend = MPI.Wtime()
-        if self.rank==0:
-            print("[save_for_vtk] Elapsed time: {:f}".format(tend-tbeg))
+        if verbose and self.rank==0:
+            print("[save_for_vtk] elapsed time: {:f}".format(tend-tbeg))
         return
 
     def to_vtk(self,key,stride=(1,1,1),merge_at_root=False):
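
With these changes all four methods report progress by default, and passing verbose=False restores quiet operation. Hypothetical usage (constructor arguments are omitted, as they do not appear in this diff):

    ppp = PPP(...)
    ppp.load_field('u', io_limit=4)       # rank 0 prints '[load_field] key=u, io_limit=4'
    ppp.gaussian_filter('u', sigma=(2.0, 2.0, 2.0), verbose=False)  # silent
    ppp.save_state('state.h5', parallel=True)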