major updates

Michael Krayer 2021-05-23 23:14:26 +02:00
parent 7153315dfd
commit d23824128c
6 changed files with 514 additions and 77 deletions

__init__.py

@ -1,4 +1,5 @@
from . import particle
from . import visu
from . import field
from . import ucf
from . import helper

field.py

@ -14,10 +14,10 @@ class Field3d:
def __str__(self):
str = 'Field3d with\n'
str+= ' dimension {}, {}, {}\n'.format(*self.numpoints)
str+= ' origin {:G}, {:G}, {:G}\n'.format(*self.origin)
str+= ' spacing {:G}, {:G}, {:G}\n'.format(*self.spacing)
str+= ' period {}, {}, {}'.format(*self.period)
str+= ' dimension: {}, {}, {}\n'.format(*self.numpoints)
str+= ' origin: {:G}, {:G}, {:G}\n'.format(*self.origin)
str+= ' spacing: {:G}, {:G}, {:G}\n'.format(*self.spacing)
str+= ' period: {}, {}, {}'.format(*self.period)
return str
@classmethod
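
For context, a minimal sketch of the reformatted summary in use; it assumes the positional Field3d constructor (data, origin, spacing, period) that appears later in this commit and is not part of the diff:

import numpy
# hypothetical tiny field; Field3d is the class defined in field.py above
f = Field3d(numpy.zeros((4,4,4)),(0.,0.,0.),(0.1,0.1,0.1),(None,None,None))
print(f)   # prints the dimension/origin/spacing/period summary shown above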

helper.py (new file, 67 additions)

@ -0,0 +1,67 @@
def load_mat(f,var,squeeze=True):
try:
return load_matbin(f,var,squeeze=squeeze)
except NotImplementedError:
return load_matv73(f,var,squeeze=squeeze)
def load_matbin(f,var,squeeze=True):
'''Loads variables from a matlab binary mat file (not v7.3).'''
from scipy.io import loadmat
import numpy
# loadmat expects a sequence of variable names; wrap a single name in a list
names = [var] if isinstance(var,str) else var
data = loadmat(f,variable_names=names,squeeze_me=squeeze,struct_as_record=True)
def parse(val):
if not isinstance(val,numpy.ndarray):
return val
if val.dtype.names is not None:
d = {}
for key in val.dtype.names:
d[key] = parse(val[key].item())
return d
else:
return val
if isinstance(var,(tuple,list)):
return tuple(parse(data[x]) for x in var)
else:
return parse(data[var])
def load_matv73(f,var,squeeze=True,cast_string=True,cast_integer=True):
'''Loads variables from a matlab hdf5 mat file (v7.3).'''
import numpy
import h5py
if isinstance(f,str):
f = h5py.File(f,'r')
if isinstance(var,(list,tuple)):
# propagate the keyword options to the per-variable calls
return tuple(load_matv73(f,x,squeeze=squeeze,cast_string=cast_string,cast_integer=cast_integer) for x in var)
def parse(val):
# String data
if cast_string and val.dtype==numpy.dtype('uint16'):
return ''.join([chr(x) for x in val.squeeze()])
# Numeric data
val = val.transpose()
if cast_integer:
val_int = numpy.rint(val)
if numpy.all(numpy.abs(val-val_int)<1e-8):
val = val_int.astype('int')
if squeeze:
val = val.squeeze()
if val.shape==():
val = val.item()
return val
t = f[var]
if isinstance(t,h5py.Dataset):
val = t[:]
if val.dtype==numpy.dtype('object'):
# Cell arrays are stored as references
for idx,x in numpy.ndenumerate(val):
val[idx] = parse(f[val[idx]][:])
return val
else:
return parse(val)
else:
d = {}
for key in t:
d[key] = load_matv73(t,key,
squeeze=squeeze,
cast_string=cast_string,
cast_integer=cast_integer)
return d
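
A brief usage sketch for the new helper, not part of the diff; the file name and variable names are placeholders:

# load_mat as defined in helper.py above
# single variable: the value itself is returned
pp = load_mat('run.mat','pp')
# several variables: a tuple is returned in the same order
pp,time = load_mat('run.mat',['pp','time'])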

particle.py

@ -1,3 +1,270 @@
class Particles:
def __init__(self,pp,col,time,period=(None,None,None),select_col=None):
import numpy
assert 'id' in col, "Need column 'id' to initialize Particles"
assert 'x' in col, "Need column 'x' to initialize Particles"
assert 'y' in col, "Need column 'y' to initialize Particles"
assert 'z' in col, "Need column 'z' to initialize Particles"
assert pp.ndim==2 or pp.shape[2]==1, "'pp' cannot be a time series."
self.num = pp.shape[1]
if pp.ndim==2:
pp = numpy.expand_dims(pp,axis=2)
if select_col is not None:
for scol in select_col:
assert scol in col, "Selected column '{}' not found in 'col'.".format(scol)
# Copy attributes and sort by ID
self.attr = {}
sortidx = pp[col['id'],:,0].argsort()
idsorted = pp[col['id'],sortidx,0]
#assert numpy.isclose(idsorted[0],1), "Particle IDs do not start at 1."
#assert numpy.isclose(idsorted[-1],self.num), "Particle IDs do not end at Np."
#assert numpy.all(numpy.diff(idsorted)>0), "Particle IDs are not unique."
for key in col:
if (key!='id') and (select_col is None or key in select_col or key in ('x','y','z')):
self.add_attribute(key,pp[col[key],sortidx,0])
self.period = period
self.time = time
def __getitem__(self,key):
return self.get_attribute(key)
def __str__(self):
str = '{:d} particles with\n'.format(self.num)
str += ' attributes:'
for key in self.attr.keys():
str += ' {}'.format(key)
str += '\n'
str+= ' period: {}, {}, {}'.format(*self.period)
return str
def add_attribute(self,key,val):
import numpy
if isinstance(val,(tuple,list,numpy.ndarray)):
# convert first so that tuples and lists also pass the ndim check
val = numpy.array(val)
assert val.ndim==1 and len(val)==self.num, "Invalid 'val'."
self.attr[key] = val
else:
self.attr[key] = numpy.full(self.num,val)
return
def del_attribute(self,key):
del self.attr[key]
def get_attribute(self,key,id=None):
import numpy
if id is not None:
assert id>=1, "Particle IDs start with 1."
assert id<=self.num, "Requested ID exceeds number of particles."
if id is None:
return self.attr[key]
else:
return self.attr[key][id-1]
def get_position(self,axis=None,id=None):
import numpy
if axis is None:
return numpy.vstack([self.get_attribute(key,id=id) for key in ('x','y','z')])
else:
assert axis<3, "'axis' must be smaller than 3."
key = ('x','y','z')[axis]
return self.get_attribute(key,id=id)
def has_attribute(self,key):
return key in self.attr
def translate(self,translation,axis=0):
'''Translates particles while taking into account periodicity.'''
assert axis<3, "'axis' must be smaller than 3."
key = ('x','y','z')[axis]
self.attr[key] += translation
if self.period[axis] is not None:
self.attr[key] %= self.period[axis]
return
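
A minimal sketch of constructing a Particles instance directly from an array; the column layout and values are made up for illustration and are not part of the diff:

import numpy
# Particles is the class defined above; 5 columns (id,x,y,z,r) for 3 particles
col = {'id':0,'x':1,'y':2,'z':3,'r':4}
pp = numpy.array([[1.,2.,3.],            # id
                  [0.1,0.5,0.9],         # x
                  [0.2,0.6,1.0],         # y
                  [0.3,0.7,1.1],         # z
                  [0.05,0.05,0.05]])     # r
part = Particles(pp,col,time=0.0,period=(1.0,None,None))
print(part.get_position(id=2))   # position of the particle with ID 2
part.translate(0.5,axis=0)       # x is wrapped by the period of 1.0
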
class Trajectories:
def __init__(self,part,copy=False):
import numpy
from copy import deepcopy
self.numtime = 1
self.numpart = part.num
self.copy = copy
# Attributes are first stored as a list of views into instances of
# Particles. Later they will be concatenated into a numpy array
# and the attributes of Particles will be views.
self.attr = {}
self.part = []
self._is_data_list = True
self.unraveled = False
if copy:
self.part.append(deepcopy(part))
else:
self.part.append(part)
for key in self.part[0].attr:
self.attr[key] = [self.part[0].attr[key].view()]
self.period = self.part[0].period
if any([period is not None for period in self.period]):
self._crossings = numpy.zeros((6,self.numpart,1),dtype=bool)
else:
self._crossings = None
return
def __str__(self):
str = 'Trajectory with\n'
str+= ' time steps: {:d}\n'.format(self.numtime)
str+= ' particles: {:d}'.format(self.numpart)
return str
def __iadd__(self,other):
if isinstance(other,Trajectories):
self.add_trajectories(other)
elif isinstance(other,Particles):
self.add_particles(other)
else:
raise TypeError('Cannot add type {}'.format(type(other)))
return self
def __getitem__(self,val):
assert isinstance(val,tuple) and len(val)==2, "Trajectories must be indexed by [part,time]."
sl = []
for x in val:
if isinstance(x,int):
lo,hi = x,x+1
hi = hi if hi!=0 else None
sl.append(slice(lo,hi))
elif isinstance(x,slice):
sl.append(x)
else:
raise TypeError("Trajectories can only be sliced by slice objects or integers.")
return self.get_trajectory(slice_part=sl[0],slice_time=sl[1])
@classmethod
def from_mat(cls,file):
from .helper import load_mat
pp,col,time,ccinfo = load_mat(file,['pp','colpy','time','ccinfo'])
period = [None,None,None]
if bool(ccinfo['xperiodic']): period[0]=ccinfo['b']
if bool(ccinfo['yperiodic']): period[1]=ccinfo['d']
if bool(ccinfo['zperiodic']): period[2]=ccinfo['f']
for key in col:
col[key]-=1
ntime = len(time)
traj = cls(Particles(pp[:,:,0],col,time[0],period))
for ii in range(1,ntime):
traj += Particles(pp[:,:,ii],col,time[ii],period)
return traj
def add_particles(self,part):
import numpy
from copy import deepcopy
# Verify part
assert part.time>self.part[-1].time, "Time steps must be added in monotonically increasing order."
assert part.num==self.numpart, "Number of particles is different from previous time steps."
assert all([part.period[ii]==self.period[ii] for ii in range(0,3)]), "Period differs!"
for key in self.attr:
assert key in part.attr, "Particles to be added are missing attribute '{}'".format(key)
for key in part.attr:
assert key in self.attr, "Particles to be added have additional attribute '{}'".format(key)
# Add new data
self._make_data_list()
if self.copy:
self.part.append(deepcopy(part))
else:
self.part.append(part)
for key in self.attr:
self.attr[key].append(self.part[-1].attr[key].view())
self.numtime += 1
return
def add_trajectories(self,traj):
from copy import deepcopy
# Verify traj
assert traj.part[0].time>self.part[-1].time, "Time steps must be added in monotonically increasing order."
assert traj.numpart==self.numpart, "Number of particles is different from previous time steps."
assert all([traj.period[ii]==self.period[ii] for ii in range(0,3)]), "Period differs!"
for key in self.attr:
assert key in traj.attr, "Trajectories to be added are missing attribute '{}'".format(key)
for key in traj.attr:
assert key in self.attr, "Trajectories to be added have additional attribute '{}'".format(key)
# Add new data
self._make_data_list()
traj._make_data_list()
if self.copy:
self.part += deepcopy(traj.part)
else:
self.part += traj.part
for ii in range(0,traj.numtime):
for key in self.attr:
self.attr[key].append(self.part[-traj.numtime+ii].attr[key].view())
self.numtime += traj.numtime
return
def get_time(self):
import numpy
time = numpy.zeros(self.numtime)
for ii,part in enumerate(self.part):
time[ii] = part.time
return time
def get_trajectory(self,id=None,slice_part=slice(None),slice_time=slice(None)):
# WARNING: slice uses array indexing instead of ID
import numpy
assert isinstance(slice_part,slice), "'slice_part' must be a slice."
assert isinstance(slice_time,slice), "'slice_time' must be a slice."
self._make_data_array()
if id is None: # ndim X npart X ntime
return numpy.stack([
self.attr['x'][slice_part,slice_time],
self.attr['y'][slice_part,slice_time],
self.attr['z'][slice_part,slice_time]],axis=0)
else:
if id<0:
id = self.numpart+id+1
assert id>=1 and id<=self.numpart, "Particle ID out-of-bounds: {:d}".format(id)
id = slice(id-1,id)
return numpy.stack([
self.attr['x'][id,slice_time],
self.attr['y'][id,slice_time],
self.attr['z'][id,slice_time]],axis=0)
def get_segments(self,id=None):
# TBD: not yet implemented
return
def unravel(self):
if self.unraveled: return
if self._crossings is None: return
self._compute_crossings()
# TBD: shift positions by the accumulated boundary crossings
self.unraveled = True
return
def ravel(self):
if not self.unraveled: return
self.unraveled = False
return
def _compute_crossings(self):
import numpy
if self._crossings is not None: # None means no periodicity
num_computed = self._crossings.shape[1]
if num_computed==self.numtime:
return
crossings = numpy.zeros((6,self.numpart,self.numtime),dtype=bool)
crossings[:,:,0:num_computed] = self._crossings
for ii in range(num_computed,self.numtime):
for axis in range(0,3):
if self.period[axis] is not None:
key = ('x','y','z')[axis]
posdiff = (self.part[ii].attr[key]-self.part[ii-1].attr[key]).squeeze()
doesCross = abs(posdiff)>0.5*self.period[axis]
upCross = numpy.logical_and(posdiff<0,doesCross)
crossings[2*axis,:,ii] = doesCross
crossings[2*axis+1,:,ii] = upCross
self._crossings = crossings
def _make_data_array(self):
import numpy
#print('DEBUG: _make_data_array')
if not self._is_data_list:
return
#print([x.shape for x in self.attr['x']])
for key in self.attr:
self.attr[key] = numpy.stack(self.attr[key],axis=1)
for ii in range(0,self.numtime):
self.part[ii].attr[key] = self.attr[key][:,ii]
#print(self.attr['x'].shape)
self._is_data_list = False
return
def _make_data_list(self):
import numpy
#print('DEBUG: _make_data_list')
if self._is_data_list:
return
#print(self.attr['x'].shape)
for key in self.attr:
self.attr[key] = [numpy.squeeze(x) for x in numpy.split(self.attr[key],self.numtime,axis=1)]
for ii in range(0,self.numtime):
self.part[ii].attr[key] = self.attr[key][ii].view()
#print([x.shape for x in self.attr['x']])
self._is_data_list = True
return
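
A short sketch of assembling and slicing trajectories, reusing the hypothetical part object from the Particles sketch above; Trajectories.from_mat provides the same starting from a .mat file:

traj = Trajectories(part)    # first time step
traj += later_part           # hypothetical Particles at a later time, same IDs/attributes/period
print(traj)                  # number of time steps and particles
xyz = traj[0,:]              # trajectory of the first particle, shape (3,1,numtime)
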
def sort(pp,col):
ncol,npart,ntime = pp.shape
assert('id' in col)
@ -6,6 +273,54 @@ def sort(pp,col):
pp[:,:,itime] = pp[:,idx,itime]
return pp
# def mask(pp,col,field,reconstruct=False,cval=np.nan,padding=0.0):
# from .field import Field3d
# '''Fills grid points which lie inside of solid phase with values.'''
# # Loop through local particles
# for ipart in range(0,self.__npartl):
# # Slice a box from the field around the particle
# xp,yp,zp,rp = (self.particles[self.col['x'],ipart],
# self.particles[self.col['y'],ipart],
# self.particles[self.col['z'],ipart],
# self.particles[self.col['r'],ipart])
# rp += padding*self.__dx[key]
# # Get coefficients for reconstruction
# if reconstruct:
# coeff_lin = self.particles[self.col[key],ipart]
# if key=='u':
# coeff_rotx = 0.0
# coeff_roty = -self.particles[self.col['oz'],ipart]
# coeff_rotz = self.particles[self.col['oy'],ipart]
# elif key=='v':
# coeff_rotx = self.particles[self.col['oz'],ipart]
# coeff_roty = 0.0
# coeff_rotz = -self.particles[self.col['ox'],ipart]
# elif key=='w':
# coeff_rotx = -self.particles[self.col['oy'],ipart]
# coeff_roty = self.particles[self.col['ox'],ipart]
# coeff_rotz = 0.0
# else:
# coeff_rotx = 0.0
# coeff_roty = 0.0
# coeff_rotz = 0.0
# # Get bounding box of particle
# idx_x = np.nonzero((xg>=xp-rp) & (xg<=xp+rp))[0]
# idx_y = np.nonzero((yg>=yp-rp) & (yg<=yp+rp))[0]
# idx_z = np.nonzero((zg>=zp-rp) & (zg<=zp+rp))[0]
# # Triple for loop
# for ii in range(idx_x[0],idx_x[-1]+1):
# Dx = xg[ii]-xp
# for jj in range(idx_y[0],idx_y[-1]+1):
# Dy = yg[jj]-yp
# for kk in range(idx_z[0],idx_z[-1]+1):
# Dz = zg[kk]-zp
# isInside = Dx*Dx+Dy*Dy+Dz*Dz <= rp*rp
# if isInside:
# if reconstruct:
# self.field[key][ii,jj,kk] = coeff_lin + coeff_rotx*Dx + coeff_roty*Dy + coeff_rotz*Dz
# else:
# self.field[key][ii,jj,kk] = cval
def slice_columns(pp,col,keys):
idx_col = []
col_new = {}

ucf.py (150 changes)

@ -460,12 +460,68 @@ class UCF:
self.__currentSetParams = 0
self.__currentSetNumElements = 0
class UCFSnapshot:
'''Handles a snapshot.ucf.tar file.'''
class UCFTarFile:
'''Base class for handlers of ucf.tar files.'''
def __init__(self,file_tar,file_index=None):
self.handler = Ustar(file_tar,file_index)
self.verbose = False
self.debug = False
self._buffer_grid = None
self._buffer_proc = None
self._buffer_params = None
def read_grid(self,key=None):
self._initialize_buffer_grid()
if key is not None:
gidx = self.gidx_from_key(key)
return self._buffer_grid[gidx]
return self._buffer_grid
def read_procgrid(self,key=None):
self._initialize_buffer_proc()
if key is not None:
gidx = self.gidx_from_key(key)
return self._buffer_proc[gidx]
return self._buffer_proc
def read_parameters(self):
self._initialize_buffer_params()
return self._buffer_params
def period(self,axis=None):
if axis is None:
return tuple(self.period(axis=ii) for ii in range(0,3))
else:
assert axis<3, "'axis' must be smaller than 3"
params = self.read_parameters()
key_bound = ('b','d','f')
key_period = ('xperiodic','yperiodic','zperiodic')
period = params.getfloat('geometry',key_bound[axis])
isPeriodic = params.getboolean('geometry',key_period[axis])
return period if isPeriodic else None
def gidx_from_key(self,key):
if key[0]=='u': return 0
elif key[0]=='v': return 1
elif key[0]=='w': return 2
elif key[0]=='p': return 3
elif key[0]=='s': return 4
else: raise ValueError("Invalid value of 'key'.")
def _initialize_buffer_grid(self):
if self._buffer_grid is None:
data = self.handler.read('grid.bin')
self._buffer_grid = read_grid(data,verbosity=self.verbose,debug=self.debug)
return
def _initialize_buffer_proc(self):
if self._buffer_proc is None:
data = self.handler.read('proc.bin')
self._buffer_proc = read_procgrid(data,verbosity=self.verbose,debug=self.debug)
return
def _initialize_buffer_params(self):
if self._buffer_params is None:
data = self.handler.read('parameters.asc')
self._buffer_params = read_parameters(data)
return
class UCFSnapshot(UCFTarFile):
'''Handles a snapshot.ucf.tar file.'''
def __init__(self,file_tar,file_index=None):
super(UCFSnapshot,self).__init__(file_tar,file_index=file_index)
file_name_string = '\t'.join(self.handler.file_name)
if 'uvwp' in file_name_string:
self.type = 'uvwp'
@ -473,11 +529,8 @@ class UCFSnapshot:
self.type = 'scal'
else:
raise ValueError("Archive contains neither 'uvwp' nor 'scal' files.")
self.__nproc = sum(self.type in s for s in self.handler.file_name)
self.__nprocijk = None
self.__buffer_grid = None
self.__buffer_proc = None
self.__buffer_params = None
self._nproc = sum(self.type in s for s in self.handler.file_name)
self._nprocijk = None
def read_particles(self):
data = self.handler.read('particles.bin')
return read_particles(data,step=1,verbosity=self.verbose,debug=self.debug)
@ -486,21 +539,6 @@ class UCFSnapshot:
data = self.handler.read(file_target)
return read_chunk(data,step=1,dset=dset,keep_ghost=keep_ghost,
verbosity=self.verbose,debug=self.debug)
def read_grid(self,key=None):
self.__initialize_buffer_grid()
if key is not None:
gidx = self.gidx_from_key(key)
return self.__buffer_grid[gidx]
return self.__buffer_grid
def read_procgrid(self,key=None):
self.__initialize_buffer_proc()
if key is not None:
gidx = self.gidx_from_key(key)
return self.__buffer_proc[gidx]
return self.__buffer_proc
def read_parameters(self):
self.__initialize_buffer_params()
return self.__buffer_params
def field_chunk(self,rank,key,keep_ghost=True):
'''Returns chunk data as Field3d object.'''
from .field import Field3d
@ -527,24 +565,18 @@ class UCFSnapshot:
return Field3d(chunk['data'],(xo,yo,zo),(dx,dy,dz),period)
def field_complete(self):
# TBD: not yet implemented
return
def period(self,axis=None):
if axis is None:
return tuple(self.period(axis=ii) for ii in range(0,3))
else:
assert axis<3, "'axis' must be smaller than 3"
params = self.read_parameters()
key_bound = ('b','d','f')
key_period = ('xperiodic','yperiodic','zperiodic')
period = params.getfloat('geometry',key_bound[axis])
isPeriodic = params.getboolean('geometry',key_period[axis])
return period if isPeriodic else None
def particles(self,select_col=None):
'''Returns particle data as Particles object.'''
from .particle import Particles
pp,col,time = self.read_particles()
return Particles(pp,col,time,self.period(),select_col=select_col)
def nproc(self,axis=None):
if axis is None:
return self.__nproc
return self._nproc
else:
assert axis<3, "'axis' must be smaller than 3"
self.__initialize_nprocijk()
return self.__nprocijk[axis]
self._initialize_nprocijk()
return self._nprocijk[axis]
def ijk_from_rank(self,rank):
assert rank<self.nproc(), "'rank' exceeds highest rank."
ijk = (
@ -565,13 +597,6 @@ class UCFSnapshot:
elif key[0]=='p': return 3
elif key[0]=='s': return int(key[1:])
else: raise ValueError("Invalid value of 'key'.")
def gidx_from_key(self,key):
if key[0]=='u': return 0
elif key[0]=='v': return 1
elif key[0]=='w': return 2
elif key[0]=='p': return 3
elif key[0]=='s': return 4
else: raise ValueError("Invalid value of 'key'.")
def has_key(self,key):
if key in ('u','v','w','p') and self.type=='uvwp':
return True
@ -579,24 +604,27 @@ class UCFSnapshot:
return True
else:
return False
def __initialize_buffer_grid(self):
if self.__buffer_grid is None:
data = self.handler.read('grid.bin')
self.__buffer_grid = read_grid(data,verbosity=self.verbose,debug=self.debug)
return
def __initialize_buffer_proc(self):
if self.__buffer_proc is None:
data = self.handler.read('proc.bin')
self.__buffer_proc = read_procgrid(data,verbosity=self.verbose,debug=self.debug)
return
def __initialize_buffer_params(self):
if self.__buffer_params is None:
data = self.handler.read('parameters.asc')
self.__buffer_params = read_parameters(data)
return
def __initialize_nprocijk(self):
self.__initialize_buffer_proc()
self.__nprocijk = tuple([len(self.__buffer_proc[2*ii]) for ii in range(0,3)])
def _initialize_nprocijk(self):
self._initialize_buffer_proc()
self._nprocijk = tuple([len(self._buffer_proc[2*ii]) for ii in range(0,3)])
class UCFAuxiliary(UCFTarFile):
'''Handles an auxiliary.ucf.tar file.'''
def __init__(self,file_tar,file_index=None):
super(UCFAuxiliary,self).__init__(file_tar,file_index=file_index)
# TBD: support statistics files
def read_particles(self):
data = self.handler.read('append_particles.bin')
return read_particles(data,step=None,verbosity=self.verbose,debug=self.debug)
def trajectories(self,select_col=None):
from .particle import Particles,Trajectories
pp,col,time = self.read_particles()
part = Particles(pp[:,:,0],col,time[0],self.period(),select_col=select_col)
traj = Trajectories(part,copy=False)
for ii in range(1,len(time)):
part = Particles(pp[:,:,ii],col,time[ii],self.period(),select_col=select_col)
traj.add_particles(part)
return traj
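
A hedged usage sketch for the reorganized tar-file handlers; the archive names are placeholders and this is not part of the diff:

snap = UCFSnapshot('snapshot.ucf.tar')
params = snap.read_parameters()             # cached after the first read
chunk = snap.field_chunk(rank=0,key='u')    # chunk of rank 0 as a Field3d
part = snap.particles()                     # Particles at the snapshot time

aux = UCFAuxiliary('auxiliary.ucf.tar')
traj = aux.trajectories()                   # Trajectories over all appended time steps
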
class Ustar:
'''Minimalistic ustar implementation meant to be used with ucftar files'''

visu.py (48 changes)

@ -4,7 +4,7 @@ def add_domain_bounds(plotter,bounds,color='black',line_width=2):
plotter.add_mesh(domain,color=color,style='wireframe',line_width=line_width)
return
def add_particles(plotter,pp,col,itime=0,
def add_particles(plotter,part,
name=None,
color='white',
scalars=None,cmap=None,clim=None,
@ -30,18 +30,13 @@ def add_particles(plotter,pp,col,itime=0,
Opacity of the mesh. If a single float value is given, it will be the global opacity of the mesh and uniformly applied everywhere - should be between 0 and 1. A string can also be specified to map the scalars range to a predefined opacity transfer function (options include: linear, linear_r, geom, geom_r). A string could also be used to map a scalars array from the mesh to the opacity (must have same number of elements as the scalars argument). Or you can pass a custom made transfer function that is an array either n_colors in length or shorter.
'''
import pyvista
ncol,npart,ntime = pp.shape
assert(itime<ntime)
assert('x' in col)
assert('y' in col)
assert('z' in col)
assert('r' in col)
pts = pyvista.PolyData(pp[(col['x'],col['y'],col['z']),:,itime].transpose())
pts['r'] = pp[(col['r']),:,itime].squeeze()
assert part.has_attribute('r')
pts = pyvista.PolyData(part.get_position().transpose())
pts['r'] = part['r']
if scalars:
assert(scalars in col)
pts[scalars] = pp[(col[scalars]),:,itime].squeeze()
assert part.has_attribute(scalars)
pts[scalars] = part[scalars]
color=None
geom = pyvista.Sphere(radius=1,theta_resolution=theta_resolution,phi_resolution=phi_resolution)
@ -79,6 +74,37 @@ def field_to_pvmesh(field):
mesh.point_arrays['values'] = field.data.flatten(order='F') # TBD: check order
return mesh
def clip_out_of_bounds(mesh,bounds,axis=0,inplace=True,clip_lower=True,clip_upper=True):
assert axis<3, "axis must be in [0,1,2]."
new_bounds = bounds[2*axis:2*axis+2]
old_bounds = mesh.bounds[2*axis:2*axis+2]
#print('DEBUG: New bounds: ',new_bounds)
#print('DEBUG: Old bounds: ',old_bounds)
if new_bounds[0]>old_bounds[0] and clip_lower:
#print('DEBUG: Clipping lower',new_bounds[0],old_bounds[0])
normal = [0,0,0]
normal[axis] = -1
origin = [0,0,0]
origin[axis] = new_bounds[0]
if inplace:
mesh.clip(normal=normal,origin=origin,inplace=True)
else:
mesh = mesh.clip(normal=normal,origin=origin,inplace=False)
if new_bounds[1]<old_bounds[1] and clip_upper:
#print('DEBUG: Clipping upper',new_bounds[1],old_bounds[1])
normal = [0,0,0]
normal[axis] = 1
origin = [0,0,0]
origin[axis] = new_bounds[1]
if inplace:
mesh.clip(normal=normal,origin=origin,inplace=True)
else:
mesh = mesh.clip(normal=normal,origin=origin,inplace=False)
if inplace:
return None
else:
return mesh
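
A short sketch tying the visu helpers together; it assumes pyvista is installed and reuses the hypothetical part and chunk objects from the sketches above:

import pyvista
plotter = pyvista.Plotter()
add_domain_bounds(plotter,bounds=(0,1,0,1,0,1))
add_particles(plotter,part,scalars='r',cmap='viridis')   # takes a Particles object now
mesh = field_to_pvmesh(chunk)
clip_out_of_bounds(mesh,(0,1,0,1,0,1),axis=0)            # clips mesh in x, in place
plotter.add_mesh(mesh)
plotter.show()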
# def common_mesh(gridg)
# import pyvista
# mesh = pyvista.UniformGrid()