import numpy as np


class Field3d:

    def __init__(self,data,origin,spacing):
        assert len(origin)==3, "'origin' must be of length 3"
        assert len(spacing)==3, "'spacing' must be of length 3"
        self.data = np.array(data)
        self.origin = tuple([float(x) for x in origin])
        self.spacing = tuple([float(x) for x in spacing])
        self.eps_collapse = 1e-8
        self._dim = None
        return

    def __str__(self):
        str = 'Field3d with\n'
        str+= ' dimension: {}, {}, {}\n'.format(*self.dim())
        str+= ' origin: {:G}, {:G}, {:G}\n'.format(*self.origin)
        str+= ' spacing: {:G}, {:G}, {:G}\n'.format(*self.spacing)
        str+= ' datatype: {}'.format(self.dtype())
        return str

    def __add__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            return Field3d(self.data+other.data,self.origin,self.spacing)
        else:
            return Field3d(self.data+other,self.origin,self.spacing)

    def __sub__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            return Field3d(self.data-other.data,self.origin,self.spacing)
        else:
            return Field3d(self.data-other,self.origin,self.spacing)

    def __mul__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            return Field3d(self.data*other.data,self.origin,self.spacing)
        else:
            return Field3d(self.data*other,self.origin,self.spacing)

    def __truediv__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            return Field3d(self.data/other.data,self.origin,self.spacing)
        else:
            return Field3d(self.data/other,self.origin,self.spacing)

    def __radd__(self,other):
        return Field3d(other+self.data,self.origin,self.spacing)

    def __rmul__(self,other):
        return Field3d(other*self.data,self.origin,self.spacing)

    def __pow__(self,other):
        return Field3d(self.data**other,self.origin,self.spacing)

    def __iadd__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            self.data += other.data
        else:
            self.data += other
        return self

    def __isub__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            self.data -= other.data
        else:
            self.data -= other
        return self

    def __imul__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            self.data *= other.data
        else:
            self.data *= other
        return self

    def __itruediv__(self,other):
        if isinstance(other,Field3d):
            assert self.has_same_grid(other), "Grid mismatch."
            self.data /= other.data
        else:
            self.data /= other
        return self

    # TBD: this should return another Field3d object
    # def __getitem__(self,val):
    #     assert isinstance(val,tuple) and len(val)==3, "Field3d must be indexed by [ii,jj,kk]."
    #     sl = []
    #     for x in val:
    #         if isinstance(x,int):
    #             lo,hi = x,x+1
    #             hi = hi if hi!=0 else None
    #             sl.append(slice(lo,hi))
    #         elif isinstance(x,slice):
    #             sl.append(x)
    #         else:
    #             raise TypeError("Trajectories can only be sliced by slice objects or integers.")
    #     return self.data[sl[0],sl[1],sl[2]]

    @classmethod
    def from_chunk(cls,chunk,gridg):
        '''Initialize Field3d from chunk data and global grid.'''
        xg,yg,zg = gridg
        ib,jb,kb = chunk['ibeg']-1, chunk['jbeg']-1, chunk['kbeg']-1
        dx,dy,dz = xg[1]-xg[0], yg[1]-yg[0], zg[1]-zg[0]
        xo,yo,zo = xg[ib]-chunk['ighost']*dx, yg[jb]-chunk['ighost']*dy, zg[kb]-chunk['ighost']*dz
        nx,ny,nz = chunk['data'].shape
        assert (chunk['nxl']+2*chunk['ighost'])==nx, "Invalid chunk data: nxl != chunk['data'].shape[0]"
        assert (chunk['nyl']+2*chunk['ighost'])==ny, "Invalid chunk data: nyl != chunk['data'].shape[1]"
        assert (chunk['nzl']+2*chunk['ighost'])==nz, "Invalid chunk data: nzl != chunk['data'].shape[2]"
        return cls(chunk['data'],origin=(xo,yo,zo),spacing=(dx,dy,dz))

    @classmethod
    def from_file(cls,file,name='Field3d'):
        import h5py
        is_open = isinstance(file,(h5py.File,h5py.Group))
        f = file if is_open else h5py.File(file,'r')
        g = f[name]
        origin = tuple(g['origin'])
        spacing = tuple(g['spacing'])
        data = g['data'][:]
        if not is_open:
            f.close()
        return cls(data,origin,spacing)

    @classmethod
    def allocate(cls,dim,origin,spacing,fill=None,dtype=np.float64):
        '''Allocates an empty field, or a field filled with 'fill'.'''
        assert isinstance(dim,(tuple,list,np.ndarray)) and len(dim)==3,\
            "'dim' must be a tuple/list of length 3."
        assert isinstance(origin,(tuple,list,np.ndarray)) and len(origin)==3,\
            "'origin' must be a tuple/list of length 3."
        assert isinstance(spacing,(tuple,list,np.ndarray)) and len(spacing)==3,\
            "'spacing' must be a tuple/list of length 3."
        if fill is None:
            data = np.empty(dim,dtype=dtype)
        else:
            data = np.full(dim,fill,dtype=dtype)
        return cls(data,origin,spacing)

    @classmethod
    def pseudo_field(cls,dim,origin,spacing):
        '''Creates a Field3d instance without allocating any memory.'''
        assert isinstance(dim,(tuple,list,np.ndarray)) and len(dim)==3,\
            "'dim' must be a tuple/list of length 3."
        assert isinstance(origin,(tuple,list,np.ndarray)) and len(origin)==3,\
            "'origin' must be a tuple/list of length 3."
        assert isinstance(spacing,(tuple,list,np.ndarray)) and len(spacing)==3,\
            "'spacing' must be a tuple/list of length 3."
        data = np.empty((0,0,0))
        r = cls(data,origin,spacing)
        r._dim = dim
        return r

    def save(self,file,name='Field3d',truncate=False):
        import h5py
        is_open = isinstance(file,(h5py.File,h5py.Group))
        flag = 'w' if truncate else 'a'
        f = file if is_open else h5py.File(file,flag)
        g = f.create_group(name)
        g.create_dataset('origin',data=self.origin)
        g.create_dataset('spacing',data=self.spacing)
        g.create_dataset('data',data=self.data)
        if not is_open:
            f.close()
        return

    def copy(self):
        return Field3d(self.data.copy(),self.origin,self.spacing)

    def pseudo_copy(self):
        return self.pseudo_field(self.dim(),self.origin,self.spacing)

    def insert_subfield(self,subfield):
        assert all([abs(subfield.spacing[ii]-self.spacing[ii])<self.eps_collapse for ii in range(3)]),\
            "Grid spacing of 'subfield' must match."
        # NOTE: the remainder of this method and the helper methods below (down to
        # distance_to_nearest_gridpoint) were lost in the source text. Their bodies
        # are reconstructed from how they are called elsewhere in this module and
        # should be read as a best-effort sketch, not the original implementation.
        idx_origin = [self.nearest_gridpoint(subfield.origin[ii],axis=ii) for ii in range(3)]
        assert all([idx_origin[ii]>=0 and idx_origin[ii]+subfield.dim(ii)<=self.dim(ii) for ii in range(3)]),\
            "'subfield' does not fit into the field."
        sl = tuple(slice(idx_origin[ii],idx_origin[ii]+subfield.dim(ii)) for ii in range(3))
        self.data[sl] = subfield.data
        return

    def dim(self,axis=None):
        '''Number of grid points, as a tuple (axis=None) or along a single axis.'''
        d = self._dim if self._dim is not None else self.data.shape
        if axis is None:
            return tuple(d)
        return d[axis]

    def dtype(self):
        return self.data.dtype

    def endpoint(self,axis):
        '''Coordinate of the last grid point along 'axis'.'''
        return self.origin[axis]+(self.dim(axis)-1)*self.spacing[axis]

    def has_same_spacing(self,other):
        return all([abs(self.spacing[ii]-other.spacing[ii])<self.eps_collapse for ii in range(3)])

    def has_same_grid(self,other):
        return (tuple(self.dim())==tuple(other.dim())
                and self.has_same_spacing(other)
                and all([abs(self.origin[ii]-other.origin[ii])<self.eps_collapse for ii in range(3)]))

    def nearest_gridpoint(self,coord,axis=None,lower=False):
        '''Index of the grid point nearest to 'coord' along 'axis'. With lower=True,
        the nearest grid point whose coordinate does not exceed 'coord'. With
        axis=None, 'coord' is a 3-tuple and a tuple of indices is returned.'''
        if axis is None:
            return tuple(self.nearest_gridpoint(coord[ii],axis=ii,lower=lower) for ii in range(3))
        rel = (coord-self.origin[axis])/self.spacing[axis]
        return int(np.floor(rel+self.eps_collapse)) if lower else int(round(rel))

    def distance_to_nearest_gridpoint(self,coord,axis=None,lower=False):
        '''Distance from 'coord' to the nearest grid point along 'axis' (to the
        nearest lower grid point if lower=True).'''
        if axis is None:
            return tuple(self.distance_to_nearest_gridpoint(coord[ii],axis=ii,lower=lower) for ii in range(3))
        val = (coord-self.origin[axis])%self.spacing[axis]
        if not lower and val>0.5*self.spacing[axis]:
            val = self.spacing[axis]-val
        return val

    def is_within_bounds(self,coord,axis=None):
        if axis is None:
            assert len(coord)==3, "If 'axis' is None, 'coord' must be a tuple/list of length 3."
            return tuple(self.is_within_bounds(coord[ii],axis=ii) for ii in range(3))
        assert axis<3, "'axis' must be one of 0,1,2."
        idx_nearest = self.nearest_gridpoint(coord,axis=axis)
        # NOTE: the tail of this method was lost in the source text; the check below
        # is reconstructed and may treat the boundary points differently than the
        # original did.
        if idx_nearest>0 and idx_nearest<self.dim(axis)-1:
            return True
        return False

    def shift(self,rel_shift,only_keep_interior=True):
        '''Shifts the field by a fraction of the grid spacing along each axis using
        linear interpolation between neighbouring grid points.
        NOTE: the original signature and preamble of this method were lost in the
        source text; the method name and the default of 'only_keep_interior' are
        assumptions, reconstructed from the surviving body.'''
        from scipy import ndimage
        assert all([rel_shift[ii]>=-1.0-self.eps_collapse and rel_shift[ii]<=1.0+self.eps_collapse for ii in range(3)]),\
            "'shift' must be in (-1.0,1.0). {}".format(rel_shift)
        data = self.data.copy()
        origin = list(self.origin)
        sl = 3*[slice(None)]
        for axis in range(3):
            if abs(rel_shift[axis])<self.eps_collapse:
                continue
            if rel_shift[axis]>0:
                w = rel_shift[axis] if rel_shift[axis]<=1.0 else 1.0
                weights = (1.0-w,w)
                data = ndimage.correlate1d(data,weights,axis=axis,mode='constant',cval=np.nan,origin=-1)
                origin[axis] += w*self.spacing[axis]
                if only_keep_interior:
                    sl[axis] = slice(0,-1)
            else:
                w = rel_shift[axis] if rel_shift[axis]>=-1.0 else -1.0
                weights = (-w,1.0+w)
                data = ndimage.correlate1d(data,weights,axis=axis,mode='constant',cval=np.nan,origin=0)
                origin[axis] += w*self.spacing[axis]
                if only_keep_interior:
                    sl[axis] = slice(1,None)
                    origin[axis] += self.spacing[axis]
        if only_keep_interior:
            data = data[tuple(sl)]
        return Field3d(data,origin,self.spacing)

    def relative_shift(self,field):
        '''Compute the relative shift (in terms of spacing) to shift self onto field.'''
        assert self.has_same_spacing(field), "spacing differs."
        rel_shift = [0.0,0.0,0.0]
        for axis in range(3):
            dist = field.origin[axis]-self.origin[axis]
            if abs(dist)>self.eps_collapse:
                rel_shift[axis] = dist/self.spacing[axis]
        return tuple(rel_shift)

    def change_grid(self,origin,spacing,dim,padding=None,numpad=1):
        assert all([origin[ii]>=self.origin[ii] for ii in range(0,3)]), "New origin is out of bounds."
        endpoint = [origin[ii]+(dim[ii]-1)*spacing[ii] for ii in range(0,3)]
        assert all([endpoint[ii]<=self.endpoint(ii) for ii in range(0,3)]), "New end point is out of bounds."
        # Allocate (possibly padded array);
        # the 'padding' argument shadows the padding() helper, so call it via self
        origin_pad,dim_pad,sl_out = self.padding(origin,spacing,dim,padding,numpad)
        data = np.zeros(dim_pad,dtype=self.data.dtype)
        # Trilinear interpolation
        if np.allclose(spacing,self.spacing):
            # spacing is the same: we can construct universal weights for the stencil
            i0,j0,k0 = self.nearest_gridpoint(origin,axis=None,lower=True)
            cx,cy,cz = [self.distance_to_nearest_gridpoint(origin[ii],axis=ii,lower=True)/self.spacing[ii] for ii in range(3)]
            c = self.weights_trilinear((cx,cy,cz))
            for ii in range(0,2):
                for jj in range(0,2):
                    for kk in range(0,2):
                        if c[ii,jj,kk]>self.eps_collapse:
                            data[sl_out] += c[ii,jj,kk]*self.data[
                                i0+ii:i0+ii+dim[0],
                                j0+jj:j0+jj+dim[1],
                                k0+kk:k0+kk+dim[2]]
        else:
            data_ref = data[sl_out]
            for ii in range(0,dim[0]):
                for jj in range(0,dim[1]):
                    for kk in range(0,dim[2]):
                        coord = (
                            origin[0]+ii*spacing[0],
                            origin[1]+jj*spacing[1],
                            origin[2]+kk*spacing[2])
                        data_ref[ii,jj,kk] = self.interpolate(coord)
        return Field3d(data,origin_pad,spacing)

    def interpolate(self,coord):
        assert all([coord[ii]>=self.origin[ii] for ii in range(0,3)]), "'coord' is out of bounds."
        assert all([coord[ii]<=self.endpoint(ii) for ii in range(0,3)]), "'coord' is out of bounds."
        i0,j0,k0 = self.nearest_gridpoint(coord,axis=None,lower=True)
        cx,cy,cz = [self.distance_to_nearest_gridpoint(coord[ii],axis=ii,lower=True)/self.spacing[ii] for ii in range(3)]
        c = self.weights_trilinear((cx,cy,cz))
        val = 0.0
        for ii in range(0,2):
            for jj in range(0,2):
                for kk in range(0,2):
                    if c[ii,jj,kk]>self.eps_collapse:
                        val += c[ii,jj,kk]*self.data[i0+ii,j0+jj,k0+kk]
        return val

    @staticmethod
    def padding(origin,spacing,dim,padding,numpad):
        if isinstance(numpad,int):
            numpad = np.full(3,numpad,dtype=int)
        else:
            numpad = np.array(numpad,dtype=int)
        assert len(numpad)==3, "'numpad' must be either an integer or tuple/list of length 3."
        origin_pad = np.array(origin)
        dim_pad = np.array(dim)
        sl_out = [slice(None),slice(None),slice(None)]
        if padding is not None:
            if padding=='before':
                dim_pad += numpad
                origin_pad -= numpad*spacing
                for axis in range(3):
                    sl_out[axis] = slice(numpad[axis],None)
            elif padding=='after':
                dim_pad += numpad
                for axis in range(3):
                    sl_out[axis] = slice(0,-numpad[axis])
            elif padding=='both':
                dim_pad += 2*numpad
                origin_pad -= numpad*spacing
                for axis in range(3):
                    sl_out[axis] = slice(numpad[axis],-numpad[axis])
            else:
                raise ValueError("'padding' must either be None or one of {'before','after','both'}.")
        sl_out = tuple(sl_out)
        origin_pad = tuple(origin_pad)
        dim_pad = tuple(dim_pad)
        return (origin_pad,dim_pad,sl_out)

    def weights_trilinear(self,rel_dist):
        assert len(rel_dist)==3, "len(rel_dist) must be 3."
        cx,cy,cz = rel_dist
        if cx<0.0 and cx>-self.eps_collapse: cx=0.0
        if cy<0.0 and cy>-self.eps_collapse: cy=0.0
        if cz<0.0 and cz>-self.eps_collapse: cz=0.0
        if cx>1.0 and cx<1.0+self.eps_collapse: cx=1.0
        if cy>1.0 and cy<1.0+self.eps_collapse: cy=1.0
        if cz>1.0 and cz<1.0+self.eps_collapse: cz=1.0
        assert cx>=0.0 and cy>=0.0 and cz>=0.0, "'rel_dist' must be >=0"
        assert cx<=1.0 and cy<=1.0 and cz<=1.0, "'rel_dist' must be <=1"
        c = np.zeros((2,2,2))
        c[0,0,0] = 1.0-(cx+cy+cz)+(cx*cy+cx*cz+cy*cz)-(cx*cy*cz)
        c[1,0,0] = cx-(cx*cy+cx*cz)+(cx*cy*cz)
        c[0,1,0] = cy-(cx*cy+cy*cz)+(cx*cy*cz)
        c[0,0,1] = cz-(cx*cz+cy*cz)+(cx*cy*cz)
        c[1,1,0] = (cx*cy)-(cx*cy*cz)
        c[1,0,1] = (cx*cz)-(cx*cy*cz)
        c[0,1,1] = (cy*cz)-(cx*cy*cz)
        c[1,1,1] = (cx*cy*cz)
        return c

    def set_writable(self,flag):
        self.data.setflags(write=flag)
        return

    def to_vtk(self,deep=False):
        import pyvista as pv
        mesh = pv.UniformGrid()
        mesh.dimensions = self.dim(axis=None)
        mesh.origin = self.origin
        mesh.spacing = self.spacing
        # order needs to be F no matter how array is stored in memory
        if deep:
            mesh.point_arrays['data'] = self.data.flatten(order='F')
        else:
            mesh.point_arrays['data'] = self.data.ravel(order='F')
        return mesh

    def vtk_contour(self,val,deep=False):
        if not isinstance(val,(tuple,list)):
            val = [val]
        return self.to_vtk(deep=deep).contour(val)

    def vtk_slice(self,normal,origin,deep=False):
        assert (normal in ('x','y','z') or (isinstance(normal,(tuple,list)) and len(normal)==3)),\
            "'normal' must be 'x','y','z' or tuple of length 3."
        assert isinstance(origin,(tuple,list)) and len(origin)==3,\
            "'origin' must be tuple of length 3."
        return self.to_vtk(deep=deep).slice(normal=normal,origin=origin)


def gaussian_filter_umean_channel(array,spacing,sigma,truncate=4.0):
    '''Applies a Gaussian filter to a numpy array of dimension (1,ny,1) which
    contains the mean streamwise velocity of the channel (or possibly sth else).
    Since yu[0] = c and yu[-1] = d, we can use scipy's mirror settings and
    don't need ghost cells.'''
    from scipy import ndimage
    assert array.ndim==3, "Expected an array with three dimensions/axes."
    assert array.shape[0]==1 and array.shape[1]>1 and array.shape[2]==1,\
        "Expected an array with shape (1,ny,1)."
    sigma_img = sigma/spacing
    array = ndimage.gaussian_filter1d(array,sigma_img,axis=1,truncate=truncate,mode='mirror')
    return array
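
# Illustrative usage sketch (not part of the original module): it shows how a
# Field3d is allocated, combined arithmetically, probed with interpolate() and
# resampled with change_grid(). The function name '_example_field3d_usage' is
# made up here, and the example assumes the helper methods reconstructed above
# behave as described.
def _example_field3d_usage():
    '''Hedged example: allocate a field, do arithmetic, interpolate, resample.'''
    # 8x8x8 field on a unit-spaced grid, filled with a linear ramp along x.
    fld = Field3d.allocate(dim=(8,8,8),origin=(0.0,0.0,0.0),spacing=(1.0,1.0,1.0),fill=0.0)
    fld.data[:] = np.arange(8,dtype=np.float64)[:,None,None]
    # Arithmetic operators work element-wise and return new Field3d objects.
    fld2 = 2.0*fld+1.0
    # Trilinear interpolation at an off-grid point (ramp value 2*2.5+1 = 6.0).
    val = fld2.interpolate((2.5,3.0,3.0))
    # Extract a 4x4x4 sub-grid starting at (1,1,1), padded by one (zero-filled)
    # layer on each side.
    sub = fld2.change_grid(origin=(1.0,1.0,1.0),spacing=(1.0,1.0,1.0),dim=(4,4,4),
                           padding='both',numpad=1)
    return val,sub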
class VoxelThreshold:

    def __init__(self,data,threshold,invert=False):
        assert isinstance(data,np.ndarray),\
            "'data' must be a numpy array."
        self._dim = data.shape
        self._ndim = data.ndim
        if invert:
            self.data = data<threshold
        else:
            self.data = data>=threshold

    @classmethod
    def from_field(cls,fld3d,threshold,invert=False):
        return cls(fld3d.data,threshold,invert=invert)

    def fill_holes(self,periodicity=(False,False,False)):
        '''Fills topological holes in threshold regions.'''
        assert all([isinstance(x,(bool,int)) for x in periodicity]),\
            "'periodicity' requires bool values."
        from scipy import ndimage
        binarr = ndimage.binary_fill_holes(self.data)
        for axis in range(self._ndim):
            if periodicity[axis]:
                n = binarr.shape[axis]
                binarr = np.roll(binarr,n//2,axis=axis)
                binarr = ndimage.binary_fill_holes(binarr)
                binarr = np.roll(binarr,-n//2,axis=axis)
        self.data = binarr
        return

    def probe(self,idx):
        '''Returns whether the point at the given index lies inside the threshold region.'''
        return self.data[tuple(idx)]

    def volume(self):
        '''Returns volume of region above threshold.'''
        return np.sum(self.data)


class ConnectedRegions:

    def __init__(self,binarr,periodicity,connect_diagonals=False,fill_holes=False,bytes_label=32):
        assert isinstance(binarr,np.ndarray) and binarr.dtype==np.dtype('bool'),\
            "'binarr' must be a numpy array of dtype('bool')."
        assert all([isinstance(x,(bool,int)) for x in periodicity]),\
            "'periodicity' requires bool values."
        assert bytes_label in (8,16,32,64),\
            "'bytes_label' must be one of {8,16,32,64}."
        self._dim = binarr.shape
        self._ndim = binarr.ndim
        assert self._ndim in (2,3),\
            "'binarr' must be either two or three dimensional."
        assert len(periodicity)==self._ndim,\
            "Length of 'periodicity' must match number of dimensions of data."
        from scipy import ndimage
        # Construct connectivity stencil
        if self._ndim==2:
            connectivity = np.ones((3,3),dtype='bool')
            if not connect_diagonals:
                connectivity[0,0] = False
                connectivity[0,2] = False
                connectivity[2,0] = False
                connectivity[2,2] = False
        else:
            connectivity = np.ones((3,3,3),dtype='bool')
            if not connect_diagonals:
                connectivity[0,0,0] = False
                connectivity[2,0,0] = False
                connectivity[0,2,0] = False
                connectivity[0,0,2] = False
                connectivity[2,2,0] = False
                connectivity[0,2,2] = False
                connectivity[2,0,2] = False
                connectivity[2,2,2] = False
        # Compute labels:
        # this does not take into account periodic wrapping
        dtype_label = np.dtype('uint'+str(bytes_label))
        self.label = np.empty(self._dim,dtype=dtype_label)
        ndimage.label(binarr,structure=connectivity,output=self.label)
        self.count = np.max(self.label)
        # Merge labels if there are periodic overlaps
        map_tgt = np.array(range(0,self.count+1),dtype=dtype_label)
        for axis in range(self._ndim):
            if not periodicity[axis]:
                continue
            # Merge the first and last plane and compute connectivity
            sl = self._ndim*[slice(None)]
            sl[axis] = (-1,0)
            binarr_ = binarr[tuple(sl)]
            label_ = np.empty(binarr_.shape,dtype=dtype_label)
            ndimage.label(binarr_,structure=connectivity,output=label_)
            for val_ in np.unique(label_):
                # Get all global labels which are associated to a region
                # connected over the boundary
                global_labels = list(np.unique(self.label[tuple(sl)][label_==val_]))
                # If there is only one label, nothing needs to be done
                if len(global_labels)==1:
                    continue
                # Determine target label:
                # this needs to be done recursively because the original
                # target may already be reassigned
                tgt = global_labels[0]
                while tgt!=map_tgt[tgt]:
                    tgt = map_tgt[tgt]
                map_tgt[global_labels[1:]] = tgt
        # Remove gaps from target mapping
        map_tgt = np.unique(map_tgt,return_inverse=True)[1]
        # Remap labels
        self.label = map_tgt[self.label]
        self.count = np.max(map_tgt)
    @classmethod
    def from_field(cls,fld3d,threshold,periodicity,connect_diagonals=False,bytes_label=32,invert=False):
        voxthr = VoxelThreshold.from_field(fld3d,threshold,invert=invert)
        return cls.from_voxelthresh(voxthr,periodicity,
            connect_diagonals=connect_diagonals,
            bytes_label=bytes_label)

    @classmethod
    def from_voxelthresh(cls,voxthr,periodicity,connect_diagonals=False,bytes_label=32):
        return cls(voxthr.data,periodicity,
            connect_diagonals=connect_diagonals,
            bytes_label=bytes_label)

    def volume(self,label=None):
        '''Returns volume of labeled regions.
        If 'label' is None all volumes are returned including the volume of the
        background region. The array is sorted by labels, i.e. vol[0] is the
        volume of the background region, vol[1] the volume of label 1, etc.
        If 'label' is an integer value, only the volume of the corresponding
        region is returned.
        Note: it is more efficient to retrieve all volumes at once than querying
        single labels.'''
        if label is None:
            return np.bincount(self.label.ravel())
        else:
            return np.sum(self.label==label)

    def volume_domain(self):
        '''Returns volume of entire domain. Should be equal to sum(volume()).'''
        return np.prod(self._dim)

    def labels_by_volume(self,descending=True):
        '''Returns labels of connected regions sorted by volume.'''
        labels = np.argsort(self.volume()[1:])+1
        if descending:
            labels = labels[::-1]
        return labels

    def discard_regions(self,selection):
        selection = self._parse_selection(selection)
        # Map tagged regions to zero in order to discard them
        map_tgt = np.array(range(0,self.count+1),dtype=self.label.dtype)
        map_tgt[selection] = 0
        # Remove gaps from target mapping
        map_tgt = np.unique(map_tgt,return_inverse=True)[1]
        # Discard regions
        self.label = map_tgt[self.label]
        self.count = np.max(map_tgt)

    def probe(self,idx):
        '''Returns label for given index.'''
        return self.label[tuple(idx)]

    def vtk_contour(self,fld3,val,selection):
        '''Computes contours of a Field3d only within selected structures.'''
        assert isinstance(fld3,Field3d), "'fld3' must be a Field3d instance."
        assert tuple(self._dim)==tuple(fld3.dim()),\
            "'fld3' must be of dimension {}.".format(self._dim)
        selection = self._parse_selection(selection)
        from scipy import ndimage
        # Create binary map of selection
        map_tgt = np.zeros(self.count+1,dtype='bool')
        map_tgt[selection] = True
        binary_map = map_tgt[self.label]
        # Add an extra cell to get the contour interpolation right
        print(np.sum(binary_map))
        binary_map = ndimage.binary_dilation(binary_map)
        print(np.sum(binary_map))
        # Extract the subfield
        fld_con = fld3.copy()
        fld_con.data[~binary_map] = np.nan
        # Compute the contour
        return fld_con.vtk_contour(val)

    def _parse_selection(self,selection):
        dtype = self.label.dtype
        if np.issubdtype(type(selection),np.integer):
            return np.array(selection,dtype=dtype)
        elif isinstance(selection,(list,tuple,np.ndarray)):
            selection = np.array(selection)
            if selection.dtype==np.dtype('bool'):
                assert selection.ndim==1 and selection.shape[0]==self.count+1,\
                    "Boolean indexing must provide count+1 values."
            else:
                selection = selection.astype(dtype)
                assert np.max(selection)<=self.count and np.min(selection)>=0,\
                    "Entry in selection is out-of-bounds."
            return selection
        else:
            raise ValueError('Invalid input. Accepting int,list,tuple,ndarray.')
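
# Illustrative usage sketch (not part of the original module): threshold a scalar
# Field3d, label the connected regions above the threshold with periodic wrapping
# in x and z, and keep only the three largest regions. The function name
# '_example_connected_regions' is made up here.
def _example_connected_regions():
    '''Hedged example: VoxelThreshold + ConnectedRegions on a random field.'''
    rng = np.random.default_rng(0)
    fld = Field3d(rng.standard_normal((16,16,16)),origin=(0.0,0.0,0.0),spacing=(1.0,1.0,1.0))
    # Voxels above the threshold ...
    voxthr = VoxelThreshold.from_field(fld,threshold=1.0)
    # ... grouped into connected regions, treating x and z as periodic.
    regions = ConnectedRegions.from_voxelthresh(voxthr,periodicity=(True,False,True))
    # Discard everything but the three largest regions (labels sorted by volume).
    labels = regions.labels_by_volume()
    if regions.count>3:
        regions.discard_regions(labels[3:])
    return regions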
class ChunkIterator:
    '''Iterates through all chunks. 'snapshot' must be an instance of a class
    which returns a Field3d from the method call
    snapshot.field_chunk(rank,key,keep_ghost=keep_ghost).
    One example implementation is UCFSnapshot from suspendtools.ucf.'''

    def __init__(self,snapshot,key,keep_ghost=True):
        self.snapshot = snapshot
        self.key = key
        self.keep_ghost = keep_ghost
        self.iter_rank = 0

    def __iter__(self):
        self.iter_rank = 0
        return self

    def __next__(self):
        # NOTE: the remainder of this method was lost in the source text. The loop
        # bound below assumes the snapshot exposes its number of chunks/ranks as
        # 'nranks'; adjust this to the actual attribute of the snapshot class.
        if self.iter_rank < self.snapshot.nranks:
            field = self.snapshot.field_chunk(self.iter_rank,self.key,
                keep_ghost=self.keep_ghost)
            self.iter_rank += 1
            return field
        raise StopIteration
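
# Illustrative usage sketch (not part of the original module): ChunkIterator only
# needs a duck-typed 'snapshot' object. The stub below is hypothetical (a real
# implementation would be e.g. UCFSnapshot from suspendtools.ucf); its 'nranks'
# attribute matches the name assumed in __next__ above.
class _StubSnapshot:
    '''Minimal stand-in for a snapshot: two ranks, each holding a 4x4x4 chunk.'''
    nranks = 2

    def field_chunk(self,rank,key,keep_ghost=True):
        # A real snapshot would read variable 'key' of MPI rank 'rank' from disk;
        # here we just return a small constant Field3d stacked along z.
        return Field3d.allocate((4,4,4),origin=(0.0,0.0,rank*4.0),
                                spacing=(1.0,1.0,1.0),fill=float(rank))


def _example_chunk_iteration():
    '''Hedged example: loop over all chunks of a (stub) snapshot.'''
    total = 0.0
    for chunk in ChunkIterator(_StubSnapshot(),key='u',keep_ghost=False):
        total += np.sum(chunk.data)
    return total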