Implemented Features3d; needs some testing and maybe optimization; still includes debug output

This commit is contained in:
Michael Krayer 2021-08-09 16:21:35 +02:00
parent 56b413cfed
commit 57410abc99
1 changed file with 323 additions and 273 deletions

field.py

@@ -616,6 +616,8 @@ class Field3d:
if deep:
mesh.point_arrays['data'] = self.data.flatten(order='F')
else:
if not self.data.flags['F_CONTIGUOUS']:
self.data = np.asfortranarray(self.data)
mesh.point_arrays['data'] = self.data.ravel(order='F')
return mesh
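# Minimal standalone sketch of the deep/shallow distinction above: flatten(order='F')
# always copies, while ravel(order='F') returns a view once the array is already
# Fortran-contiguous, which is why the shallow branch converts first.
import numpy as np
a = np.asfortranarray(np.arange(6, dtype=float).reshape(2, 3))
assert np.shares_memory(a, a.ravel(order='F'))        # view, no copy
assert not np.shares_memory(a, a.flatten(order='F'))  # always a copy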
@@ -645,97 +647,292 @@ def gaussian_filter_umean_channel(array,spacing,sigma,truncate=4.0):
return array
class BinaryFieldNd:
def __init__(self,input):
assert isinstance(input,np.ndarray) and input.dtype==np.dtype('bool'),\
"'input' must be a numpy array of dtype('bool')."
self.data = input
self._dim = input.shape
self._ndim = input.ndim
self.labels = None
self.nlabels = 0
self.wrap = tuple(self._ndim*[None])
self._featsl = None
self.set_structure(False)
self.set_periodicity(self._ndim*[False])
def set_periodicity(self,periodicity):
assert all([isinstance(x,(bool,int)) for x in periodicity]),\
"'periodicity' requires bool values."
assert len(periodicity)==self._ndim,\
"Number of entries in 'periodicity' must match dimension of binary field."
class Features3d:
def __init__(self,input,threshold,origin,spacing,periodicity,connect_diagonals=True,invert=False,has_ghost=False):
assert len(origin)==3, "'origin' must be of length 3"
assert len(spacing)==3, "'spacing' must be of length 3"
assert len(periodicity)==3, "'periodicity' must be of length 3"
assert isinstance(input,np.ndarray), "'input' must be numpy.ndarray."
# Import libraries
import pyvista as pv
# Assign basic properties to class variables
self.origin = tuple(float(x) for x in origin)
self.spacing = tuple(float(x) for x in spacing)
self.periodicity = tuple(bool(x) for x in periodicity)
self._invert = invert
self._sldata = tuple(slice(0,-1) if x else slice(None) for x in periodicity)
# If regions are supposed to be inverted, i.e. the interior consists of values
# smaller than the threshold instead of larger, change the sign of the array.
sign_invert = -1 if invert else +1
self._threshold = sign_invert*threshold # flip the threshold together with the data
# '_data' is the array which is to be triangulated. Here one trailing ghost cell is
# required to get closed surfaces. The data will always be copied since it probably
# needs to be copied for vtk anyway (needs FORTRAN contiguous array) and this way
# there will never be side effects on the input data.
if has_ghost:
self._data = np.asfortranarray(sign_invert*input)
else:
pw = tuple((0,1) if x else (0,0) for x in periodicity)
self._data = np.asfortranarray(np.pad(sign_invert*input,pw,mode='wrap'))
# 'binary' is a BinaryField which determines the connectivity of regions
# and provides morphological manipulations.
self.binary = BinaryFieldNd(self._data>=self._threshold,periodicity,
connect_diagonals=connect_diagonals,
deep=False,has_ghost=True) # ghost layer is already present in self._data
# Compute features in 'binary' by labeling.
self.binary.label()
# Set arrays produced by triangulation to None
self._points = None
self._faces = None
@classmethod
def from_field(cls,fld,threshold,periodicity,invert=False,has_ghost=False):
return cls(fld.data,threshold,fld.origin,fld.spacing,periodicity,invert=invert,has_ghost=has_ghost)
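# Construction sketch (synthetic data, standalone; values chosen for illustration):
# features are the connected regions where the field exceeds the threshold.
import numpy as np
x, y, z = np.meshgrid(*3*[np.linspace(0, 2*np.pi, 64, endpoint=False)], indexing='ij')
data = np.sin(x)*np.sin(y)*np.sin(z)
feat = Features3d(data, threshold=0.5, origin=(0., 0., 0.),
                  spacing=3*[2*np.pi/64], periodicity=(True, True, True))
print(feat.nfeatures)  # number of connected regions above the threshold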
@property
def threshold(self): return self._threshold
@property
def points(self): return self._points
@property
def faces(self): return self._faces
@property
def nfeatures(self): return self.binary.nlabels
def fill_holes(self):
from scipy import ndimage
self.binary.fill_holes()
li = ndimage.binary_erosion(self.binary._data)
self._data[li] = self._threshold + 1.0 # arbitrary, only needs to be above the threshold (sign-safe)
if self._faces is not None:
self.triangulate(contour_method=self.__TRI_CONTMETH,
cellvol_normal_component=self.__TRI_NORMCOMP)
def reduce_noise(self,threshold=1e-5,is_relative=True):
'''Removes all objects with smaller (binary-based) volume than a threshold.'''
if is_relative:
threshold = threshold*self.binary.volume_domain()
vol = self.binary.volumes()
li = vol<threshold
mask = self.binary.discard_features(li,return_mask=True)
self._data[mask] = np.nan#self._threshold
if self._faces is not None:
self.triangulate(contour_method=self.__TRI_CONTMETH,
cellvol_normal_component=self.__TRI_NORMCOMP)
return
def set_structure(self,connect_diagonals):
def triangulate(self,contour_method='flying_edges',cellvol_normal_component=2):
import pyvista as pv
from scipy import ndimage
from time import time
# Save arguments in case we need to recreate triangulation later on
self.__TRI_CONTMETH = contour_method
self.__TRI_NORMCOMP = cellvol_normal_component
# Wrap data with pyvista/VTK
datavtk = pv.UniformGrid()
datavtk.dimensions = self._data.shape
datavtk.origin = self.origin
datavtk.spacing = self.spacing
datavtk.point_arrays['data'] = self._data.ravel('F')
# Compute full contour: ensure we only have triangles to efficiently extract points
# later on.
t = time()
contour_ = datavtk.contour([self._threshold],method=contour_method,compute_scalars=False)
assert contour_.is_all_triangles(), "Contouring produced non-triangle cells."
print('CONTOUR:',time()-t)
# Interpolate labels from binary array: first the array needs to be
# dilated to ensure that we get proper values for the cell vertex interpolation.
t = time()
# labels_ = ndimage.grey_dilation(self.binary._labels,size=(3,3,3),mode='wrap')
labels_ = ndimage.maximum_filter(self.binary._labels,size=(3,3,3),mode='wrap')
print('DILATION:',time()-t)
# Labels are interpolated on points, and then the first point of each cell determines
# the value for this cell. This allows us to skip cell center computation and reduce
# the number of points which need to be interpolated. ndimage's 'map_coordinates' is
# the fastest interpolation routine I could find, but coordinates need to be provided
# as pixel values.
t = time()
pixel_coords = (contour_.points-self.origin)/self.spacing
point_val = ndimage.map_coordinates(labels_,pixel_coords.transpose(),order=0,mode='grid-wrap',prefilter=False)
cell_val = point_val[contour_.faces.reshape(contour_.n_faces,4)[:,1]]
print('INTERPOLATION:',time()-t)
# While we have the full contour in memory, compute the area and volume per cell. For
# the volume computation, an arbitrary component of the normal has to be chosen which
# defaults to the z-component and is set by 'cellvol_normal_component'.
t = time()
self._cell_areas = contour_.compute_cell_sizes(length=False,area=True,volume=False)["Area"]
self._cell_volumes = contour_.cell_normals[:,cellvol_normal_component]*\
contour_.cell_centers().points[:,cellvol_normal_component]*self._cell_areas
print('CELLPROPERTIES:',time()-t)
# Now the label is known per cell. We only need to find all cells with the same label
# and group them. Internally we will store the points in one array and the cells in
# nlabel tuples of ndarrays.
t = time()
offset = np.cumsum(np.bincount(cell_val))[1:-1]
ind = np.argsort(cell_val)
self._offset = offset
self._faces = contour_.faces.reshape(contour_.n_faces,4)[ind,:]
self._points = contour_.points
self._cell_areas = self._cell_areas[ind]
self._cell_volumes = self._cell_volumes[ind]
print('FINALIZING:',time()-t)
return
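# Standalone sketch of the label lookup used above: physical points are converted to
# pixel coordinates and sampled with order=0 (nearest neighbour), so each contour
# vertex simply inherits the label of the voxel it falls into.
import numpy as np
from scipy import ndimage
labels = np.zeros((4, 4, 4), dtype=int); labels[2:, 2:, 2:] = 1
origin, spacing = np.array([0., 0., 0.]), np.array([0.5, 0.5, 0.5])
points = np.array([[1.3, 1.2, 1.4], [0.1, 0.2, 0.1]])   # physical coordinates
pixel_coords = (points - origin)/spacing
print(ndimage.map_coordinates(labels, pixel_coords.T, order=0,
                              mode='grid-wrap', prefilter=False))   # -> [1 0]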
@property
def faces(self): return np.split(self._faces,self._offset)
@property
def points(self): return self._points
@property
def cell_areas(self): return np.split(self._cell_areas,self._offset)
@property
def cell_volumes(self): return np.split(self._cell_volumes,self._offset)
def area(self,feature):
'''Returns the surface area of feature. If feature is None, total surface
area of all features is returned.'''
if feature is None:
return np.sum(self._cell_areas)
else:
return np.sum(self.cell_areas[feature-1])
def areas(self):
'''Returns a tuple with surface areas of all features.'''
return tuple(np.sum(x) for x in self.cell_areas)
def volume(self,feature,method='triangulation'):
'''Returns volume enclosed by feature. If feature is None, total volume of
all features is returned.'''
if method=='triangulation':
if self._faces is None:
self.triangulate()
if feature is None:
return np.sum(self._cell_volumes)
else:
return np.sum(self.cell_volumes[feature-1])
elif method=='binary':
return self.binary.volume(feature)
else:
raise ValueError("Invalid method. Available methods: 'triangulation','binary'")
def volumes(self,method='triangulation'):
if method=='triangulation':
if self._faces is None:
self.triangulate()
return tuple(np.sum(x) for x in self.cell_volumes)
elif method=='binary':
return self.binary.volumes()
else:
raise ValueError("Invalid method. Available methods: 'triangulation','binary'")
def which_feature(self,coords,method='binary'):
coords = np.array(coords)
assert coords.ndim==2 and coords.shape[1]==3, "'coords' need to be provided as Nx3 array."
if method=='binary':
# nearest-grid-point lookup in the label field (untested sketch)
idx = np.round((coords-self.origin)/self.spacing).astype(int)
return np.array([self.binary.probe(i,probe_label=True) for i in idx])
# elif method=='triangulation':
return
def to_vtk(self,label,reduce_points=False):
if self._faces is None:
raise RuntimeError('Features have not been triangulated yet. Call triangulate() method first.')
import pyvista as pv
faces_lab = self.faces[label-1] # faces of the requested feature only
if reduce_points:
nfaces_ = faces_lab.shape[0]
ind,inv = np.unique(faces_lab[:,1:4],return_inverse=True)
faces_ = np.zeros((nfaces_,4),dtype=faces_lab.dtype)
faces_[:,0] = 3
faces_[:,1:4] = inv.reshape(nfaces_,3)
points_ = self._points[ind,:]
return pv.PolyData(points_,faces_)
else:
return pv.PolyData(self._points,faces_lab)
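# End-to-end sketch on synthetic data (uses only the API defined above; numbers are
# approximate): triangulate once, then query per-feature areas/volumes and export.
import numpy as np
x, y, z = np.meshgrid(*3*[np.linspace(-1, 1, 32)], indexing='ij')
sphere = 0.5 - np.sqrt(x**2 + y**2 + z**2)   # single feature: ball of radius 0.5
feat = Features3d(sphere, 0.0, origin=(-1., -1., -1.), spacing=3*[2/31],
                  periodicity=(False, False, False))
feat.triangulate()
print(feat.nfeatures, feat.areas())   # surface area should approach 4*pi*0.5**2
print(feat.volumes())                 # magnitude should approach (4/3)*pi*0.5**3
surf = feat.to_vtk(1, reduce_points=True)   # pyvista PolyData of feature 1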
class BinaryFieldNd:
def __init__(self,input,periodicity,connect_diagonals=False,deep=False,has_ghost=False):
assert isinstance(input,np.ndarray) and input.dtype==bool,\
"'input' must be a numpy array of type bool."
assert all([isinstance(x,(bool,int)) for x in periodicity]),\
"'periodicity' requires bool values."
assert len(periodicity)==input.ndim,\
"Number of entries in 'periodicity' must match dimension of binary field."
from scipy import ndimage
if has_ghost and deep:
self._data = input.copy()
elif has_ghost:
self._data = input
else:
pw = tuple((0,1) if x else (0,0) for x in periodicity)
self._data = np.pad(input,pw,mode='wrap')
self._sldata = tuple(slice(0,-1) if x else slice(None) for x in periodicity)
self._dim = self._data.shape
self._ndim = self._data.ndim
self._labels = None
self.nlabels = None
self.wrap = tuple(self._ndim*[None])
self.periodicity = tuple(bool(x) for x in periodicity)
if connect_diagonals:
self.structure = ndimage.generate_binary_structure(self._ndim,self._ndim)
else:
self.structure = ndimage.generate_binary_structure(self._ndim,1)
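# The two connectivity choices above in isolation: connectivity 1 joins only face
# neighbours (6 in 3D), full connectivity also joins edge and corner neighbours (26).
from scipy import ndimage
faces_only = ndimage.generate_binary_structure(3, 1)
full_conn = ndimage.generate_binary_structure(3, 3)
print(faces_only.sum() - 1, full_conn.sum() - 1)   # -> 6 26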
def enable_diagonal_connections(self): self.set_structure(True)
@property
def data(self):
return self._data[self._sldata]
def disable_diagonal_connections(self): self.set_structure(False)
@property
def labels(self):
if self._labels is None:
return None
return self._labels[self._sldata]
def label(self):
'''Labels connected regions in binary fields.'''
from scipy import ndimage
if any(self.periodicity):
self.labels,self.nlabels,self.wrap = self._labels_periodic()
self._labels,self.nlabels,self.wrap = self._labels_periodic()
else:
self.labels,self.nlabels = ndimage.label(self.data,structure=self.structure)
self._featsl = ndimage.find_objects(self.labels)
self._labels,self.nlabels = ndimage.label(self._data,structure=self.structure)
def _labels_periodic(self,map_to_zero=False):
'''Label features in an array while taking into account periodic wrapping.
If map_to_zero=True, every feature which overlaps or is attached to the
periodic boundary will be removed.'''
from scipy import ndimage
# Pad input data
if map_to_zero:
pw = tuple((1,1) if x else (0,0) for x in self.periodicity)
sl_pad = tuple(slice(1,-1) if x else slice(None) for x in self.periodicity)
else:
pw = tuple((0,1) if x else (0,0) for x in self.periodicity)
sl_pad = tuple(slice(0,-1) if x else slice(None) for x in self.periodicity)
data_ = np.pad(self.data,pw,mode='wrap')
# Compute labels on padded array
labels_,nlabels_ = ndimage.label(data_,structure=self.structure)
labels_,nlabels_ = ndimage.label(self._data,structure=self.structure)
# Get a mapping of labels which differ at periodic overlap
map_ = np.array(range(nlabels_+1),dtype=labels_.dtype)
wrap_ = self._ndim*[None]
for axis in range(self._ndim):
if not self.periodicity[axis]: continue
if map_to_zero:
sl_lo = tuple(slice(0,2) if ii==axis else slice(None) for ii in range(self._ndim))
sl_hi = tuple(slice(-2,None) if ii==axis else slice(None) for ii in range(self._ndim))
lab_lo = labels_[sl_lo]
lab_hi = labels_[sl_hi]
li = (lab_lo!=lab_hi)
for source_ in np.unique(lab_lo[li]):
map_[source_] = 0
for source_ in np.unique(lab_hi[li]):
map_[source_] = 0
else:
sl_lo = tuple(slice(0,1) if ii==axis else slice(None) for ii in range(self._ndim))
sl_hi = tuple(slice(-1,None) if ii==axis else slice(None) for ii in range(self._ndim))
sl_pre = tuple(slice(-2,-1) if ii==axis else slice(None) for ii in range(self._ndim))
lab_lo = labels_[sl_lo]
lab_hi = labels_[sl_hi]
lab_pre = np.unique(labels_[sl_pre]) # all labels in last (unwrapped) slice
# Initialize array to keep track of wrapping
wrap_[axis] = np.zeros(nlabels_+1,dtype=bool)
# Determine new label and map
lab_new = np.minimum(lab_lo,lab_hi)
for lab_ in [lab_lo,lab_hi]:
li = (lab_!=lab_new)
lab_li = lab_[li]
lab_new_li = lab_new[li]
for idx_ in np.unique(lab_li,return_index=True)[1]:
source_ = lab_li[idx_] # the label to be changed
target_ = lab_new_li[idx_] # the label which will be newly assigned
sl_lo = tuple(slice(0,1) if ii==axis else slice(None) for ii in range(self._ndim))
sl_hi = tuple(slice(-1,None) if ii==axis else slice(None) for ii in range(self._ndim))
sl_pre = tuple(slice(-2,-1) if ii==axis else slice(None) for ii in range(self._ndim))
lab_lo = labels_[sl_lo]
lab_hi = labels_[sl_hi]
lab_pre = np.unique(labels_[sl_pre]) # all labels in last (unwrapped) slice
# Initialize array to keep track of wrapping
wrap_[axis] = np.zeros(nlabels_+1,dtype=bool)
# Determine new label and map
lab_new = np.minimum(lab_lo,lab_hi)
for lab_ in [lab_lo,lab_hi]:
li = (lab_!=lab_new) #if not map_to_zero else (lab_!=0)
lab_li = lab_[li]
lab_new_li = lab_new[li]
for idx_ in np.unique(lab_li,return_index=True)[1]:
source_ = lab_li[idx_] # the label to be changed
target_ = lab_new_li[idx_] # the label which will be newly assigned
if map_to_zero and source_ in lab_pre:
map_[source_] = 0
map_[target_] = 0
else:
while target_ != map_[target_]: # map it recursively
target_ = map_[target_]
map_[source_] = target_
@@ -744,12 +941,13 @@ class BinaryFieldNd:
# Remove gaps from target mapping
idx_,map_ = np.unique(map_,return_index=True,return_inverse=True)[1:3]
# Relabel and remove padding
labels_ = map_[labels_[sl_pad]]
labels_ = map_[labels_]
nlabels_ = np.max(map_)
assert nlabels_==len(idx_)-1, "DEBUG assertion"
for axis in range(self._ndim):
if wrap_[axis] is not None:
wrap_[axis] = wrap_[axis][idx_]
self.wrap = tuple(None if x is None else x[idx_] for x in self.wrap)
# for axis in range(self._ndim):
# if wrap_[axis] is not None:
# wrap_[axis] = wrap_[axis][idx_]
return labels_,nlabels_,tuple(wrap_)
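# Small 1D illustration of the seam handling above (standalone, not the class code):
# a feature that wraps around a periodic boundary receives two labels from plain
# ndimage.label; comparing the first slice with the wrapped ghost slice tells us
# which labels have to be merged.
import numpy as np
from scipy import ndimage
data = np.array([True, True, False, False, False, True])   # wraps around the boundary
padded = np.pad(data, (0, 1), mode='wrap')                  # one trailing ghost cell
lp, n = ndimage.label(padded)
print(lp)                                # -> [1 1 0 0 0 2 2]
seam = (lp[:1] != lp[-1:]) & (lp[-1:] > 0)
print(lp[:1][seam], lp[-1:][seam])       # -> [1] [2] : label 2 maps onto label 1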
def fill_holes(self):
@@ -759,178 +957,89 @@ class BinaryFieldNd:
connected to itself across the periodic boundaries.'''
from scipy import ndimage
# Reimplementation of "binary_fill_holes" from ndimage
mask = np.logical_not(self.data) # only modify locations which are "False" at the moment
mask = np.logical_not(self._data) # only modify locations which are "False" at the moment
tmp = np.zeros(mask.shape,bool) # create empty array to "grow from boundaries"
ndimage.binary_dilation(tmp,structure=None,iterations=-1,
mask=mask,output=self.data,border_value=1,
origin=0) # everything connected to the boundary is now True in self.data
mask=mask,output=self._data,border_value=1,
origin=0) # everything connected to the boundary is now True in self._data
# Remove holes which overlap the boundaries
if any(self.periodicity):
self.data = self._labels_periodic(map_to_zero=True)[0]>0
self._data = self._labels_periodic(map_to_zero=True)[0]>0
# Invert to get the final result
np.logical_not(self.data,self.data)
np.logical_not(self._data,self._data)
# If labels have been computed already, recompute them to stay consistent
if self.labels is not None:
if self._labels is not None:
self.label()
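# Sketch of the hole-filling idea above on a tiny non-periodic example: grow True
# values inward from the border through the background; whatever the growth cannot
# reach is a hole, so inverting the grown mask fills it.
import numpy as np
from scipy import ndimage
ring = np.zeros((5, 5), dtype=bool)
ring[1:4, 1:4] = True
ring[2, 2] = False                      # a one-pixel hole
mask = np.logical_not(ring)             # only background may be modified
grown = ndimage.binary_dilation(np.zeros_like(ring), mask=mask,
                                iterations=-1, border_value=1)
filled = np.logical_not(grown)
assert np.array_equal(filled, ndimage.binary_fill_holes(ring))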
def probe(self,idx):
def probe(self,idx,probe_label=False):
'''Returns whether or not a point at idx is True or False.'''
return self.data[tuple(idx)]
def volume(self):
'''Returns the sum of True values.'''
return np.sum(self.data)
def volume_feature(self,label=None):
'''Returns volume of features, i.e. connected regions which have been
labeled using the label() method. If 'label' is None all volumes
are returned including the volume of the background region. The array
is sorted by labels, i.e. vol[0] is the volume of the background region,
vol[1] the volume of label 1, etc. If 'label' is an integer value, only
the volume of the corresponding region is returned.
Note: it is more efficient to retrieve all volumes at once than querying
single labels.'''
from scipy import ndimage
if self.labels is None:
self.label()
if label is None:
return np.bincount(self.labels.ravel())
# return ndimage.sum_labels(self.data,labels=self.labels,index=range(0,self.nlabels+1)) # performs much worse
if probe_label:
return self._labels[tuple(idx)]
else:
return self._data[tuple(idx)]
def volume(self,label):
'''Returns the sum of True values.'''
if label is None:
return np.sum(self.data)
else:
if self._labels is None: self.label()
return np.sum(self.labels==label)
def volumes(self):
'''Returns volume of all features, i.e. connected regions which have been
labeled using the label() method including the volume of the background region.
The array is sorted by labels, i.e. vol[0] is the volume of the background
region, vol[1] the volume of label 1, etc. If 'label' is an integer value,
only the volume of the corresponding region is returned.
Note: it is more efficient to retrieve all volumes at once than querying
single labels.'''
if self._labels is None: self.label()
return np.bincount(self.labels.ravel())
def volume_domain(self):
'''Returns volume of entire domain. Should be equal to sum(volume_feature()).'''
return np.prod(self._dim)
dim = tuple(self._dim[axis]-1 if self.periodicity[axis] else self._dim[axis] for axis in range(self._ndim))
return np.prod(dim)
def area_feature(self,ignore_curved=False,ignore_pathological=False):
from scipy import ndimage
from time import time
# Pad array in periodic directions
pw = tuple((1,1) if x else (0,0) for x in self.periodicity)
sl_pad = tuple(slice(1,-1) if x else slice(None) for x in self.periodicity)
data_ = np.pad(self.data,pw,mode='wrap')
#
area = np.zeros(self.nlabels+1)
stypes = list(range(1,10))
if ignore_pathological: del stypes[6:9]
if ignore_curved: del stypes[3:6]
print(stypes,ignore_curved,ignore_pathological)
for stype in stypes:
print(stype)
svs_,w_ = BinaryFieldNd.surface_voxel_structure(stype)
hit_ = np.zeros_like(data_)
t = time()
ii=0
for s_ in svs_: # Loop over permutations
ii+=1
np.logical_or(hit_,ndimage.binary_hit_or_miss(data_,structure1=s_),out=hit_) # Check if this works inplace
print(ii,time()-t,np.sum(hit_))
area += w_*np.bincount(self.labels[hit_[sl_pad]],minlength=self.nlabels+1)
return area
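# Mini example of the pattern counting above (standalone): an isolated voxel matches
# only structure type 9, so it is hit exactly once and contributes that weight.
import numpy as np
from scipy import ndimage
vox = np.zeros((5, 5, 5), dtype=bool); vox[2, 2, 2] = True
svs, w = BinaryFieldNd.surface_voxel_structure(9)
hits = np.zeros_like(vox)
for s in svs:
    hits |= ndimage.binary_hit_or_miss(vox, structure1=s)
print(hits.sum(), w)   # -> 1 4.08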
def feature_labels_by_volume(self,descending=True):
def feature_labels_by_volume(self,descending=True,return_volumes=False):
'''Returns labels of connected regions sorted by volume.'''
labels = np.argsort(self.volume_feature()[1:])+1
if descending: labels = labels[::-1]
return labels
vol = self.volumes()[1:]
labels = np.argsort(vol)+1
if descending:
labels = labels[::-1]
if return_volumes:
return labels,vol[labels-1] # volumes reordered to match 'labels'
else:
return labels
def discard_feature(self,selection):
'''Removes a feature from data.'''
if self.labels is None:
def discard_features(self,selection,return_mask=False):
'''Removes features from data. Optionally returns a boolean array 'mask'
which marks position of features which have been removed.'''
if self._labels is None:
self.label()
selection = self._select_feature(selection)
selection = self._select_features(selection)
# Map tagged regions to zero in order to discard them
map_ = np.array(range(self.nlabels+1),dtype=self.labels.dtype)
map_ = np.array(range(self.nlabels+1),dtype=self._labels.dtype)
map_[selection] = 0
# Remove gaps from target mapping
idx_,map_ = np.unique(map_,return_index=True,return_inverse=True)[1:3]
# Discard regions
self.labels = map_[self.labels]
self._labels = map_[self._labels]
self.nlabels = np.max(map_)
for axis in range(self._ndim):
if self.wrap[axis] is not None:
self.wrap[axis] = self.wrap[axis][idx_]
self.data = self.labels>0
print(self.nlabels)
self.wrap = tuple(None if x is None else x[idx_] for x in self.wrap)
# for axis in range(self._ndim):
# if self.wrap[axis] is not None:
# self.wrap[axis] = self.wrap[axis][idx_]
if return_mask:
mask = np.logical_and(self._labels==0,self._data)
self._data = self._labels>0
if return_mask: return mask
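# Sketch of the gap removal used above (standalone): after mapping discarded labels to
# zero, np.unique's inverse relabels the survivors with consecutive integers.
import numpy as np
map_ = np.arange(6)                      # identity mapping for labels 0..5
map_[[2, 4]] = 0                         # discard labels 2 and 4
idx_, map_ = np.unique(map_, return_index=True, return_inverse=True)[1:3]
print(map_)                              # -> [0 1 0 2 0 3] : labels 3 and 5 become 2 and 3
labels = np.array([0, 1, 2, 3, 4, 5, 5])
print(map_[labels])                      # -> [0 1 0 2 0 3 3] : compacted label field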
def isolate_feature(self,selection,array=None):
from scipy import ndimage
if self.labels is None:
self.label()
selection = self._select_feature(selection)
output1 = []
has_array = array is not None
if has_array:
assert np.all(array.shape==self._dim)
output2 = []
for lab_ in selection:
# Extract feature of interest
if lab_==0:
data_ = np.logical_not(self.data)
if has_array: data2_ = array
else:
data_ = (self.labels[self._featsl[lab_-1]]==lab_)
if has_array: data2_ = array[self._featsl[lab_-1]]
# If feature is wrapped periodically, duplicate it and extract
# largest one
iswrapped = False
rep_ = self._ndim*[1]
for axis in range(self._ndim):
if self.wrap[axis] is not None and self.wrap[axis][lab_]:
rep_[axis] = 2
iswrapped = True
if iswrapped:
data_ = np.tile(data_,rep_)
l_,nl_ = ndimage.label(data_,structure=self.structure)
vol_ = np.bincount(l_.ravel())
il_ = np.argmax(vol_[1:])+1
sl_ = ndimage.find_objects(l_==il_)[0]
print(sl_)
data_ = data_[sl_]
if has_array:
data2_ = np.tile(data2_,rep_)[sl_]
# Add to output
output1.append(data_)
if has_array: output2.append(data2_)
if has_array:
return (output1,output2)
else:
return output1
def triangulate_feature(self,selection,origin=(0,0,0),spacing=(1,1,1),array=None):
assert self._ndim==3, "Triangulation requires 3D data."
from scipy import ndimage
if self.labels is None:
self.label()
selection = self._select_feature(selection)
output = []
pw = tuple((1,1) for ii in range(self._ndim))
has_array = array is not None
if has_array:
assert np.all(array.shape==self._dim)
output2 = []
for lab_ in selection:
if has_array:
data_,scal_ = self.isolate_feature(lab_,array=array)
data_,scal_ = data_[0],scal_[0]
# Fill interior in case we filled holes which is not in scal_
li = ndimage.binary_erosion(data_,structure=self.structure,iterations=2)
scal_[li] = 1.0
li = np.logical_not(ndimage.binary_dilation(data_,structure=self.structure,iterations=2))
scal_[li] = -1.0
# scal_ = np.pad(scal_,pw,mode='reflect',reflect_type='odd')
scal_ = np.pad(scal_,pw,mode='constant',constant_values=-1.0)
pd = Field3d(scal_,origin,spacing).vtk_contour(0.0)
else:
data_ = self.isolate_feature(lab_)[0]
data_ = np.pad(data_.astype(float),pw,mode='constant',constant_values=-1.0)
pd = Field3d(data_,origin,spacing).vtk_contour(0.5)
output.append(pd)
return output
def _select_feature(self,selection):
dtype = self.labels.dtype
def _select_features(self,selection):
dtype = self._labels.dtype
if selection is None:
return np.array(range(1,self.nlabels+1))
elif np.issubdtype(type(selection),np.integer):
@@ -948,65 +1057,6 @@ class BinaryFieldNd:
else:
raise ValueError('Invalid input. Accepting int,list,tuple,ndarray.')
@staticmethod
def surface_voxel_structure(stype):
'''Get structure element and all its permutations for surface voxels
as defined by
Mullikin, J. C., & Verbeek, P. W. (1993). Surface area estimation of digitized planes. Bioimaging, 1(1), 6-16.
Also returns weights, where S7-S9 are defined in
Windreich, G., Kiryati, N., & Lohmann, G. (2003). Voxel-based surface area estimation: from theory to practice. Pattern Recognition, 36(11), 2531-2541.
'''
if stype==1:
S = np.array([[[ True, True, True], [ True, True, True], [ True, True, True]],[[ True, True, True], [ True, True, True], [ True, True, True]],[[False,False,False], [False,False,False], [False,False,False]]])
W = 0.894
elif stype==2:
S = np.array([[[ True, True, True], [ True, True, True], [ True, True, True]],[[ True, True,False], [ True, True,False], [ True, True,False]],[[ True,False,False], [ True,False,False], [ True,False,False]]])
W = 1.3409
elif stype==3:
S = np.array([[[ True, True, True], [ True, True,False], [ True,False,False]],[[ True, True,False], [ True, True,False], [False,False,False]],[[ True,False,False], [False,False,False], [False,False,False]]])
W = 1.5879
elif stype==4:
S = np.array([[[ True, True, True], [ True, True, True], [ True, True, True]],[[False, True,False], [False, True,False], [False, True,False]],[[False,False,False], [False,False,False], [False,False,False]]])
W = 2.0
elif stype==5:
S = np.array([[[ True,False,False], [False,False,False], [False,False,False]],[[ True, True,False], [ True, True,False], [False,False,False]],[[ True,False,False], [False,False,False], [False,False,False]]])
W = 8./3.
elif stype==6:
S = np.array([[[False,False,False], [ True,False,False], [False,False,False]],[[False,False,False], [ True, True,False], [False,False,False]],[[False,False,False], [ True,False,False], [False,False,False]]])
W = 10./3.
elif stype==7:
S = np.array([[[False,False,False], [False,False,False], [False,False,False]],[[ True, True, True], [ True, True, True], [ True, True, True]],[[False,False,False], [False,False,False], [False,False,False]]])
W = 1.79
elif stype==8:
S = np.array([[[False,False,False], [False,False,False], [False,False,False]],[[False,False,False], [ True, True, True], [False,False,False]],[[False,False,False], [False,False,False], [False,False,False]]])
W = 2.68
elif stype==9:
S = np.array([[[False,False,False], [False,False,False], [False,False,False]],[[False,False,False], [False, True,False], [False,False,False]],[[False,False,False], [False,False,False], [False,False,False]]])
W = 4.08
else:
raise ValueError('Invalid structure type.')
return (np.unique(BinaryFieldNd.rotations48(S).reshape(-1,3,3,3),axis=0),W)
@staticmethod
def rotations48(a):
'''Generates all 48 axis permutations and reflections of a 3d numpy array.
Code taken from
https://stackoverflow.com/questions/33190042/how-to-calculate-all-24-rotations-of-3d-array
'''
import itertools
# Get all combinations of axes that are permutable
n = a.ndim
axcomb = np.array(list(itertools.permutations(range(n), n)))
# Initialize output array
out = np.zeros((6,2,2,2,) + a.shape,dtype=a.dtype)
# Run loop through all axes for flipping and permuting each axis
for i,ax in enumerate(axcomb):
for j,fx in enumerate([1,-1]):
for k,fy in enumerate([1,-1]):
for l,fz in enumerate([1,-1]):
out[i,j,k,l] = np.transpose(a[::fx,::fy,::fz],ax)
return out
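# Quick standalone check of the helper above: an array with all-distinct entries has
# 48 distinct orientations, a fully symmetric one collapses to a single orientation.
import numpy as np
a = np.arange(27).reshape(3, 3, 3)
out = BinaryFieldNd.rotations48(a).reshape(-1, 3, 3, 3)
print(len(np.unique(out, axis=0)))                                    # -> 48
sym = BinaryFieldNd.rotations48(np.ones((3, 3, 3))).reshape(-1, 3, 3, 3)
print(len(np.unique(sym, axis=0)))                                    # -> 1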
class ChunkIterator:
'''Iterates through all chunks. 'snapshot' must be an instance
of a class which returns a Field3d from the method call