just a backup, will be changed now

parent 2acef17323
commit 16e67ad666

field.py (86 lines changed)
@@ -659,7 +659,6 @@ class Features3d:
         # Assign basic properties to class variables
         self.origin = np.array(origin,dtype=np.float)
         self.spacing = np.array(spacing,dtype=np.float)
-        self.dimensions = np.array(input.shape,dtype=np.int)
         self.periodicity = tuple(bool(x) for x in periodicity)
         # If regions are supposed to be inverted, i.e. the interior consists of values
         # smaller than the threshold instead of larger, change the sign of the array.
@@ -674,6 +673,7 @@ class Features3d:
         else:
             pw = tuple((0,1) if x else (0,0) for x in periodicity)
             self._input = np.pad(sign_invert*input,pw,mode='wrap')
+        self.dimensions = np.array(self._input.shape,dtype=np.int)
         # Triangulate
         self.triangulate(contour_method=contour_method,
                          cellvol_normal_component=cellvol_normal_component,
@@ -700,12 +700,13 @@ class Features3d:
         import pyvista as pv
         import vtk
         from scipy import ndimage, spatial
+        from scipy.spatial import KDTree
         from time import time
         # Check if '_input' is available: might have been dropped after initialization
         assert self._input is not None, "'_input' not available. Initialize object with keep_input=True flag."
         # Wrap data for VTK using pyvista
         datavtk = pv.UniformGrid()
-        datavtk.dimensions = self._input.shape
+        datavtk.dimensions = self.dimensions
         datavtk.origin = self.origin
         datavtk.spacing = self.spacing
         datavtk.point_arrays['data'] = self._input.ravel('F')
@@ -714,6 +715,87 @@ class Features3d:
         if report: print('[Features3d.triangulate] computing isocontour using {}...'.format(contour_method))
         contour = datavtk.contour([self._threshold],method=contour_method,compute_scalars=False,compute_gradients=True)
         assert contour.is_all_triangles(), "Contouring produced non-triangle cells."
+        # Compute contour on boundaries: this is necessary for inside/outside checks and proper
+        # volume computation for overlapping objects
+        t__ = time()
+        self._faces_bd = []
+        self._points_bd = []
+        offset_pts = contour.n_points
+        print(offset_pts)
+        for axis in range(3):
+            # Build nearest-neighbor search KD-trees for boundary points so that we can connect
+            # them to the boundary faces when needed
+            # t__ = time()
+            pos_bd_lo = self.origin[axis]
+            pos_bd_hi = self.origin[axis]+self.spacing[axis]*(self.dimensions[axis]-1)
+            search_dist = 1e-5*self.spacing[axis]
+            idx_lo = np.flatnonzero(np.abs(contour.points[:,axis]-pos_bd_lo)<search_dist)
+            idx_hi = np.flatnonzero(np.abs(contour.points[:,axis]-pos_bd_hi)<search_dist)
+            sl_pln = [0,1,2]
+            del sl_pln[axis]
+            # print(len(idx_lo),len(idx_hi))
+            # kd_lo = KDTree(contour.points[np.ix_(idx_lo,sl_pln)],leafsize=10,compact_nodes=True,copy_data=False,balanced_tree=True)
+            # kd_hi = KDTree(contour.points[np.ix_(idx_hi,sl_pln)],leafsize=10,compact_nodes=True,copy_data=False,balanced_tree=True)
+            # print('KD tree build:',time()-t__)
+            # Compute the contour on the boundary: the normal should point outwards.
+            sl_lo = 3*[slice(None)]
+            sl_lo[axis] = 0
+            sl_hi = 3*[slice(None)]
+            sl_hi[axis] = -1
+            origin = self.origin.copy()
+            origin[axis] -= self.spacing[axis]
+            dimensions = self.dimensions.copy()
+            dimensions[axis] = 2
+            #
+            tmp = np.empty(dimensions,dtype=self._input.dtype,order='F')
+            tmp[tuple(sl_hi)] = self._input[tuple(sl_lo)]
+            tmp[tuple(sl_lo)] = -1e-30
+            print(origin)
+            print(self.spacing)
+            print(dimensions)
+            #
+            planevtk = pv.UniformGrid()
+            planevtk.dimensions = dimensions
+            planevtk.spacing = self.spacing
+            planevtk.origin = origin
+            planevtk.point_arrays['data'] = tmp.ravel('F')
+            # Contour for lower boundary
+            contour_bd = planevtk.contour([self._threshold],method=contour_method)
+            faces_bd = contour_bd.faces.reshape(-1,4).copy()
+            points_bd = contour_bd.points.copy()
+            print(points_bd)
+            # Find points which connect to main contour and update faces
+            kd = KDTree(points_bd[:,sl_pln],leafsize=10,compact_nodes=True,copy_data=False,balanced_tree=True)
+            kddist = 1e-1*self.spacing[axis]
+            print(kddist)
+            # ptidx = kd.query(contour.points[np.ix_(idx_lo,sl_pln)],k=1)#,distance_upper_bound=kddist)
+            ptidx = kd.query(contour.points[np.ix_(idx_lo,sl_pln)],k=1)#,distance_upper_bound=kddist)
+            # print(ptidx[0])
+            # print(np.min(contour.points[idx_lo,0]),np.max(contour.points[idx_lo,0]))
+            print(np.min(points_bd[:,0]),np.max(points_bd[:,0]))
+            print('connections:',np.sum(ptidx[0]<kddist))
+            print('boundary points:',points_bd[:,sl_pln].shape)
+            print('selected points:',contour.points[np.ix_(idx_lo,sl_pln)].shape)
+            stop
+            #
+            if self.periodicity[axis]:
+                sl_swap = [1,2,3]
+                del sl_swap[axis]
+                self._faces_bd += [contour_bd.faces.reshape(-1,4).copy()]
+                self._faces_bd[-1][:,sl_swap] = self._faces_bd[-1][:,sl_swap[::-1]]
+                self._points_bd += [contour_bd.points.copy()]
+                self._points_bd[-1][:,axis] += self.spacing[axis]*(self.dimensions[axis]-1)
+            else:
+                origin[axis] = self.origin[axis]+self.spacing[axis]*(self.dimensions[axis]-1)
+                tmp[tuple(sl_lo)] = self._input[tuple(sl_hi)]
+                tmp[tuple(sl_hi)] = -1e-30
+                planevtk.origin = origin
+                planevtk.point_arrays['data'] = tmp.ravel('F')
+                contour_bd = planevtk.contour([self._threshold],method=contour_method)
+                self._faces_bd += [contour_bd.faces.reshape(-1,4).copy()]
+                self._points_bd += [contour_bd.points.copy()]
+        print('boundary contour:',time()-t__)
+        #
         # Compute the connectivity of the triangulated surface: first we run an ordinary
         # connectivity filter neglecting periodic wrapping.
         if report: print('[Features3d.triangulate] computing connectivity...')
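
The added block builds, for each axis, a two-layer uniform grid that copies the boundary slice of the input field, pads it with a value below the threshold, and contours that slab to obtain a cap closing the isosurface on the domain boundary. Below is a rough, self-contained sketch of that idea, not the code from field.py: the helper name, the toy field and the threshold-based padding are invented here, and it assumes the same older pyvista API the diff uses (pv.UniformGrid with point_arrays; newer releases rename these to ImageData and point_data).

# Hypothetical illustration of the boundary-capping trick used in the diff:
# duplicate the boundary slice of the scalar field into a two-layer grid,
# force the ghost layer below the threshold, and contour it so the open
# isosurface gets closed ("capped") on the domain boundary.
import numpy as np
import pyvista as pv   # assumes older pyvista: UniformGrid / point_arrays

def cap_lower_boundary(field, origin, spacing, threshold, axis=0):
    """Triangulate the isocontour of `field` restricted to the lower boundary
    plane normal to `axis`. Returns a pyvista PolyData surface."""
    dims = np.array(field.shape)
    # Two-layer grid: layer 1 carries the boundary values, layer 0 is a ghost
    # layer pushed below the threshold so the contour lands on the boundary.
    cap_dims = dims.copy()
    cap_dims[axis] = 2
    sl_lo = [slice(None)] * 3; sl_lo[axis] = 0
    sl_hi = [slice(None)] * 3; sl_hi[axis] = -1
    tmp = np.empty(cap_dims, dtype=field.dtype, order='F')
    tmp[tuple(sl_hi)] = field[tuple(sl_lo)]
    tmp[tuple(sl_lo)] = threshold - 1.0          # guaranteed "outside"
    cap_origin = np.array(origin, dtype=float)
    cap_origin[axis] -= spacing[axis]            # ghost layer sits one cell outside
    grid = pv.UniformGrid()
    grid.dimensions = cap_dims
    grid.origin = cap_origin
    grid.spacing = spacing
    grid.point_arrays['data'] = tmp.ravel('F')
    return grid.contour([threshold], method='contour')

# Toy usage: a sphere that sticks out of the lower x-boundary gets a flat cap there.
x, y, z = np.meshgrid(*(np.linspace(0, 1, 32),) * 3, indexing='ij')
field = 0.4**2 - (x**2 + (y - 0.5)**2 + (z - 0.5)**2)
cap = cap_lower_boundary(field, origin=(0, 0, 0), spacing=(1/31,)*3, threshold=0.0)
print(cap.n_points, cap.n_faces)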
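
The new KDTree import and the commented-out kd_lo/kd_hi lines point at the next step the diff is working toward: matching vertices of the boundary caps to main-contour vertices lying on the same boundary plane, so the two meshes can later be stitched into one closed surface. A minimal sketch of that matching follows; all names, the helper function and the tolerances are invented for illustration.

# Hypothetical sketch of KD-tree matching between a boundary cap and the
# main contour: compare only the two in-plane coordinates and accept matches
# within a fraction of the grid spacing.
import numpy as np
from scipy.spatial import KDTree

def match_boundary_points(main_pts, cap_pts, axis, plane_pos, spacing, tol_frac=0.1):
    """Return (cap vertex index, main-contour vertex index) pairs for cap
    vertices that coincide with main-contour vertices on the boundary plane."""
    in_plane = [0, 1, 2]
    del in_plane[axis]                                # drop the normal coordinate
    # Main-contour vertices that actually sit on the boundary plane.
    on_plane = np.flatnonzero(np.abs(main_pts[:, axis] - plane_pos) < 1e-5 * spacing)
    tree = KDTree(main_pts[np.ix_(on_plane, in_plane)])
    dist, nearest = tree.query(cap_pts[:, in_plane], k=1)
    matched = np.flatnonzero(dist < tol_frac * spacing)
    return np.column_stack([matched, on_plane[nearest[matched]]])

# Toy usage: two point sets sharing two vertices on the plane x == 0.
main_pts = np.array([[0.0, 0.1, 0.2], [0.0, 0.5, 0.5], [0.3, 0.4, 0.4], [0.0, 0.9, 0.1]])
cap_pts  = np.array([[0.0, 0.1, 0.2], [0.0, 0.9, 0.1], [0.0, 0.3, 0.3]])
pairs = match_boundary_points(main_pts, cap_pts, axis=0, plane_pos=0.0, spacing=0.05)
print(pairs)   # rows of (cap vertex index, matching main-contour vertex index)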