Add initial batch processing and do some tidying.
parent 00fa3da2eb
commit 2e4514dd4a
34 changed files with 446 additions and 119 deletions

17 .project Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>Stack3D</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.python.pydev.PyDevBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.python.pydev.pythonNature</nature>
    </natures>
</projectDescription>

8 .pydevproject Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/${PROJECT_DIR_NAME}</path>
</pydev_pathproperty>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
</pydev_project>

2 .settings/org.eclipse.core.resources.prefs Normal file
@@ -0,0 +1,2 @@
eclipse.preferences.version=1
encoding//src/stack3d/formats/ome_schema.py=utf-8

@@ -1,44 +0,0 @@
"""
Reader for the MATLAB vessel network format provided by Russel Bates (russell.bates@new.ox.ac.uk)
"""

from argparse import ArgumentParser
import scipy.io
import numpy as np


def compare(x, y):

    return (x[0] == y[0]) and (x[1] == y[1]) and (x[2] == y[2])


def convert_to_vtk(mat):

    """
    Convert the format to VTK
    """

    nodes = []
    edges = []

    for idx, eachConnectedComponent in enumerate(mat["Vessels"][0]):
        adjacency = eachConnectedComponent['Adj']
        branches = eachConnectedComponent['Branch'][0][0][0]
        if idx == 0:
            for kdx, eachBranch in enumerate(branches):
                if kdx < 200:
                    for jdx, eachPoint in enumerate(eachBranch['Points'][0][0]):
                        nodes.append(np.array(eachPoint))

    for idx, eachEntry in enumerate(nodes):
        for jdx, eachOtherEntry in enumerate(nodes):
            if idx != jdx:
                if compare(eachEntry, eachOtherEntry):
                    print eachEntry, eachOtherEntry, idx, jdx
                    break

    #nodes.append(eachBranch['Points'][0][0][0])
    #print eachBranch['Points'][0][0][0]
    #adjacency_indices = adjacency[0][0].nonzero()
    #print adjacency_indices[0]

mat = scipy.io.loadmat('/home/grogan/Vessels.mat', struct_as_record=True)
convert_to_vtk(mat)

136 src/stack3d/formats/unet.py Normal file
@@ -0,0 +1,136 @@
""" Format conversion for Russ Bate's unet format.
"""

from __future__ import print_function
import inspect
import os
import csv
from subprocess import call
from argparse import ArgumentParser
import scipy.io
import networkx as nx
import stack3d.formats


def load_skeleton(path):

    """
    Load the skeleton from a pickle
    """

    # Delayed import so script can be run with both Python 2 and 3
    from unet_core.vessel_analysis import VesselTree

    v = VesselTree()
    v.load_skeleton(path)
    return v.skeleton


def skeleton_to_vtp(path):

    temp_output_path = os.path.dirname(os.path.realpath(path)) + "/temp_network_csv.csv"
    script_path = inspect.getfile(stack3d.formats.unet).replace(".pyc", ".py")
    launch_args = " --input '" + path + "' --output '" + temp_output_path + "' --format csv --field diameter"
    call("python3 " + script_path + launch_args, shell=True)

    # Avoid vtk import when method not used to reduce dependency overhead
    import vtk
    polydata = vtk.vtkPolyData()
    points = vtk.vtkPoints()
    cells = vtk.vtkCellArray()
    data = vtk.vtkDoubleArray()

    with open(temp_output_path, 'rb') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='|')
        counter = 0
        first = 0
        for row in reader:
            if first == 0:
                first += 1
                continue
            point0 = [float(x) for x in row[0:3]]
            point1 = [float(x) for x in row[3:6]]
            diameter = float(row[6])
            points.InsertNextPoint(point0)
            points.InsertNextPoint(point1)
            line = vtk.vtkLine()
            line.GetPointIds().SetId(0, counter)
            line.GetPointIds().SetId(1, counter+1)
            counter += 2
            cells.InsertNextCell(line)
            data.InsertNextTuple1(diameter)

    polydata.SetPoints(points)
    polydata.SetLines(cells)
    polydata.GetCellData().SetScalars(data)

    clean = vtk.vtkCleanPolyData()
    clean.SetInputData(polydata)
    clean.Update()

    return clean.GetOutput()


def skeleton_to_matlab(skeleton, field, output_path):

    """
    Convert the skeleton's network description to matlab sparse matrix format.
    Each mat file corresponds to a connected component
    """

    # if os.path.isabs(output_path) and not os.path.exists(output_path):
    #     os.makedirs(output_path)

    for idx, component in enumerate(skeleton.components):
        for n, nbrs in component.graph.adjacency_iter():
            for nbr, edict in nbrs.items():
                branch = edict[0]["branch"]
                component.graph.add_edge(n, nbr, weight=branch.diameter)
        scipy_mat = nx.to_scipy_sparse_matrix(component.graph)
        output = {"skeleton": scipy_mat}
        output_file = output_path + "/skeleton_" + field + "_Comp_" + str(idx) + ".mat"
        scipy.io.savemat(output_file, output)


def skeleton_to_csv(skeleton, output_path):

    """
    Convert the skeleton's network description to csv format
    """

    # if os.path.isabs(output_path) and not os.path.exists(output_path):
    #     os.makedirs(output_path)

    f = open(output_path, "w")
    f.write("P0 - x, P0 - y, P0 - z, P1 - x, P1 - y, P1 - z, diameter")

    for _, _, branch in skeleton.skeleton_branch_iter():
        for idx, p1 in enumerate(branch.points):
            if idx > 0:
                p0 = branch.points[idx-1]
                p0_s = str(p0.x) + "," + str(p0.y) + "," + str(p0.z)
                p1_s = str(p1.x) + "," + str(p1.y) + "," + str(p1.z)
                f.write(p0_s + "," + p1_s + "," + str(branch.diameter) + "\n")
    f.close()


if __name__ == "__main__":

    parser = ArgumentParser()
    parser.add_argument('--input', type=str, help='Input skeleton file.')
    parser.add_argument('--output', type=str, help='Output directory.')
    parser.add_argument('--format', type=str,
                        help='Output format.',
                        choices=['csv', 'mat'],
                        default='mat')
    parser.add_argument('--field', type=str,
                        help='Output field.',
                        choices=['diameter', "length", "tortuosity"],
                        default='diameter')
    args = parser.parse_args()

    skeleton = load_skeleton(args.input)
    if "csv" in args.format:
        skeleton_to_csv(skeleton, args.output)
    elif "mat" in args.format:
        skeleton_to_matlab(skeleton, args.field, args.output)

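For orientation, the new unet.py module above also works as a small command-line tool through its ArgumentParser block. A minimal usage sketch, assuming the unet_core package is importable and using hypothetical paths:

# From a shell (the --format and --field values map to the choices defined above):
#   python unet.py --input skeleton.pkl --output out_dir --format mat --field diameter
# The same conversions can be driven in-process:
import stack3d.formats.unet as unet
skeleton = unet.load_skeleton("skeleton.pkl")               # hypothetical input path
unet.skeleton_to_csv(skeleton, "out_dir/network.csv")       # one segment (P0, P1, diameter) per row
unet.skeleton_to_matlab(skeleton, "diameter", "out_dir")    # one .mat file per connected component
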
@@ -1,3 +1,4 @@
+
"""
Conversion of IMS/LSM/CZI files to OME and then Generic TIFFs
"""

0 src/stack3d/study/__init__.py Normal file

22 src/stack3d/study/components.py Normal file
@@ -0,0 +1,22 @@
class TimepointData(object):

    def __init__(self):

        self.raw_data_path = None


class MouseData(object):

    def __init__(self):

        self.raw_data_path = None
        self.extra_path = ""
        self.timepoints = []


class StudyData(object):

    def __init__(self):

        self.raw_data_path = None
        self.mice = []

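The three classes above are plain data containers that nest into a study/mouse/timepoint hierarchy; the test scripts later in this commit build such a collection and pickle it. A minimal sketch of how they compose (directory names hypothetical):

study = StudyData()
study.raw_data_path = "StudyA"        # directory name of the study on disk
mouse = MouseData()
mouse.raw_data_path = "Mouse1"
timepoint = TimepointData()
timepoint.raw_data_path = "Day03"
mouse.timepoints.append(timepoint)
study.mice.append(mouse)
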
0 src/stack3d/viewing/__init__.py Normal file

@@ -8,11 +8,11 @@ import vtk
import mp_color_map

def get_ox_map():

-files = ["/home/grogan/oxygen_solution_loc_0.vti",
-"/home/grogan/oxygen_solution_loc_1.vti",
+files = ["/home/grogan/oxygen_solution_loc_1.vti",
"/home/grogan/oxygen_solution_loc_2.vti",
-"/home/grogan/oxygen_solution_loc_3.vti"]
+"/home/grogan/oxygen_solution_loc_3.vti",
+"/home/grogan/oxygen_solution_loc_4.vti"]
offsets = [(0.0,0.0), (40.0, 0.0), (40.0, 40.0), (0.0, 40.0)]

ox_actors = []
@@ -72,12 +72,12 @@ def get_ox_map():
ox_actor = vtk.vtkActor()
ox_actor.SetMapper(ox_mapper)
ox_actor.GetProperty().SetColor(0.0, 1.0, 0.0)
-ox_actor.GetProperty().SetOpacity(0.5)
+ox_actor.GetProperty().SetOpacity(0.8)
ox_actors.append(ox_actor)

scale_bar = vtk.vtkScalarBarActor()
scale_bar.SetLookupTable(scaled_ctf);
-#scale_bar.SetTitle("Oxygen Tension (mmHg)");
+scale_bar.SetTitle(".");
scale_bar.SetOrientationToVertical();
scale_bar.GetPositionCoordinate().SetCoordinateSystemToNormalizedViewport();
scale_bar.GetPositionCoordinate().SetValue(0.23, 0.27);
@@ -95,7 +95,7 @@ def get_ox_map():
scale_bar.GetTitleTextProperty().SetFontSize(15)
scale_bar.GetLabelTextProperty().SetFontSize(15)
scale_bar.GetTitleTextProperty().SetColor(1.0, 1.0, 1.0);
-scale_bar.GetLabelTextProperty().SetColor(1.0, 1.0, 1.0);
+scale_bar.GetLabelTextProperty().SetColor(0.0, 0.0, 0.0);

return ox_actors, scale_bar

@@ -175,8 +175,7 @@ def render_image(image_path, threshold = 20.0):
input_files.append(image_path + "/"+eachFile)

renderer = vtk.vtkRenderer()
-renderer.SetBackground(20.0/255.0,30.0/255.0,48.0/255.0);
-renderer.SetBackground2(36.0/255.0,59.0/255.0,85.0/255.0);
+renderer.SetBackground(1.0,1.0,1.0);

for eachImage in input_files:
reader= vtk.vtkXMLImageDataReader()
@@ -198,7 +197,7 @@ def render_image(image_path, threshold = 20.0):
opacity_transfer_func.AddPoint(0, 0.0 )
opacity_transfer_func.AddPoint(threshold-1.0, 0.0 )
#opacity_transfer_func.AddPoint(threshold, 1.0 )
-opacity_transfer_func.AddPoint(255.0, 0.1 )
+opacity_transfer_func.AddPoint(255.0, 1.0 )

volume_properties = vtk.vtkVolumeProperty()
volume_properties.SetColor( color_transfer_func )
@@ -230,7 +229,9 @@ def render_image(image_path, threshold = 20.0):
render_window.AddRenderer(renderer)
window_interactor.SetRenderWindow(render_window)

-render_window.SetSize(170, 177)
+render_window.SetSize(1200, 800)
+render_window.SetAlphaBitPlanes(1)

render_window.Render()
ox_actors, scale_bar_actor = get_ox_map()
#renderer.AddActor(scale_bar_actor)
@@ -247,14 +248,16 @@ def render_image(image_path, threshold = 20.0):

window_to_image_filter = vtk.vtkWindowToImageFilter()
window_to_image_filter.SetInput(render_window)
+window_to_image_filter.SetMagnification(1);
+window_to_image_filter.SetInputBufferTypeToRGBA()
+window_to_image_filter.ReadFrontBufferOff()
writer = vtk.vtkTIFFWriter()
extension = ".tiff"
writer.SetInputConnection(window_to_image_filter.GetOutputPort())
writer.SetFileName("vtk_animation_0"+extension)
writer.Write()

-num_samples = 1
+num_samples = 2
for idx in range(num_samples):

if idx==0:
@@ -264,38 +267,42 @@ def render_image(image_path, threshold = 20.0):
renderer.GetActiveCamera().OrthogonalizeViewUp()
#time.sleep(0.02)

-#renderer.GetActiveCamera().Dolly(1.002))
+#renderer.GetActiveCamera().Elevation(-1.0)
+renderer.GetActiveCamera().Dolly(1.002)
+renderer.ResetCameraClippingRange()
render_window.Render()
window_to_image_filter.Modified()
writer.SetFileName("vtk_animation_"+str(idx+1)+extension)
writer.Write()

+renderer.AddActor(scale_bar_actor)

renderer.RemoveActor(ox_actors[0])
render_window.Render()
-window_to_image_filter.Modified()
+# window_to_image_filter.Modified()
-writer.SetFileName("vtk_animation_"+str(num_samples+1)+extension)
+# writer.SetFileName("vtk_animation_"+str(num_samples+1)+extension)
-writer.Write()
+# writer.Write()
+#
-cell_actors = add_cells()
+# cell_actors = add_cells()
-renderer.AddActor(cell_actors[0])
+# renderer.AddActor(cell_actors[0])
-render_window.Render()
+# render_window.Render()
-window_to_image_filter.Modified()
+# window_to_image_filter.Modified()
-writer.SetFileName("vtk_animation_"+str(num_samples+2)+extension)
+# writer.SetFileName("vtk_animation_"+str(num_samples+2)+extension)
-writer.Write()
+# writer.Write()
-renderer.RemoveActor(cell_actors[0])
+# renderer.RemoveActor(cell_actors[0])
+#
-renderer.AddActor(cell_actors[1])
+# renderer.AddActor(cell_actors[1])
-render_window.Render()
+# render_window.Render()
-window_to_image_filter.Modified()
+# window_to_image_filter.Modified()
-writer.SetFileName("vtk_animation_"+str(num_samples+3)+extension)
+# writer.SetFileName("vtk_animation_"+str(num_samples+3)+extension)
-writer.Write()
+# writer.Write()
-renderer.RemoveActor(cell_actors[1])
+# renderer.RemoveActor(cell_actors[1])
+#
-renderer.AddActor(cell_actors[2])
+# renderer.AddActor(cell_actors[2])
-render_window.Render()
+# render_window.Render()
-window_to_image_filter.Modified()
+# window_to_image_filter.Modified()
-writer.SetFileName("vtk_animation_"+str(num_samples+4)+extension)
+# writer.SetFileName("vtk_animation_"+str(num_samples+4)+extension)
-writer.Write()
+# writer.Write()

window_interactor.Start()

@@ -306,4 +313,4 @@ if __name__ == "__main__":
parser.add_argument("-i", "--input_file", type=str, help='vtk input file')
args = parser.parse_args()

render_image(args.input_file)

52 test/merge_plots.py Normal file
@@ -0,0 +1,52 @@
import os
import pickle
from PIL import Image
from stack3d.study.components import StudyData, MouseData, TimepointData


def merge_plots(work_dir, study_collection):

    for eachStudy in study_collection:
        print "Merging Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Merging Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path

            image_paths = []
            for eachTimePoint in eachMouse.timepoints:
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/plots/vessels/diameterPlot.png"
                if os.path.isfile(time_dir + base_path):
                    image_paths.append(time_dir + base_path)

            images = map(Image.open, image_paths)
            merge_path = mouse_dir + "/timeline_diameterPlot.jpg"
            merge_axis = 0
            print "Merging plots to: ", merge_path
            if len(images) > 0:
                widths, heights = zip(*(i.size for i in images))
                if merge_axis == 0:
                    total_width = sum(widths)
                    max_height = max(heights)
                    new_im = Image.new('RGB', (total_width, max_height))
                    x_offset = 0
                    for im in images:
                        new_im.paste(im, (x_offset, 0))
                        x_offset += im.size[0]
                    new_im.save(merge_path, optimize=True, quality=5)
                else:
                    max_width = max(widths)
                    total_height = sum(heights)
                    new_im = Image.new('RGB', (max_width, total_height))
                    y_offset = 0
                    for im in images:
                        new_im.paste(im, (0, y_offset))
                        y_offset += im.size[1]
                    new_im.save(merge_path, optimize=True, quality=5)


work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
merge_plots(work_dir, study_collection)

10 test/render_networks.py Normal file
@@ -0,0 +1,10 @@
import vtk
import stack3d.formats.unet

path = "/home/grogan/stack-working/skeleton.pkl"
network_vtk = stack3d.formats.unet.skeleton_to_vtp(path)

writer = vtk.vtkXMLPolyDataWriter()
writer.SetInputData(network_vtk)
writer.SetFileName("/home/grogan/test.vtp")
writer.Write()

55 test/skeleton_to_mat.py Normal file
@@ -0,0 +1,55 @@
import os
import inspect
import pickle
from subprocess import call
from PIL import Image
import vtk
import scipy.sparse as sps
from scipy.io import savemat
from stack3d.study.components import StudyData, MouseData, TimepointData
import stack3d.formats.unet


def vtk_to_mat(polydata, path):

    num_points = polydata.GetNumberOfPoints()
    sps_acc = sps.coo_matrix((num_points, num_points))
    data = []
    row_indices = []
    column_indices = []
    lines = polydata.GetLines()
    vtk_data = polydata.GetCellData().GetScalars()

    print "np: ", num_points
    for idx in range(lines.GetNumberOfCells()):
        points = vtk.vtkIdList()
        lines.GetCell(idx, points)
        if points.GetNumberOfIds() == 2:
            row_indices.append(points.GetId(0))
            column_indices.append(points.GetId(1))
            data.append(vtk_data.GetTuple1(idx))
    sps_acc = sps_acc + sps.coo_matrix((data, (row_indices, column_indices)), shape=(num_points, num_points))
    savemat(path, {'skeleton': sps_acc})


def skeleton_to_mat(work_dir, study_collection):

    for eachStudy in study_collection:
        print "Converting Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Converting Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                print "Converting Time: ", eachTimePoint.raw_data_path
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/skeleton.pkl"
                if os.path.isfile(time_dir + base_path):
                    output_path = os.path.dirname(os.path.realpath(time_dir + "/analysis_analysis/skeleton_distance.mat"))
                    polydata = stack3d.formats.unet.skeleton_to_vtp(time_dir + base_path)
                    vtk_to_mat(polydata, output_path)


work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
skeleton_to_mat(work_dir, study_collection)

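The .mat files produced by these conversions hold the network as a SciPy sparse matrix stored under the key 'skeleton', with each non-zero entry carrying a segment diameter. A minimal read-back sketch (file name hypothetical):

from scipy.io import loadmat
contents = loadmat("skeleton_diameter_Comp_0.mat")
skeleton = contents["skeleton"]      # sparse matrix; entry (i, j) holds the diameter of edge i-j
print skeleton.shape, skeleton.nnz
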
97 test/study_setup.py Normal file
@@ -0,0 +1,97 @@
import os
import pickle
from distutils.dir_util import copy_tree
from shutil import copyfile
from stack3d.study.components import StudyData, MouseData, TimepointData


def setup_study_collection_from_remote(path):

    """Scan the remote directory structure. Remote needs to be mounted first
    with Samba etc."""

    study_collection = []
    study_dirs = next(os.walk(path))[1]

    for study_dir in study_dirs:
        new_study = StudyData()
        new_study.raw_data_path = study_dir
        print "Set up study: ", study_dir
        mouse_dirs = next(os.walk(path + "/" + study_dir))[1]
        for mouse_dir in mouse_dirs:
            new_mouse = MouseData()
            new_mouse.raw_data_path = mouse_dir
            print "Set up mouse: ", mouse_dir

            cumulative_path = path + "/" + study_dir + "/" + mouse_dir
            if os.path.isdir(cumulative_path + "/Outputs_new_model"):
                new_mouse.extra_path = "/Outputs_new_model/"
            elif os.path.isdir(cumulative_path + "/Outputs"):
                new_mouse.extra_path = "/Outputs/"
            timepoint_dirs = next(os.walk(cumulative_path + new_mouse.extra_path))[1]
            for timepoint_dir in timepoint_dirs:
                if "TIF" not in timepoint_dir:
                    new_timepoint = TimepointData()
                    new_timepoint.raw_data_path = timepoint_dir
                    new_mouse.timepoints.append(new_timepoint)
            new_study.mice.append(new_mouse)
        study_collection.append(new_study)
    return study_collection


def setup_local_study_directory_structure(path, study_collection):

    for eachStudy in study_collection:
        study_dir = path + "/" + eachStudy.raw_data_path
        if not os.path.exists(study_dir):
            os.makedirs(study_dir)
        for eachMouse in eachStudy.mice:
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            if not os.path.exists(mouse_dir):
                os.makedirs(mouse_dir)
            for eachTimePoint in eachMouse.timepoints:
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                if not os.path.exists(time_dir):
                    os.makedirs(time_dir)


def copy_files(remote_path, local_path, study_collection):

    for eachStudy in study_collection:
        print "Copying Study: ", eachStudy.raw_data_path
        remote_study_dir = remote_path + "/" + eachStudy.raw_data_path
        local_study_dir = local_path + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Copying Mouse: ", eachMouse.raw_data_path
            remote_mouse_dir = remote_study_dir + "/" + eachMouse.raw_data_path
            local_mouse_dir = local_study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                remote_time_dir = remote_mouse_dir + eachMouse.extra_path + "/" + eachTimePoint.raw_data_path
                local_time_dir = local_mouse_dir + "/" + eachTimePoint.raw_data_path

                if os.path.isdir(remote_time_dir + "/analysis_analysis"):
                    groups = ["csv", "histograms", "plots"]
                    for group in groups:
                        if os.path.isdir(remote_time_dir + "/analysis_analysis/" + group):
                            copy_tree(remote_time_dir + "/analysis_analysis/" + group,
                                      local_time_dir + "/analysis_analysis/" + group)

                    if os.path.isfile(remote_time_dir + "/analysis_analysis/skeleton.pkl"):
                        copyfile(remote_time_dir + "/analysis_analysis/skeleton.pkl",
                                 local_time_dir + "/analysis_analysis/skeleton.pkl")

#
work_dir = "/scratch/jgrogan/stack-working/study/"

samba_path = os.environ["XDG_RUNTIME_DIR"] + "/gvfs/"
raw_data_path = samba_path + "smb-share:server=imcore1.rob.ox.ac.uk,share=maths"
raw_data_path += "/Window Experiments/James/Network analysis/"

# study_collection = setup_study_collection_from_remote(raw_data_path)
# f = open(work_dir + "/study_collection.p", "wb")
# pickle.dump(study_collection, f)

f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)

#setup_local_study_directory_structure(work_dir, study_collection)
copy_files(raw_data_path, work_dir, study_collection)

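As implied by the directory walks in study_setup.py, the batch scripts assume a nested layout on the remote share and mirror it under the local working directory; a hypothetical example (names illustrative only):

<raw_data_path>/
    StudyA/
        Mouse1/
            Outputs/ (or Outputs_new_model/)
                Day03/
                    analysis_analysis/
                        csv/  histograms/  plots/  skeleton.pkl
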
36 utility.py
@@ -1,36 +0,0 @@
"""
Utilities for logging, parallel execution
"""

import os
import logging


def do_setup(tool_name = None):

    out_directory = os.getcwd() + "/Stack3D_Logging/"
    if not os.path.exists(out_directory):
        os.makedirs(out_directory)
    if tool_name is not None:
        filename = out_directory + tool_name + ".log"
    else:
        filename = out_directory + "/root.log"

    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                        datefmt='%m-%d %H:%M',
                        filename=filename,
                        filemode='w')

    console = logging.StreamHandler()
    console.setLevel(logging.INFO)

    # set a format which is simpler for console use
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')

    # tell the handler to use this format
    console.setFormatter(formatter)

    # add the handler to the root logger
    logging.getLogger('').addHandler(console)