Add multi render.

jmsgrogan 2017-09-05 15:35:28 +01:00
parent 2e4514dd4a
commit 26c823fa9b
4 changed files with 316 additions and 29 deletions

test/convert_to_vtk.py (new file, +31)

@@ -0,0 +1,31 @@
import pickle
import vtk
import os
from stack3d.study.components import StudyData, MouseData, TimepointData
import stack3d.formats.unet
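

# Walk the study -> mouse -> timepoint directory tree and convert each pickled
# skeleton ("skeleton.pkl") into a VTK polydata file ("skeleton.vtp") written
# alongside the source data.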
def skeleton_to_vtp(work_dir, study_collection):
    for eachStudy in study_collection:
        print "Converting Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Converting Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                print "Converting Time: ", eachTimePoint.raw_data_path
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/skeleton.pkl"
                if os.path.isfile(time_dir + base_path):
                    output_path = os.path.dirname(os.path.realpath(time_dir + "/analysis_analysis/skeleton.vtp"))
                    polydata = stack3d.formats.unet.skeleton_to_vtp(time_dir + base_path)
                    writer = vtk.vtkXMLPolyDataWriter()
                    writer.SetInputData(polydata)
                    writer.SetFileName(output_path + "/skeleton.vtp")
                    writer.Write()
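

# Load the pickled study collection and convert every skeleton it references.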
work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
skeleton_to_vtp(work_dir, study_collection)

test/get_tumour_stats.py (new file, +114)

@@ -0,0 +1,114 @@
import pickle
import vtk
import os
import numpy as np
import matplotlib.pyplot as plt
from stack3d.study.components import StudyData, MouseData, TimepointData
import stack3d.formats.unet
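

# Parse the imaging day from a timepoint directory name, assuming the name
# contains a token such as "Day_07" or "Day 07".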
def get_time(path):
    no_space = path.replace(" ", "")
    day_index = no_space.find("Day")
    day_string = no_space[day_index+3:day_index+5]
    day_string = day_string.replace("_", "")
    return int(day_string)
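

# Flatten each skeleton onto the z = 0 plane and plot the effective network
# radius, total vessel length and length density against imaging day, one
# curve per mouse and one set of figures per study.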
def render_skeleton(work_dir, study_collection):
    for eachStudy in study_collection:
        print "Rendering Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        rad_fig, rad_ax = plt.subplots()
        line_fig, line_ax = plt.subplots()
        den_fig, den_ax = plt.subplots()
        for eachMouse in eachStudy.mice:
            print "Rendering Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            days = []
            radii = []
            lengths = []
            densities = []
            # Sort the timepoints by imaging day
            for eachTimePoint in eachMouse.timepoints:
                days.append(get_time(eachTimePoint.raw_data_path))
            if len(days) > 0:
                days, eachMouse.timepoints = (list(t) for t in zip(*sorted(zip(days, eachMouse.timepoints))))
            for kdx, eachTimePoint in enumerate(eachMouse.timepoints):
                print "Rendering Time: ", eachTimePoint.raw_data_path
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/skeleton.vtp"
                if os.path.isfile(time_dir + base_path):
                    reader = vtk.vtkXMLPolyDataReader()
                    reader.SetFileName(time_dir + base_path)
                    reader.Update()
                    polydata = reader.GetOutput()
                    # Project all points onto the z = 0 plane
                    for idx in range(polydata.GetNumberOfPoints()):
                        loc = list(polydata.GetPoint(idx))
                        loc[2] = 0
                        polydata.GetPoints().SetPoint(idx, loc)
                    # Sum the lengths of all two-point line segments
                    lines = polydata.GetLines()
                    points = polydata.GetPoints()
                    length = 0.0
                    for jdx in range(polydata.GetNumberOfCells()):
                        pt_ids = vtk.vtkIdList()
                        lines.GetCell(jdx, pt_ids)
                        if pt_ids.GetNumberOfIds() == 2:
                            pt0 = np.array(points.GetPoint(pt_ids.GetId(0)))
                            pt1 = np.array(points.GetPoint(pt_ids.GetId(1)))
                            length += np.linalg.norm(pt0 - pt1)
                    lengths.append(length)
                    # Triangulate the projected network to find its footprint
                    delaunay = vtk.vtkDelaunay2D()
                    delaunay.SetInputData(polydata)
                    delaunay.Update()
                    com = vtk.vtkCenterOfMass()
                    com.SetInputData(delaunay.GetOutput())
                    com.SetUseScalarsAsWeights(False)
                    com.Update()
                    centre = com.GetCenter()
                    # Boundary edges of the triangulation give the network outline
                    feature = vtk.vtkFeatureEdges()
                    feature.SetBoundaryEdges(1)
                    feature.SetFeatureEdges(0)
                    feature.SetNonManifoldEdges(0)
                    feature.SetManifoldEdges(0)
                    feature.SetInputData(delaunay.GetOutput())
                    feature.Update()
                    # Effective radius: mean distance from the centre of mass to the outline
                    av_distance = 0.0
                    for idx in range(feature.GetOutput().GetNumberOfPoints()):
                        loc = feature.GetOutput().GetPoint(idx)
                        av_distance += np.linalg.norm(np.array(centre) - np.array(loc))
                    av_distance /= float(feature.GetOutput().GetNumberOfPoints())
                    densities.append(length/(np.pi*av_distance*av_distance))
                    print "d: ", days[kdx], "c: ", centre, "r: ", av_distance
                    radii.append(av_distance)
                else:
                    # Keep every series the same length as the days list
                    radii.append(0)
                    lengths.append(0)
                    densities.append(0)
            rad_ax.plot(days, radii, lw=2.0, label=eachMouse.raw_data_path)
            line_ax.plot(days, lengths, lw=2.0, label=eachMouse.raw_data_path)
            den_ax.plot(days, densities, lw=2.0, label=eachMouse.raw_data_path)
        handles, labels = rad_ax.get_legend_handles_labels()
        rad_ax.legend(handles, labels)
        handles, labels = line_ax.get_legend_handles_labels()
        line_ax.legend(handles, labels)
        handles, labels = den_ax.get_legend_handles_labels()
        den_ax.legend(handles, labels)
        rad_fig.savefig(study_dir+"/timeline_radius.png", bbox_inches='tight', dpi=160)
        line_fig.savefig(study_dir+"/timeline_length.png", bbox_inches='tight', dpi=160)
        den_fig.savefig(study_dir+"/timeline_density.png", bbox_inches='tight', dpi=160)

work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
render_skeleton(work_dir, study_collection)


@@ -4,46 +4,91 @@ from PIL import Image
from stack3d.study.components import StudyData, MouseData, TimepointData
def get_time(path):
    no_space = path.replace(" ", "")
    day_index = no_space.find("Day")
    day_string = no_space[day_index+3:day_index+5]
    day_string = day_string.replace("_", "")
    return int(day_string)
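

# Tile each mouse's per-timepoint plot images into a single horizontal timeline
# strip, then stack the per-mouse strips into one overview image for the study.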
def merge_plots(work_dir, study_collection):
    # The last assignment selects which plot type is merged
    plot_name = "diameterPlot"
    plot_name = "histogramDiameter"
    plot_name = "rendering"
    reduction_factor = 1
    reduction_factor2 = 1
    for eachStudy in study_collection:
        print "Merging Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        merge_paths = []
        for eachMouse in eachStudy.mice:
            print "Merging Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            image_paths = []
            # first sort time points
            days = []
            for eachTimePoint in eachMouse.timepoints:
                days.append(get_time(eachTimePoint.raw_data_path))
            if len(days) > 0:
                days, eachMouse.timepoints = (list(t) for t in zip(*sorted(zip(days, eachMouse.timepoints))))
            for eachTimePoint in eachMouse.timepoints:
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                # The last assignment selects which file is collected
                base_path = "/analysis_analysis/plots/vessels/diameterPlot.png"
                # base_path = "/analysis_analysis/plots/vessels/" + plot_name + ".png"
                base_path = "/analysis_analysis/histograms/" + plot_name + ".png"
                base_path = "/analysis_analysis//" + plot_name + ".tiff"
                if os.path.isfile(time_dir + base_path):
                    image_paths.append(time_dir + base_path)
            images = map(Image.open, image_paths)
            merge_path = mouse_dir + "/timeline_diameterPlot.jpg"
            merge_path = mouse_dir + "/timeline_" + plot_name + ".jpg"
            merge_axis = 0
            print "Merging plots to: ", merge_path
            if len(images) > 0:
                widths, heights = zip(*(i.size for i in images))
                if merge_axis == 0:
                    # Merge horizontally
                    total_width = sum(widths)
                    max_height = max(heights)
                    new_im = Image.new('RGB', (total_width, max_height))
                    x_offset = 0
                    for im in images:
                        new_im.paste(im, (x_offset, 0))
                        x_offset += im.size[0]
                    new_im.save(merge_path, optimize=True, quality=5)
                else:
                    # Merge vertically
                    max_width = max(widths)
                    total_height = sum(heights)
                    new_im = Image.new('RGB', (max_width, total_height))
                    y_offset = 0
                    for im in images:
                        new_im.paste(im, (0, y_offset))
                        y_offset += im.size[1]
                    new_im.save(merge_path, optimize=True, quality=5)
            if len(image_paths) > 0:
                # Crop each image to its central region and paste it into a strip
                widths = []
                heights = []
                for path in image_paths:
                    im = Image.open(path)
                    widths.append(im.size[0])
                    heights.append(im.size[1])
                    im.close()
                total_width = int(0.5*sum(widths))
                max_height = int(0.8*max(heights))
                new_im = Image.new('RGB', (total_width, max_height))
                x_offset = 0
                for path in image_paths:
                    im = Image.open(path)
                    img2 = im.crop((int(0.25*im.size[0]), int(0.2*im.size[1]), int(0.75*im.size[0]), im.size[1]))
                    new_im.paste(img2, (x_offset, 0))
                    x_offset += int(0.5*im.size[0])
                    im.close()
                new_im.thumbnail((total_width/reduction_factor, max_height/reduction_factor))
                merge_paths.append(merge_path)
                new_im.save(merge_path)
        if len(merge_paths) > 0:
            # Stack the per-mouse strips vertically into a study-level image
            widths = []
            heights = []
            for path in merge_paths:
                im = Image.open(path)
                widths.append(im.size[0])
                heights.append(im.size[1])
                im.close()
            max_width = max(widths)
            total_height = sum(heights)
            new_im = Image.new('RGB', (max_width, total_height))
            y_offset = 0
            for path in merge_paths:
                im = Image.open(path)
                new_im.paste(im, (0, y_offset))
                y_offset += im.size[1]
                im.close()
            new_im.thumbnail((max_width/reduction_factor2, total_height/reduction_factor2))
            new_im.save(study_dir + "/timeline_" + plot_name + ".jpg")

work_dir = "/scratch/jgrogan/stack-working/study/"


@@ -1,10 +1,107 @@
import pickle
import vtk
import os
from stack3d.study.components import StudyData, MouseData, TimepointData
import stack3d.formats.unet
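
# Stand-alone check: convert one pickled skeleton and write it to a test .vtp file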
path = "/home/grogan/stack-working/skeleton.pkl"
network_vtk = stack3d.formats.unet.skeleton_to_vtp(path)
writer = vtk.vtkXMLPolyDataWriter()
writer.SetInputData(network_vtk)
writer.SetFileName("/home/grogan/test.vtp")
writer.Write()
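

# Render each timepoint's skeleton as tubes inside a translucent box marking the
# imaging volume, annotate the frame with study/mouse/timepoint labels, and save
# a TIFF image per timepoint.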
def render_skeleton(work_dir, study_collection):
    for eachStudy in study_collection:
        print "Rendering Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Rendering Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                print "Rendering Time: ", eachTimePoint.raw_data_path
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/skeleton.vtp"
                if os.path.isfile(time_dir + base_path):
                    # Read the skeleton and move its cell data onto points so the
                    # tube filter can vary the radius along each vessel
                    reader = vtk.vtkXMLPolyDataReader()
                    reader.SetFileName(time_dir + base_path)
                    reader.Update()
                    polydata = reader.GetOutput()
                    cell_to_point = vtk.vtkCellDataToPointData()
                    cell_to_point.SetInputData(polydata)
                    cell_to_point.Update()
                    tubes = vtk.vtkTubeFilter()
                    tubes.SetInputData(cell_to_point.GetOutput())
                    tubes.SetNumberOfSides(6)
                    tubes.SetRadius(1.0)
                    tubes.SetRadiusFactor(50.0)
                    tubes.SetVaryRadiusToVaryRadiusByScalar()
                    tubes.Update()
                    mapper = vtk.vtkPolyDataMapper()
                    mapper.SetInputData(tubes.GetOutput())
                    mapper.ScalarVisibilityOff()
                    actor = vtk.vtkActor()
                    actor.SetMapper(mapper)
                    actor.GetProperty().SetColor(1, 0, 0)
                    # Label the frame with the study, mouse and timepoint
                    textActor = vtk.vtkTextActor()
                    textActor.SetTextScaleModeToProp()
                    textActor.SetDisplayPosition(400, 50)
                    inp_string = "Study: " + eachStudy.raw_data_path
                    inp_string += " | Mouse: " + eachMouse.raw_data_path
                    inp_string += " | Time: " + eachTimePoint.raw_data_path
                    textActor.SetInput(inp_string)
                    tprop = textActor.GetTextProperty()
                    tprop.SetFontSize(24)
                    tprop.SetFontFamilyToArial()
                    tprop.SetJustificationToCentered()
                    tprop.BoldOn()
                    tprop.ItalicOn()
                    tprop.ShadowOn()
                    tprop.SetColor(0, 0, 1)
                    # Translucent box marking the extent of the imaging volume
                    cubesource = vtk.vtkCubeSource()
                    cubesource.SetBounds(0, 4000, 0, 4000, 0, 70)
                    cubesource.Update()
                    cube_mapper = vtk.vtkPolyDataMapper()
                    cube_mapper.SetInputConnection(cubesource.GetOutputPort())
                    cube_mapper.ScalarVisibilityOff()
                    cube_actor = vtk.vtkActor()
                    cube_actor.SetMapper(cube_mapper)
                    cube_actor.GetProperty().SetColor(0, 0, 0)
                    cube_actor.GetProperty().SetOpacity(0.1)
                    # Create a renderer, render window, and interactor
                    renderer = vtk.vtkRenderer()
                    renderer.SetBackground(1, 1, 1)
                    renderer.AddActor2D(textActor)
                    # renderer.GetActiveCamera().Zoom(1.0)
                    renderWindow = vtk.vtkRenderWindow()
                    renderWindow.SetSize(1200, 800)
                    renderWindow.SetAlphaBitPlanes(1)
                    renderWindow.AddRenderer(renderer)
                    # renderWindowInteractor = vtk.vtkRenderWindowInteractor()
                    # renderWindowInteractor.SetRenderWindow(renderWindow)
                    # Add the actors to the scene
                    renderer.AddActor(actor)
                    renderer.AddActor(cube_actor)
                    renderer.ResetCameraClippingRange()
                    # Render and interact
                    renderWindow.Render()
                    # renderWindowInteractor.Start()
                    # Grab the rendered frame and write it next to the skeleton
                    window_to_image_filter = vtk.vtkWindowToImageFilter()
                    window_to_image_filter.SetInput(renderWindow)
                    window_to_image_filter.SetMagnification(1)
                    window_to_image_filter.SetInputBufferTypeToRGBA()
                    window_to_image_filter.ReadFrontBufferOff()
                    writer = vtk.vtkTIFFWriter()
                    writer.SetInputConnection(window_to_image_filter.GetOutputPort())
                    writer.SetFileName(time_dir + "/analysis_analysis/rendering.tiff")
                    writer.Write()

work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
render_skeleton(work_dir, study_collection)