Add initial batch processing and do some tidying.
parent 00fa3da2eb
commit 2e4514dd4a
34 changed files with 446 additions and 119 deletions
test/merge_plots.py (new file, 52 additions)
@@ -0,0 +1,52 @@
import os
import pickle
from PIL import Image
from stack3d.study.components import StudyData, MouseData, TimepointData


def merge_plots(work_dir, study_collection):

    for eachStudy in study_collection:
        print "Merging Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Merging Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path

            image_paths = []
            for eachTimePoint in eachMouse.timepoints:
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/plots/vessels/diameterPlot.png"
                if os.path.isfile(time_dir + base_path):
                    image_paths.append(time_dir + base_path)

            images = map(Image.open, image_paths)
            merge_path = mouse_dir + "/timeline_diameterPlot.jpg"
            merge_axis = 0
            print "Merging plots to: ", merge_path
            if len(images) > 0:
                widths, heights = zip(*(i.size for i in images))
                if merge_axis == 0:
                    total_width = sum(widths)
                    max_height = max(heights)
                    new_im = Image.new('RGB', (total_width, max_height))
                    x_offset = 0
                    for im in images:
                        new_im.paste(im, (x_offset, 0))
                        x_offset += im.size[0]
                    new_im.save(merge_path, optimize=True, quality=5)
                else:
                    max_width = max(widths)
                    total_height = sum(heights)
                    new_im = Image.new('RGB', (max_width, total_height))
                    y_offset = 0
                    for im in images:
                        new_im.paste(im, (0, y_offset))
                        y_offset += im.size[1]
                    new_im.save(merge_path, optimize=True, quality=5)


work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
merge_plots(work_dir, study_collection)
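Note: the merge above is the standard Pillow horizontal-tiling pattern. A minimal Python 3 sketch of the same idea, assuming Pillow is available and `plot_paths` is a hypothetical list of plot image paths:

    from PIL import Image

    def tile_horizontally(plot_paths, out_path):
        # Open each plot; im.size is a (width, height) tuple.
        images = [Image.open(p) for p in plot_paths]
        if not images:
            return
        widths, heights = zip(*(im.size for im in images))
        # Canvas wide enough for all plots side by side, as tall as the tallest.
        canvas = Image.new('RGB', (sum(widths), max(heights)))
        x_offset = 0
        for im in images:
            canvas.paste(im, (x_offset, 0))
            x_offset += im.size[0]
        canvas.save(out_path, optimize=True, quality=5)

As in the script, quality=5 produces a very aggressively compressed JPEG; it keeps the per-mouse timelines small, at some cost in legibility.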
test/render_networks.py (new file, 10 additions)
@@ -0,0 +1,10 @@
import vtk
import stack3d.formats.unet

path = "/home/grogan/stack-working/skeleton.pkl"
network_vtk = stack3d.formats.unet.skeleton_to_vtp(path)

writer = vtk.vtkXMLPolyDataWriter()
writer.SetInputData(network_vtk)
writer.SetFileName("/home/grogan/test.vtp")
writer.Write()
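Note: a quick way to sanity-check the written .vtp is to read it back with the matching VTK reader; a small sketch, assuming the same output path:

    import vtk

    reader = vtk.vtkXMLPolyDataReader()
    reader.SetFileName("/home/grogan/test.vtp")
    reader.Update()
    network = reader.GetOutput()
    # A non-empty point/line count confirms the polydata survived the round trip.
    print("points: %d, lines: %d" % (network.GetNumberOfPoints(), network.GetNumberOfLines()))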
test/skeleton_to_mat.py (new file, 55 additions)
@@ -0,0 +1,55 @@
import os
import inspect
import pickle
from subprocess import call
from PIL import Image
import vtk
import scipy.sparse as sps
from scipy.io import savemat
from stack3d.study.components import StudyData, MouseData, TimepointData
import stack3d.formats.unet


def vtk_to_mat(polydata, path):

    num_points = polydata.GetNumberOfPoints()
    sps_acc = sps.coo_matrix((num_points, num_points))
    data = []
    row_indices = []
    column_indices = []
    lines = polydata.GetLines()
    vtk_data = polydata.GetCellData().GetScalars()

    print "np: ", num_points
    for idx in range(lines.GetNumberOfCells()):
        points = vtk.vtkIdList()
        lines.GetCell(idx, points)
        if points.GetNumberOfIds() == 2:
            row_indices.append(points.GetId(0))
            column_indices.append(points.GetId(1))
            data.append(vtk_data.GetTuple1(idx))
    sps_acc = sps_acc + sps.coo_matrix((data, (row_indices, column_indices)), shape=(num_points, num_points))
    savemat(path, {'skeleton': sps_acc})


def skeleton_to_mat(work_dir, study_collection):

    for eachStudy in study_collection:
        print "Converting Study: ", eachStudy.raw_data_path
        study_dir = work_dir + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Converting Mouse: ", eachMouse.raw_data_path
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                print "Converting Time: ", eachTimePoint.raw_data_path
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                base_path = "/analysis_analysis/skeleton.pkl"
                if os.path.isfile(time_dir + base_path):
                    output_path = os.path.realpath(time_dir + "/analysis_analysis/skeleton_distance.mat")
                    polydata = stack3d.formats.unet.skeleton_to_vtp(time_dir + base_path)
                    vtk_to_mat(polydata, output_path)


work_dir = "/scratch/jgrogan/stack-working/study/"
f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)
skeleton_to_mat(work_dir, study_collection)
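Note: savemat stores the scipy.sparse accumulator as a MATLAB sparse array, so the edge-weight matrix round-trips into MATLAB or back into Python. A sketch of the Python side, assuming a hypothetical local copy of the output file:

    from scipy.io import loadmat

    contents = loadmat("skeleton_distance.mat")
    skeleton = contents['skeleton']  # comes back as a scipy.sparse CSC matrix
    rows, cols = skeleton.nonzero()
    print("%d points, %d edges" % (skeleton.shape[0], len(rows)))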
test/study_setup.py (new file, 97 additions)
@@ -0,0 +1,97 @@
import os
import pickle
from distutils.dir_util import copy_tree
from shutil import copyfile
from stack3d.study.components import StudyData, MouseData, TimepointData


def setup_study_collection_from_remote(path):

    """Scan the remote directory structure. Remote needs to be mounted first
    with Samba etc."""

    study_collection = []
    study_dirs = next(os.walk(path))[1]

    for study_dir in study_dirs:
        new_study = StudyData()
        new_study.raw_data_path = study_dir
        print "Set up study: ", study_dir
        mouse_dirs = next(os.walk(path + "/" + study_dir))[1]
        for mouse_dir in mouse_dirs:
            new_mouse = MouseData()
            new_mouse.raw_data_path = mouse_dir
            print "Set up mouse: ", mouse_dir

            cumulative_path = path + "/" + study_dir + "/" + mouse_dir
            if os.path.isdir(cumulative_path + "/Outputs_new_model"):
                new_mouse.extra_path = "/Outputs_new_model/"
            elif os.path.isdir(cumulative_path + "/Outputs"):
                new_mouse.extra_path = "/Outputs/"
            timepoint_dirs = next(os.walk(cumulative_path + new_mouse.extra_path))[1]
            for timepoint_dir in timepoint_dirs:
                if "TIF" not in timepoint_dir:
                    new_timepoint = TimepointData()
                    new_timepoint.raw_data_path = timepoint_dir
                    new_mouse.timepoints.append(new_timepoint)
            new_study.mice.append(new_mouse)
        study_collection.append(new_study)
    return study_collection


def setup_local_study_directory_structure(path, study_collection):

    for eachStudy in study_collection:
        study_dir = path + "/" + eachStudy.raw_data_path
        if not os.path.exists(study_dir):
            os.makedirs(study_dir)
        for eachMouse in eachStudy.mice:
            mouse_dir = study_dir + "/" + eachMouse.raw_data_path
            if not os.path.exists(mouse_dir):
                os.makedirs(mouse_dir)
            for eachTimePoint in eachMouse.timepoints:
                time_dir = mouse_dir + "/" + eachTimePoint.raw_data_path
                if not os.path.exists(time_dir):
                    os.makedirs(time_dir)


def copy_files(remote_path, local_path, study_collection):

    for eachStudy in study_collection:
        print "Copying Study: ", eachStudy.raw_data_path
        remote_study_dir = remote_path + "/" + eachStudy.raw_data_path
        local_study_dir = local_path + "/" + eachStudy.raw_data_path
        for eachMouse in eachStudy.mice:
            print "Copying Mouse: ", eachMouse.raw_data_path
            remote_mouse_dir = remote_study_dir + "/" + eachMouse.raw_data_path
            local_mouse_dir = local_study_dir + "/" + eachMouse.raw_data_path
            for eachTimePoint in eachMouse.timepoints:
                remote_time_dir = remote_mouse_dir + eachMouse.extra_path + "/" + eachTimePoint.raw_data_path
                local_time_dir = local_mouse_dir + "/" + eachTimePoint.raw_data_path

                if os.path.isdir(remote_time_dir + "/analysis_analysis"):
                    groups = ["csv", "histograms", "plots"]
                    for group in groups:
                        if os.path.isdir(remote_time_dir + "/analysis_analysis/" + group):
                            copy_tree(remote_time_dir + "/analysis_analysis/" + group,
                                      local_time_dir + "/analysis_analysis/" + group)

                    if os.path.isfile(remote_time_dir + "/analysis_analysis/skeleton.pkl"):
                        copyfile(remote_time_dir + "/analysis_analysis/skeleton.pkl",
                                 local_time_dir + "/analysis_analysis/skeleton.pkl")
#
work_dir = "/scratch/jgrogan/stack-working/study/"

samba_path = os.environ["XDG_RUNTIME_DIR"] + "/gvfs/"
raw_data_path = samba_path + "smb-share:server=imcore1.rob.ox.ac.uk,share=maths"
raw_data_path += "/Window Experiments/James/Network analysis/"

# study_collection = setup_study_collection_from_remote(raw_data_path)
# f = open(work_dir + "/study_collection.p", "wb")
# pickle.dump(study_collection, f)

f = open(work_dir + "/study_collection.p", 'r')
study_collection = pickle.load(f)

#setup_local_study_directory_structure(work_dir, study_collection)
copy_files(raw_data_path, work_dir, study_collection)
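Note on the pickle handling shared by these scripts: study_collection.p is opened in text mode ('r'), which only round-trips under Python 2. Under Python 3, pickle files must be opened in binary mode on both ends; a minimal sketch with context managers, assuming the same work_dir layout and a study_collection built by setup_study_collection_from_remote:

    import pickle

    work_dir = "/scratch/jgrogan/stack-working/study/"

    # Write: binary mode is required for pickle under Python 3.
    with open(work_dir + "/study_collection.p", "wb") as f:
        pickle.dump(study_collection, f)

    # Read back: 'rb', not 'r'; the context manager also closes the handle,
    # which the scripts above leave open.
    with open(work_dir + "/study_collection.p", "rb") as f:
        study_collection = pickle.load(f)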