From 8eaf5aeec899cf23481014f8c7b35589b653fd90 Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Fri, 10 May 2013 10:49:18 -0400 Subject: [PATCH 1/9] Move twisted server and pydas out of slicer python environment --- controllers/components/ApiComponent.php | 18 +- controllers/components/PipelineComponent.php | 10 +- library/pipeline.py | 406 +++++++++++++++++++ library/reg_pipeline.py | 119 ------ library/reg_slicerjob.py | 113 ++++++ library/seg_pipeline.py | 118 ------ library/seg_slicerjob.py | 106 +++++ library/slicerjob.py | 43 ++ library/slicerprocess.py | 351 +--------------- library/twserver.cfg | 1 + library/twserver.py | 141 +++++-- public/js/process/process.status.js | 2 +- public/js/process/process.statuslist.js | 4 +- views/index/index.phtml | 2 +- views/process/statuslist.phtml | 2 +- 15 files changed, 817 insertions(+), 619 deletions(-) create mode 100755 library/pipeline.py delete mode 100644 library/reg_pipeline.py create mode 100644 library/reg_slicerjob.py delete mode 100644 library/seg_pipeline.py create mode 100644 library/seg_slicerjob.py create mode 100644 library/slicerjob.py diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index 306501a..c7937cb 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -592,6 +592,7 @@ public function addJobstatuses($args) * will return a job object for a job_id with the current status, * along with any related jobstatus objects for that job. * @param job_id the id of the job to query status for. 
+ * @param status_only (Optional) If set, will only return the array of jobstatus objects * @return array ('job' => the job object, 'jobstatuses' => the array of jobstatus objects, 'condition_rows' => array of job condition lines, if any, @@ -621,11 +622,18 @@ public function getJobstatus($args) // get the status details $jobstatuses = $jobstatusModel->getForJob($job); - - $pipelineComponent = MidasLoader::loadComponent('Pipeline', 'pyslicer'); - $conditionRows = $pipelineComponent->formatJobCondition($job->getCondition()); - $inputsAndOutputs = $pipelineComponent->resolveInputsAndOutputs($job); - return array('job' => $job, 'jobstatuses' => $jobstatuses, 'condition_rows' => $conditionRows, 'output_links' => $inputsAndOutputs['outputs']); + + if(array_key_exists('status_only', $args)) + { + return $jobstatuses; + } + else + { + $pipelineComponent = MidasLoader::loadComponent('Pipeline', 'pyslicer'); + $conditionRows = $pipelineComponent->formatJobCondition($job->getCondition()); + $inputsAndOutputs = $pipelineComponent->resolveInputsAndOutputs($job); + return array('job' => $job, 'jobstatuses' => $jobstatuses, 'condition_rows' => $conditionRows, 'output_links' => $inputsAndOutputs['outputs']); + } } diff --git a/controllers/components/PipelineComponent.php b/controllers/components/PipelineComponent.php index f94424f..2f3ca71 100644 --- a/controllers/components/PipelineComponent.php +++ b/controllers/components/PipelineComponent.php @@ -59,8 +59,8 @@ function init() function segmentationInputLinks($job, $inputs, $outputs, $midasPath) { $inputItemId = $inputs[0]->getItemId(); - $volumeView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId; - $sliceView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId; + $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId; + $sliceView = $midasPath . 
'/pvw/paraview/slice?itemId='.$inputItemId; return array( array ('text' => 'slice view', 'url' => $sliceView), array ('text' => 'volume view', 'url' => $volumeView)); @@ -71,9 +71,9 @@ function segmentationOutputLinks($job, $inputs, $outputs, $midasPath) $inputItemId = $inputs[0]->getItemId(); $outputItemId = $outputs[0]->getItemId(); - $meshView = $midasPath . '/visualize/paraview/surface?itemId='.$outputItemId; - $sliceView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; - $volumeView = $midasPath . '/visualize/paraview/volume?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $meshView = $midasPath . '/pvw/paraview/surface?itemId='.$outputItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $volumeView = $midasPath . 
'/pvw/paraview/volume?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; return array( array ('text' => 'model mesh view', 'url' => $meshView), array ('text' => 'slice view', 'url' => $sliceView), diff --git a/library/pipeline.py b/library/pipeline.py new file mode 100755 index 0000000..854f54f --- /dev/null +++ b/library/pipeline.py @@ -0,0 +1,406 @@ +import re +import logging +import os +import sys +import pydas +import shutil +import json + +class PipelineStatusEvent(): + """This class implements pipeline status events and is called by pyslicer's own process manager.""" + statuseventpattern = 'status&remoteprocessing_job_id=%s&event_id=%s×tamp=%s&event_type=%s' + statuseventmessagepattern = statuseventpattern + '&message=%s' + + def __init__(self, jobId, eventId, timestamp, eventType, message=None): + self.jobId = str(jobId) + self.eventId = str(eventId) + self.timestamp = str(timestamp) + self.eventType = eventType + self.message = message + self.jobstatusId = None + + def __repr__(self): + if self.message is not None: + string = self.statuseventmessagepattern % (self.jobId, self.eventId, self.timestamp, self.eventType, self.message) + else: + string = self.statuseventpattern % (self.jobId, self.eventId, self.timestamp, self.eventType) + return string + + @staticmethod + def parseEvent(data): + anychargroup = '(.*)' + # first search for pattern with message as it is a superset of messageless pattern + pattern = PipelineStatusEvent.statuseventmessagepattern + regex = pattern % tuple([anychargroup] * pattern.count('%s')) + match = False + m = re.search(regex, data) + message = None + if m is not None: + match = True + #print "Match:", m.groups() + (jobId, eventId, timestamp, eventType, message) = m.groups() + return PipelineStatusEvent(jobId, eventId, timestamp, eventType, message) + else: + pattern = PipelineStatusEvent.statuseventpattern + regex = pattern % tuple([anychargroup] * 
pattern.count('%s')) + m = re.search(regex, data) + if m is not None: + match = True + #print "Match:", m.groups() + (jobId, eventId, timestamp, eventType) = m.groups() + return PipelineStatusEvent(jobId, eventId, timestamp, eventType) + return None + +class PipelineFactory(): + """This class implements an interface to get pipeline and the python script running within Slicer.""" + def getPipeline(self, pipelineName, jobId, pydasParams, tmpDirRoot, args): + if pipelineName == 'segmentation': + return SegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + elif pipelineName == 'registration': + return RegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + else: + return None + + def getSlicerScript(self, pipelineName): + if pipelineName == 'segmentation': + return 'seg_slicerjob.py' + elif pipelineName == 'registration': + return 'reg_slicerjob.py' + else: + return None + +class Pipeline(): + """This class implements the base class for Pyslicer's pipelines.""" + event_pipelinestart = "PipelineStart" + event_downloadinput = "DownloadInput" + event_process = "Process" + event_uploadoutput = "UploadOutput" + event_pipelineend = "PipelineEnd" + event_exception = "Exception" + + midasstatus_started = 1 + midasstatus_done = 2 + midasstatus_exception = 3 + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot): + self.pipelineName = pipelineName + self.jobId = jobId + self.pydasParams = pydasParams + self.tmpDirRoot = tmpDirRoot + self.eventIdCounter = 0 + #TODO something better with logging + logging.basicConfig(level=logging.WARNING) + self.log = logging.getLogger('example') + + def create_event(self, eventType, message=None): + eventId = self.eventIdCounter + self.eventIdCounter = self.eventIdCounter + 1 + timestamp = 0 + event = PipelineStatusEvent(self.jobId, eventId, timestamp, eventType, message) + return event + + def create_process_event(self, message): + return self.create_event(self.event_process, message) + + def 
define_events(self): + self.eventsMap = {} + events = [self.create_event(eventType) for eventType in [self.event_pipelinestart, self.event_downloadinput]] + events = events + self.define_process_events() + events = events + [self.create_event(eventType) for eventType in [self.event_uploadoutput, self.event_pipelineend]] + for event in events: + self.eventsMap[event.eventId] = event + # then when it is their time to nofify, call notify passing in jobstatu_id and timestamp + # need an imple method for subclasses to list their process events + # maybe a map of event types to event, then a submap for process events? + # somehow i need to keep up with all these events here + # and maybe there is no reason to print them in the tracker anymore + + def register_events(self): + # get all the events, register them with the midas server + self.define_events() + events = self.eventsMap.values() + method = 'midas.pyslicer.add.jobstatuses' + parameters = {} + jsonEvents = json.dumps([str(event) for event in events]) + print jsonEvents + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + parameters['token'] = pydas.session.token + parameters['events'] = jsonEvents + eventId_to_jobstatusId = pydas.session.communicator.request(method, parameters) + for (eventId, jobstatusId) in eventId_to_jobstatusId.items(): + event = self.eventsMap[eventId] + event.jobstatusId = jobstatusId + + def get_events(self): + # get all the events for a given job + self.define_events() + method = 'midas.pyslicer.get.job.status' + parameters = {} + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['status_only'] = True + eventId_to_jobstatusId = pydas.session.communicator.request(method, parameters) + jobstatuses = pydas.session.communicator.request(method, parameters) + for jobstatus in jobstatuses: + event = 
self.eventsMap[jobstatus['event_id']] + event.jobstatusId = jobstatus['jobstatus_id'] + + def define_process_events(self): + # should be overwritten in the subclass + return [] + + def createTmpDir(self): + self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) + # clear it out if it already exists + if(os.path.exists(self.tmpdirpath)): + self.removeTmpDir() + os.mkdir(self.tmpdirpath) + # create a data dir + self.dataDir = os.path.join(self.tmpdirpath, 'data') + os.mkdir(self.dataDir) + # create an out dir + self.outDir = os.path.join(self.tmpdirpath, 'out') + os.mkdir(self.outDir) + + def downloadInput(self): + self.reportStatus(self.event_downloadinput) + self.downloadInputImpl() + + def downloadInputImpl(self): + self.tempfiles = {} + self.tempfiles['inputfile'] = self.downloadItem(self.itemId) + print self.tempfiles + + def downloadItem(self, itemId): + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + pydas.api._download_item(itemId, self.dataDir) + # unzip any zipped files + for filename in os.listdir(self.dataDir): + if filename.endswith('.zip'): + filepath = os.path.join(self.dataDir, filename) + zip = zipfile.ZipFile(filepath) + zip.extractall(self.dataDir) + zip.close() + # return the path to the name of the item + item = pydas.session.communicator.item_get(pydas.session.token, itemId) + return item['name'] + + def executeDownload(self): + try: + self.register_events() + # send pydas pipeline started + self.reportMidasStatus(self.midasstatus_started) + self.reportStatus(self.event_pipelinestart) + self.createTmpDir() + self.downloadInput() + print self.tempfiles + return (self.dataDir, self.outDir, self.tempfiles) + except Exception as exception: + # TODO where to do exceptions status and conditions + self.log.exception(exception) + print self.event_exception + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + 
self.reportMidasStatus(self.midasstatus_exception, emsg) + exit(1) + + def setTmpDir(self): + self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) + self.dataDir = os.path.join(self.tmpdirpath, 'data') + self.outDir = os.path.join(self.tmpdirpath, 'out') + + def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, itemDescription=None): + # read everything in the srcDir and upload it as a single item + # create a new item + # need a folder id + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + if itemDescription is not None: + item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId, description=itemDescription) + else: + item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId) + itemId = item['item_id'] + if srcDir is None: + srcDir = self.outDir + if outFile is not None: + # only upload this one file + uploadToken = pydas.session.communicator.generate_upload_token(pydas.session.token, itemId, outFile) + filepath = os.path.join(srcDir, outFile) + pydas.session.communicator.perform_upload(uploadToken, outFile, itemid=itemId, filepath=filepath) + else: + for filename in os.listdir(srcDir): + uploadToken = pydas.session.communicator.generate_upload_token(pydas.session.token, itemId, filename) + filepath = os.path.join(srcDir, filename) + pydas.session.communicator.perform_upload(uploadToken, filename, itemid=itemId, filepath=filepath) + # set the output item as an output for the job + method = 'midas.pyslicer.add.job.output.item' + parameters = {} + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['item_id'] = itemId + print parameters + pydas.session.communicator.request(method, parameters) + return itemId + + def uploadOutput(self): + self.reportStatus(self.event_uploadoutput) + self.uploadOutputImpl() + + def uploadOutputImpl(self): + pass + + def executeUpload(self): + try: + 
self.get_events() + self.setTmpDir() + self.uploadOutput() + self.removeTmpDir() + self.reportStatus(self.event_pipelineend) + # send pydas pipeline finished + self.reportMidasStatus(self.midasstatus_done) + except Exception as exception: + # TODO where to do exceptions status and conditions + self.log.exception(exception) + print self.event_exception + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + self.reportMidasStatus(self.midasstatus_exception, emsg) + exit(1) + + def removeTmpDir(self): + shutil.rmtree(self.tmpdirpath) + + def reportProcessStatus(self, message=None): + self.reportStatus(self.event_process, message) + + def reportStatus(self, eventType, message=None): + # find the event + match = None + for event in self.eventsMap.values(): + if event.eventType == eventType and event.message == message: + match = event + if match is None: + print 'reportStatus', eventType, message + print "NO MATCH" + exit(1) + import time + timestamp = time.time() + match.timestamp = timestamp + method = 'midas.pyslicer.notify.jobstatus' + parameters = {} + parameters['token'] = pydas.session.token + parameters['jobstatus_id'] = match.jobstatusId + parameters['notify_date'] = timestamp + pydas.session.communicator.request(method, parameters) + + def reportMidasStatus(self, status, condition=None): + # TODO add these methods to pydas + # TODO add condition to api call + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + method = 'midas.pyslicer.update.job' + parameters = {} + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['status'] = status + if condition is not None: parameters['condition'] = condition + print parameters + pydas.session.communicator.request(method, parameters) + + +class SegPipeline(Pipeline): + """This class implements a pipeline for simple region growing segmentation.""" + loaded_input_volume = "Loaded 
Input Volume" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print jsonArgs + if 'inputitemid' in jsonArgs: + self.itemId = jsonArgs['inputitemid'][0] + if 'outputitemname' in jsonArgs: + self.outputItemName = jsonArgs['outputitemname'][0] + self.outputFolderId = jsonArgs['outputfolderid'][0] + # + def define_process_events(self): + process_events = [self.loaded_input_volume, self.started_segmentation, self.finished_segmentation, self.started_modelmaker, self.finished_modelmaker, self.wrote_model_output] + process_events = [self.create_process_event(eventType) for eventType in process_events] + print process_events + return process_events + + def uploadOutputImpl(self): + #print "segmodeluploadoutputimpl" + self.outFile = self.outputItemName + '.vtp' + itemId = self.uploadItem(self.outFile, self.outputFolderId) + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + # TODO move metadata to superclass + # set metadata on the output item + method = 'midas.item.setmultiplemetadata' + parameters = {} + parameters['token'] = pydas.session.token + parameters['itemid'] = itemId + parameters['count'] = 2 + parameters['element_1'] = 'ParaView' + parameters['element_2'] = 'ParaView' + parameters['qualifier_1'] = 'DiffuseColor' + parameters['qualifier_2'] = 'Orientation' + parameters['value_1'] = '[1.0,0.0,0.0]' + parameters['value_2'] = '[180.0,180.0,0.0]' + print parameters + pydas.session.communicator.request(method, parameters) + + +class RegPipeline(Pipeline): + """This class implements a pipeline for simple region growing registration.""" + loaded_input_volumes = "Loaded Input Volumes" + 
finished_registration = "Finished Registration" + wrote_transformed_volume = "Wrote Transformed Volume" + wrote_transform = "Wrote Transform" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print jsonArgs + if 'fixed_item_id' in jsonArgs: + self.fixedItemId = jsonArgs['fixed_item_id'][0] + self.movingItemId = jsonArgs['moving_item_id'][0] + self.fixedFiducialsList = json.loads(jsonArgs['fixed_fiducials'][0]) + self.movingFiducialsList = json.loads(jsonArgs['moving_fiducials'][0]) + self.transformType = jsonArgs['transform_type'][0] + if 'output_folder_id' in jsonArgs: + self.outputFolderId = jsonArgs['output_folder_id'][0] + self.outputVolumeName = jsonArgs['output_volume_name'][0] + self.outputTransformName = jsonArgs['output_transform_name'][0] + + def define_process_events(self): + process_events = [self.loaded_input_volumes, self.finished_registration, self.wrote_transformed_volume, self.wrote_transform] + process_events = [self.create_process_event(eventType) for eventType in process_events] + print process_events + return process_events + + def downloadInputImpl(self): + print "regmodeldownloadinputimpl" + self.tempfiles = {} + self.tempfiles['fixed_volume_file'] = self.downloadItem(self.fixedItemId) + self.tempfiles['moving_volume_file'] = self.downloadItem(self.movingItemId) + print self.tempfiles + + def uploadOutputImpl(self): + #print "regmodeluploadoutputimpl" + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + folder = pydas.session.communicator.create_folder(pydas.session.token, 'output_'+self.jobId, self.outputFolderId) + folderId = folder['folder_id'] + itemId = self.uploadItem(self.outputVolumeName, folderId, outFile=self.outputVolumeName + '.mha', itemDescription='output volume') + itemId = self.uploadItem(self.outputTransformName, folderId, outFile=self.outputTransformName + '.tfm', itemDescription='output transform') + diff --git 
a/library/reg_pipeline.py b/library/reg_pipeline.py deleted file mode 100644 index af86d5d..0000000 --- a/library/reg_pipeline.py +++ /dev/null @@ -1,119 +0,0 @@ -from __main__ import vtk, slicer -from slicerprocess import SlicerPipeline -import pydas - -import slicer_utils - - - -class SlicerRegPipeline(SlicerPipeline): - loaded_input_volumes = "Loaded Input Volumes" - finished_registration = "Finished Registration" - wrote_transformed_volume = "Wrote Transformed Volume" - wrote_transform = "Wrote Transform" - - def __init__(self, jobId, pydasParams, tmpDirRoot, fixedItemId, movingItemId, fixedFiducialsList, movingFiducialsList, transformType, outputFolderId, outputVolumeName, outputTransformName): - SlicerPipeline.__init__(self, 'fiducialregistration', jobId, pydasParams, tmpDirRoot) - self.fixedItemId = fixedItemId - self.movingItemId = movingItemId - print self.fixedItemId, self.movingItemId - self.fixedFiducialsList = fixedFiducialsList - self.movingFiducialsList = movingFiducialsList - self.transformType = transformType - self.outputFolderId = outputFolderId - self.outputVolumeName = outputVolumeName - self.outputTransformName = outputTransformName - - def define_process_events(self): - process_events = [self.loaded_input_volumes, self.finished_registration, self.wrote_transformed_volume, self.wrote_transform] - process_events = [self.create_process_event(event_type) for event_type in process_events] - print process_events - return process_events - - - def downloadInputImpl(self): - print "segmodeldownloadinputimpl" - print self.fixedItemId, self.movingItemId - self.fixedVolumeFile = self.downloadItem(self.fixedItemId) - self.movingVolumeFile = self.downloadItem(self.movingItemId) - print self.fixedVolumeFile, self.movingVolumeFile - - - def parseSeedpointsList(self, seedpointsList): - print 'parseSeedpointsList' - #print seedpointsList - #print type(seedpointsList) - # TODO this is pretty bad - if type(seedpointsList) == type([]) and type(seedpointsList[0]) 
== type([]): - #print type(seedpointsList[0][0]) - if type(seedpointsList[0][0]) != type(0): - #print "noe" - seedpointsList = [[float(p) for p in seed] for seed in seedpointsList] - #print seedpointsList - #types = [type(seed) for seed in seedpointsList] - #print types - # TODO something better, email from jc - seedpoints = [(-1 * x, -1 * y, z) for (x, y, z) in seedpointsList] - return seedpoints - - - def processImpl(self): - print "segmodelprocessimpl" - # take first two coords and multiply by -1 - # TODO something much more systematic dealing with coords - - # parse the seedpoints and create fiducials lists - fixedFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.fixedFiducialsList)) - movingFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.movingFiducialsList)) - - # load the volumes - print self.fixedVolumeFile - print self.movingVolumeFile - - fixedVolume = slicer_utils.load_volume(self.fixedVolumeFile) - movingVolume = slicer_utils.load_volume(self.movingVolumeFile) - self.reportProcessStatus(self.loaded_input_volumes) - outputTransform = slicer_utils.create_linear_transform() - - slicer_utils.run_fiducial_registration(fixedFiducialsList, movingFiducialsList, outputTransform, self.transformType) - self.reportProcessStatus(self.finished_registration) - - self.transformed_volume = self.outputVolumeName + '.mha' - outPath = os.path.join(self.outdir, self.transformed_volume) - # apply transform to moving image, then save volume - movingVolume.ApplyTransformMatrix(outputTransform.GetMatrixTransformToParent()) - slicer_utils.write_storable_node(movingVolume, outPath) - self.reportProcessStatus(self.wrote_transformed_volume) - - self.transform = self.outputTransformName + '.tfm' - outPath = os.path.join(self.outdir, self.transform) - slicer_utils.write_storable_node(outputTransform, outPath) - - self.reportProcessStatus(self.wrote_transform) - - def uploadOutputImpl(self): - #print 
"segmodeluploadoutputimpl" - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - folder = pydas.session.communicator.create_folder(pydas.session.token, 'output_'+self.jobId, self.outputFolderId) - folder_id = folder['folder_id'] - - item_id = self.uploadItem(self.outputVolumeName, folder_id, self.transformed_volume, item_description='output volume') - item_id = self.uploadItem(self.outputTransformName, folder_id, self.transform, item_description='output transform') - - - -if __name__ == '__main__': - print 'reg pipeline', sys.argv - (script, jobId, tmpDirRoot, json_args) = sys.argv - import json - arg_map = json.loads(json_args) - #print arg_map - pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) - (fixed_item_id, moving_item_id, fixed_fiducials, moving_fiducials, transform_type, output_folder_id, output_volume_name, output_transform_name) = (arg_map['fixed_item_id'][0], arg_map['moving_item_id'][0], json.loads(arg_map['fixed_fiducials'][0]), json.loads(arg_map['moving_fiducials'][0]), arg_map['transform_type'][0], arg_map['output_folder_id'][0], arg_map['output_volume_name'][0], arg_map['output_transform_name'][0]) - - print fixed_item_id, moving_item_id - rp = SlicerRegPipeline(jobId, pydasParams, tmpDirRoot, fixed_item_id, moving_item_id, fixed_fiducials, moving_fiducials, transform_type, output_folder_id, output_volume_name, output_transform_name) - rp.execute() - - diff --git a/library/reg_slicerjob.py b/library/reg_slicerjob.py new file mode 100644 index 0000000..a284e87 --- /dev/null +++ b/library/reg_slicerjob.py @@ -0,0 +1,113 @@ +from __main__ import vtk, slicer +import os +import json +from slicerjob import SlicerJob +import slicer_utils + +class SlicerReg(SlicerJob): + """This class implements a job executed in Slicer's Python environment - simple region growing registration""" + loaded_input_volumes = "Loaded Input Volumes" + finished_registration = "Finished Registration" + 
wrote_transformed_volume = "Wrote Transformed Volume" + wrote_transform = "Wrote Transform" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, + fixedVolumeFile, movingVolumeFile, fixedItemId, movingItemId, + fixedFiducialsList, movingFiducialsList, transformType, + outputFolderId, outputVolumeName, outputTransformName): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) + self.fixedVolumeFile = fixedVolumeFile + self.movingVolumeFile = movingVolumeFile + self.fixedItemId = fixedItemId + self.movingItemId = movingItemId + self.fixedFiducialsList = fixedFiducialsList + self.movingFiducialsList = movingFiducialsList + self.transformType = transformType + self.outputFolderId = outputFolderId + self.outputVolumeName = outputVolumeName + self.outputTransformName = outputTransformName + + def parseSeedpointsList(self, seedpointsList): + print 'parseSeedpointsList' + #print seedpointsList + #print type(seedpointsList) + # TODO this is pretty bad + if type(seedpointsList) == type([]) and type(seedpointsList[0]) == type([]): + #print type(seedpointsList[0][0]) + if type(seedpointsList[0][0]) != type(0): + #print "noe" + seedpointsList = [[float(p) for p in seed] for seed in seedpointsList] + #print seedpointsList + #types = [type(seed) for seed in seedpointsList] + #print types + # TODO something better, email from jc + seedpoints = [(-1 * x, -1 * y, z) for (x, y, z) in seedpointsList] + return seedpoints + + def process(self): + print "regmodelprocessimpl" + # take first two coords and multiply by -1 + # TODO something much more systematic dealing with coords + + # parse the seedpoints and create fiducials lists + fixedFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.fixedFiducialsList)) + movingFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.movingFiducialsList)) + + # load the volumes + print self.fixedVolumeFile + 
print self.movingVolumeFile + + fixedVolume = slicer_utils.load_volume(self.fixedVolumeFile) + movingVolume = slicer_utils.load_volume(self.movingVolumeFile) + self.report_status(self.event_process, self.loaded_input_volumes) + outputTransform = slicer_utils.create_linear_transform() + + slicer_utils.run_fiducial_registration(fixedFiducialsList, movingFiducialsList, outputTransform, self.transformType) + self.report_status(self.event_process, self.finished_registration) + + self.transformed_volume = self.outputVolumeName + '.mha' + outPath = os.path.join(self.outDir, self.transformed_volume) + # apply transform to moving image, then save volume + movingVolume.ApplyTransformMatrix(outputTransform.GetMatrixTransformToParent()) + slicer_utils.write_storable_node(movingVolume, outPath) + self.report_status(self.event_process, self.wrote_transformed_volume) + + self.transform = self.outputTransformName + '.tfm' + outPath = os.path.join(self.outDir, self.transform) + slicer_utils.write_storable_node(outputTransform, outPath) + + self.report_status(self.event_process, self.wrote_transform) + + def jobEndingNotification(self, args=None): + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['output_folder_id'] = self.outputFolderId + reqArgs['output_volume_name'] = self.outputVolumeName + reqArgs['output_transform_name'] = self.outputTransformName + SlicerJob.jobEndingNotification(self, reqArgs) + +if __name__ == '__main__': + print 'reg pipeline', sys.argv + (script, jobId, tmpDirRoot, jsonArgs) = sys.argv + argMap = json.loads(jsonArgs) + #print argMap + pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) + (fixedVolumeFile, movingVolumeFile, fixedItemId, movingItemId, fixedFiducialsList, + movingFiducialsList, transformType, outputFolderId, outputVolumeName, outputTransformName) = \ + (argMap['fixed_volume_file'][0], argMap['moving_volume_file'][0], + argMap['fixed_item_id'][0], argMap['moving_item_id'][0], + json.loads(argMap['fixed_fiducials'][0]), 
json.loads(argMap['moving_fiducials'][0]), + argMap['transform_type'][0], argMap['output_folder_id'][0], + argMap['output_volume_name'][0], argMap['output_transform_name'][0]) + + print fixedItemId, movingItemId + rp = SlicerReg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, fixedVolumeFile, movingVolumeFile, + fixedItemId, movingItemId, fixedFiducialsList, + movingFiducialsList, transformType, outputFolderId, + outputVolumeName, outputTransformName) + rp.execute() + + diff --git a/library/seg_pipeline.py b/library/seg_pipeline.py deleted file mode 100644 index c9d651a..0000000 --- a/library/seg_pipeline.py +++ /dev/null @@ -1,118 +0,0 @@ -from __main__ import vtk, slicer -from slicerprocess import SlicerPipeline -import pydas - - - -class SlicerSegPipeline(SlicerPipeline): - loaded_input_volume = "Loaded Input Volume" - started_segmentation = "Starting Segmentation" - finished_segmentation = "Finished Segmentation" - started_modelmaker = "Starting Modelmaker" - finished_modelmaker = "Finished Modelmaker" - wrote_model_output = "Wrote Model Output" - - def __init__(self, jobId, pydasParams, tmpDirRoot, itemId, seed, outputItemName, outputFolderId ): - SlicerPipeline.__init__(self, 'segmentationmodel', jobId, pydasParams, tmpDirRoot) - self.itemId = itemId - self.seed = seed - self.outputItemName = outputItemName - self.outputFolderId = outputFolderId - - def downloadInputImpl(self): - print "segmodeldownloadinputimpl" - self.headerFile = self.downloadItem(self.itemId) - - def define_process_events(self): - process_events = [self.loaded_input_volume, self.started_segmentation, self.finished_segmentation, self.started_modelmaker, self.finished_modelmaker, self.wrote_model_output] - process_events = [self.create_process_event(event_type) for event_type in process_events] - print process_events - return process_events - - def processImpl(self): - print "segmodelprocessimpl" - # take first two coords and multiply by -1 - # TODO 
something much more systematic dealing with coords - (x, y, z) = [float(coord) for coord in self.seed.split(',')] - seedPointCoords = (-1 * x, -1 * y, z) - - # create the path to the desired output file - outFile = self.outputItemName + '.vtp' - outPath = os.path.join(self.outdir, outFile) - - (status, inputVolume) = slicer.util.loadVolume(self.headerFile, returnNode=True) - self.reportProcessStatus(self.loaded_input_volume) - - outVolume = slicer.vtkMRMLScalarVolumeNode() - slicer.mrmlScene.AddNode(outVolume) - fiducialNode = slicer.vtkMRMLAnnotationFiducialNode() - fiducialNode.SetFiducialWorldCoordinates(seedPointCoords) - fiducialNode.SetName('Seed Point') - fiducialNode.Initialize(slicer.mrmlScene) - fiducialsList = getNode('Fiducials List') - params = {'inputVolume': inputVolume.GetID(), 'outputVolume': outVolume.GetID(), 'seed' : fiducialsList.GetID(), 'iterations' : 6} - self.reportProcessStatus(self.started_segmentation) - cliNode = slicer.cli.run(slicer.modules.simpleregiongrowingsegmentation,None, params , wait_for_completion=True) - #from time import sleep - #sleep(3) - self.reportProcessStatus(self.finished_segmentation) - - # make a model, only param is name of output file - modelmaker = slicer.modules.modelmaker - mhn = slicer.vtkMRMLModelHierarchyNode() - slicer.mrmlScene.AddNode(mhn) - parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} - self.reportProcessStatus(self.started_modelmaker) - cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) - self.reportProcessStatus(self.finished_modelmaker) - - # output the model - # TODO change to method without memory leaks - outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() - modelStorage = outputModelNode.CreateDefaultStorageNode() - slicer.mrmlScene.AddNode(modelStorage) - modelStorage.SetFileName(outPath) - modelStorage.WriteData(outputModelNode) - self.outFile = outFile - #sleep(3) - self.reportProcessStatus(self.wrote_model_output) - 
-#TODO metadata -# Visualize DiffuseColor 1.0,0.0,0.0 -#Visualize Orientation 180.0,180.0,0 - - - - def uploadOutputImpl(self): - #print "segmodeluploadoutputimpl" - item_id = self.uploadItem(self.outFile, self.outputFolderId) - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - # TODO move metadata to superclass - # set metadata on the output item - method = 'midas.item.setmultiplemetadata' - parameters = {} - parameters['token'] = pydas.session.token - parameters['itemid'] = item_id - parameters['count'] = 2 - parameters['element_1'] = 'Visualize' - parameters['element_2'] = 'Visualize' - parameters['qualifier_1'] = 'DiffuseColor' - parameters['qualifier_2'] = 'Orientation' - parameters['value_1'] = '[1.0,0.0,0.0]' - parameters['value_2'] = '[180.0,180.0,0.0]' - print parameters - pydas.session.communicator.request(method, parameters) - - - - - -if __name__ == '__main__': - (script, jobId, tmpDirRoot, json_args) = sys.argv - import json - arg_map = json.loads(json_args) - pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) - (input_item_id, coords, output_item_name, output_folder_id) = (arg_map['inputitemid'][0], arg_map['coords'][0], arg_map['outputitemname'][0], arg_map['outputfolderid'][0], ) - sp = SlicerSegPipeline(jobId, pydasParams, tmpDirRoot, input_item_id, coords, output_item_name, output_folder_id) - sp.execute() diff --git a/library/seg_slicerjob.py b/library/seg_slicerjob.py new file mode 100644 index 0000000..751854e --- /dev/null +++ b/library/seg_slicerjob.py @@ -0,0 +1,106 @@ +from __main__ import vtk, slicer +import os +import json +from slicerjob import SlicerJob + +class SlicerSeg(SlicerJob): + """This class implements a job executed in Slicer's Python environment - simple region growing segmentation""" + loaded_input_volume = "Loaded Input Volume" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + started_modelmaker = 
"Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, inputFile, seed, outputItemName, outputFolderId): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) + self.inputFile = inputFile + self.seed = seed + self.outputItemName = outputItemName + self.outputFolderId = outputFolderId + + + def process(self): + print "segmodelprocessimpl" + # take first two coords and multiply by -1 + # TODO something much more systematic dealing with coords + (x, y, z) = [float(coord) for coord in self.seed.split(',')] + seedPointCoords = (-1 * x, -1 * y, z) + + # create the path to the desired output file + outFile = self.outputItemName + '.vtp' + outPath = os.path.join(self.outDir, outFile) + print outPath + inputPath = os.path.join(self.dataDir, self.inputFile) + print inputPath + (status, inputVolume) = slicer.util.loadVolume(inputPath, returnNode=True) + self.report_status(self.event_process, self.loaded_input_volume) + + outVolume = slicer.vtkMRMLScalarVolumeNode() + slicer.mrmlScene.AddNode(outVolume) + fiducialNode = slicer.vtkMRMLAnnotationFiducialNode() + fiducialNode.SetFiducialWorldCoordinates(seedPointCoords) + fiducialNode.SetName('Seed Point') + fiducialNode.Initialize(slicer.mrmlScene) + fiducialsList = getNode('Fiducials List') + params = {'inputVolume': inputVolume.GetID(), 'outputVolume': outVolume.GetID(), 'seed' : fiducialsList.GetID(), 'iterations' : 6} + self.report_status(self.event_process, self.started_segmentation) + cliNode = slicer.cli.run(slicer.modules.simpleregiongrowingsegmentation, None, params , wait_for_completion=True) + self.report_status(self.event_process, self.finished_segmentation) + + # make a model, only param is name of output file + modelmaker = slicer.modules.modelmaker + mhn = slicer.vtkMRMLModelHierarchyNode() + 
slicer.mrmlScene.AddNode(mhn) + parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} + self.report_status(self.event_process, self.started_modelmaker) + cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) + self.report_status(self.event_process, self.finished_modelmaker) + + # output the model + # TODO change to method without memory leaks + outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() + modelStorage = outputModelNode.CreateDefaultStorageNode() + slicer.mrmlScene.AddNode(modelStorage) + modelStorage.SetFileName(outPath) + modelStorage.WriteData(outputModelNode) + self.outFile = outFile + self.report_status(self.event_process, self.wrote_model_output) + + + def jobEndingNotification(self, args=None): + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['outputitemname'] = self.outputItemName + reqArgs['outputfolderid'] = self.outputFolderId + SlicerJob.jobEndingNotification(self, reqArgs) + + def execute(self): + try: + self.process() + slicer.app.exit() + self.jobEndingNotification() + except Exception as exception: + # TODO where to do exceptions status and conditions + #self.log.exception(exception) + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + print emsg + self.report_status(self.event_exception, emsg) + exit(1) + + +if __name__ == '__main__': + (script, jobId, tmpDirRoot, jsonArgs) = sys.argv + argMap = json.loads(jsonArgs) + pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) + (pipelineName, dataDir, outDir, proxyurl, inputFile, coords, outputItemName, outputFolderId) \ + = (argMap['pipeline'][0], argMap['data_dir'][0], argMap['out_dir'][0], + argMap['proxyurl'][0], argMap['inputfile'][0], argMap['coords'][0], + argMap['outputitemname'][0], argMap['outputfolderid'][0]) + sp = SlicerSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, 
inputFile, coords, outputItemName, outputFolderId) + sp.execute() diff --git a/library/slicerjob.py b/library/slicerjob.py new file mode 100644 index 0000000..bdc0afd --- /dev/null +++ b/library/slicerjob.py @@ -0,0 +1,43 @@ +import os +import urllib2 +import urllib + +class SlicerJob(): + """This class implements the base class for python jobs excuted within Slicer's Python environment.""" + event_process = "Process" + event_exception = "Exception" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl): + self.jobId = jobId + self.pipelineName = pipelineName + self.pydasParams = pydasParams + self.tmpdirpath = tmpDirRoot + self.dataDir = os.path.join(tmpDirRoot, dataDir) + self.outDir = os.path.join(tmpDirRoot, outDir) + self.proxyurl = proxyurl + + def report_status(self, eventType, message): + args = {} + args['pipeline'] = self.pipelineName + args['job_id'] = self.jobId + (args['email'], args['apikey'], args['url']) = self.pydasParams + args['event_type'] = eventType + args['message'] = message + data = urllib.urlencode(args) + request = urllib2.Request(self.proxyurl + "slicerjob/reportstatus?" + data) + response = urllib2.urlopen(request) + print response + + def jobEndingNotification(self, args=None): + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['pipeline'] = self.pipelineName + reqArgs['job_id'] = self.jobId + (reqArgs['email'], reqArgs['apikey'], reqArgs['url']) = self.pydasParams + data = urllib.urlencode(reqArgs) + request = urllib2.Request(self.proxyurl + "slicerjob/finish?" 
+ data) + response = urllib2.urlopen(request) + print response + diff --git a/library/slicerprocess.py b/library/slicerprocess.py index edc74bc..3f4cd9d 100755 --- a/library/slicerprocess.py +++ b/library/slicerprocess.py @@ -1,19 +1,18 @@ from twisted.internet import reactor from twisted.internet import protocol -import re -import logging import os -import sys -import pydas -import shutil import json +from pipeline import PipelineStatusEvent, PipelineFactory + class SlicerProcessJobManager(): - def __init__(self, tmpDirRoot, slicerPath): + """This class should implement Pyslicer's own process manager. Pyslicer sitll uses Slicer's process manager as of now.""" + def __init__(self, tmpDirRoot, slicerPath, proxyurl): self.jobs = {} #self.processCount = 0 self.tmpDirRoot = tmpDirRoot self.slicerPath = slicerPath + self.proxyurl = proxyurl # def getNextJobId(self): # # could be problematic if multithreaded # jobId = self.processCount @@ -23,16 +22,11 @@ def __init__(self, tmpDirRoot, slicerPath): def addJob(self, jobId): self.jobs[str(jobId)] = {} - - def processEvent(self, event): pass #jobEvents = self.jobs[str(event.jobId)] #jobEvents[event.eventId] = event - - - def getStatus(self, jobId=None): print "getStatus", jobId print self.jobs @@ -46,88 +40,14 @@ def getStatus(self, jobId=None): status = "" return status - - - -class SlicerProcessStatusEvent(): - statuseventpattern = 'status&remoteprocessing_job_id=%s&event_id=%s×tamp=%s&event_type=%s' - statuseventmessagepattern = statuseventpattern + '&message=%s' - - def __init__(self, jobId, eventId, timestamp, eventType, message=None): - self.jobId = str(jobId) - self.eventId = str(eventId) - self.timestamp = str(timestamp) - self.eventType = eventType - self.message = message - self.jobstatus_id = None - - def __repr__(self): - if self.message is not None: - string = self.statuseventmessagepattern % (self.jobId, self.eventId, self.timestamp, self.eventType, self.message) - else: - string = self.statuseventpattern % 
(self.jobId, self.eventId, self.timestamp, self.eventType) - return string - - @staticmethod - def parseEvent(data): - anychargroup = '(.*)' - # first search for pattern with message as it is a superset of messageless pattern - pattern = SlicerProcessStatusEvent.statuseventmessagepattern - regex = pattern % tuple([anychargroup] * pattern.count('%s')) - match = False - m = re.search(regex, data) - message = None - if m is not None: - match = True - #print "Match:", m.groups() - (jobId, eventId, timestamp, eventType, message) = m.groups() - return SlicerProcessStatusEvent(jobId, eventId, timestamp, eventType, message) - else: - pattern = SlicerProcessStatusEvent.statuseventpattern - regex = pattern % tuple([anychargroup] * pattern.count('%s')) - m = re.search(regex, data) - if m is not None: - match = True - #print "Match:", m.groups() - (jobId, eventId, timestamp, eventType) = m.groups() - return SlicerProcessStatusEvent(jobId, eventId, timestamp, eventType) - return None - - -class SlicerPipelineStatusTracker(): - def __init__(self, pipelineName, jobId): - self.clis = {} - self.started = True - self.finished = False - self.jobId = jobId - self.pipelineName = pipelineName - - def reportStatus(self, event): - # could change this to calling url if need be - print(event) - - def start(self): - self.started = True - - def finish(self): - self.finished = True - from __main__ import slicer - slicer.app.exit() - - class SlicerProcess(protocol.ProcessProtocol): - pipeline_scripts = {'segmentation' : 'seg_pipeline.py', 'registration' : 'reg_pipeline.py'} - - - - def __init__(self, jobManager, jobId, pipeline, request_args): - #def __init__(self, jobManager, jobId, requestArgs, jsonargs): + """This class implements a twisted process which runs a python script within Slicer's Python environment.""" + def __init__(self, jobManager, jobId, pipelineName, requestArgs): self.jobManager = jobManager self.jobId = jobId self.jobManager.addJob(jobId) - #self.requestArgs = requestArgs - 
self.pipeline = pipeline - self.request_args = request_args + self.pipelineName = pipelineName + self.requestArgs = requestArgs self.data = "" self.err = "" self.events = {} @@ -136,14 +56,12 @@ def connectionMade(self): print str(self.jobId) + "connectionMade!" self.transport.closeStdin() # tell them we're done - - def outReceived(self, data): #print str(self.jobId) + "outReceived! with %d bytes!" % len(data) # look for status events self.data = self.data + data print data - event = SlicerProcessStatusEvent.parseEvent(data) + event = PipelineStatusEvent.parseEvent(data) if event is not None: self.jobManager.processEvent(event) @@ -161,12 +79,10 @@ def outConnectionLost(self): lines = self.data.split('\n') print len(lines) for line in lines: - event = SlicerProcessStatusEvent.parseEvent(line) + event = PipelineStatusEvent.parseEvent(line) if event is not None: self.jobManager.processEvent(event) - - def errConnectionLost(self): print str(self.jobId) + "errConnectionLost! The child closed their stderr." 
print self.err @@ -181,244 +97,15 @@ def run(self): xvfbLogfile = os.path.join(self.jobManager.tmpDirRoot, 'xvfb.log') xvfbCmdParts = ['xvfb-run', '-a', '-e', xvfbLogfile] slicerArgs = ['--no-main-window', '--python-script'] - slicerPythonScript = [self.pipeline_scripts[self.pipeline]] - #licerPythonScript = ['seg_pipeline.py'] - import json - json_args = json.dumps(self.request_args) - print json_args - print [json_args] - print [str(self.jobId), self.jobManager.tmpDirRoot, json_args] - slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + slicerPythonScript + [str(self.jobId), self.jobManager.tmpDirRoot, json_args]#['slicerjob'] + pipelinefactory = PipelineFactory() + slicerPythonScript = pipelinefactory.getSlicerScript(self.pipelineName) + jsonArgs = json.dumps(self.requestArgs) + print jsonArgs + print [jsonArgs] + print [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs] + slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + [slicerPythonScript] + [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs]#['slicerjob'] cmd = xvfbCmdParts + slicerCmdParts print str(self.jobId) + " run: " + str(cmd) print ">>>>>>>>>>>>>>>SlicerProcess running:",str(cmd) - reactor.spawnProcess(self, 'xvfb-run', cmd, {}, usePTY=True) - - - - - -class SlicerPipeline(): - - event_pipelinestart = "PipelineStart" - event_downloadinput = "DownloadInput" - event_process = "Process" - event_uploadoutput = "UploadOutput" - event_pipelineend = "PipelineEnd" - event_exception = "Exception" - - midasstatus_started = 1 - midasstatus_done = 2 - midasstatus_exception = 3 - - def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot): - self.pipelineName = pipelineName - self.jobId = jobId - self.pydasParams = pydasParams - self.tmpDirRoot = tmpDirRoot - self.tracker = SlicerPipelineStatusTracker(pipelineName, jobId) - self.eventIdCounter = 0 - #TODO something better with logging - logging.basicConfig(level=logging.WARNING) - self.log = logging.getLogger('example') - 
self.register_events() - - def create_event(self, event_type, message=None): - event_id = self.eventIdCounter - self.eventIdCounter = self.eventIdCounter + 1 - timestamp = 0 - event = SlicerProcessStatusEvent(self.jobId, event_id, timestamp, event_type, message) - return event - - def create_process_event(self, message): - return self.create_event(self.event_process, message) - - def define_events(self): - self.events_map = {} - events = [self.create_event(event_type) for event_type in [self.event_pipelinestart, self.event_downloadinput]] - events = events + self.define_process_events() - events = events + [self.create_event(event_type) for event_type in [self.event_uploadoutput, self.event_pipelineend]] - for event in events: - self.events_map[event.eventId] = event - # then when it is their time to nofify, call notify passing in jobstatu_id and timestamp - # need an imple method for subclasses to list their process events - # maybe a map of event types to event, then a submap for process events? 
- # somehow i need to keep up with all these events here - # and maybe there is no reason to print them in the tracker anymore - - def register_events(self): - # get all the events, register them with the midas server - self.define_events() - events = self.events_map.values() - method = 'midas.pyslicer.add.jobstatuses' - parameters = {} - json_events = json.dumps([str(event) for event in events]) - print json_events - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - parameters['token'] = pydas.session.token - parameters['events'] = json_events - event_id_to_jobstatus_id = pydas.session.communicator.request(method, parameters) - for (event_id, jobstatus_id) in event_id_to_jobstatus_id.items(): - event = self.events_map[event_id] - event.jobstatus_id = jobstatus_id - - - - def define_process_events(self): - # should be overwritten in the subclass - return [] - - def createTmpDir(self): - self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) - # clear it out if it already exists - if(os.path.exists(self.tmpdirpath)): - self.removeTmpDir() - os.mkdir(self.tmpdirpath) - # create a data dir - self.datadir = os.path.join(self.tmpdirpath, 'data') - os.mkdir(self.datadir) - # create an out dir - self.outdir = os.path.join(self.tmpdirpath, 'out') - os.mkdir(self.outdir) - - def removeTmpDir(self): - shutil.rmtree(self.tmpdirpath) - - - def downloadInput(self): - self.reportStatus(self.event_downloadinput) - self.downloadInputImpl() - - def downloadInputImpl(self): - pass - - def downloadItem(self, itemId): - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - - pydas.api._download_item(itemId, self.datadir) - # unzip any zipped files - for filename in os.listdir(self.datadir): - if filename.endswith('.zip'): - filepath = os.path.join(self.datadir, filename) - zip = zipfile.ZipFile(filepath) - zip.extractall(self.datadir) - zip.close() - # return the path to the name of the 
item - item = pydas.session.communicator.item_get(pydas.session.token, itemId) - return os.path.join(self.datadir, item['name']) - - - def uploadItem(self, itemName, outputFolderId, out_file=None, item_description=None): - # read everything in the outdir and upload it as a single item - # create a new item - # need a folder id - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - if item_description is not None: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId, description=item_description) - else: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId) - item_id = item['item_id'] - if out_file is not None: - # only upload this one file - upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, out_file) - filepath=os.path.join(self.outdir, out_file) - pydas.session.communicator.perform_upload(upload_token, out_file, itemid=item_id, filepath=filepath) - else: - for filename in os.listdir(self.outdir): - upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, filename) - filepath=os.path.join(self.outdir, filename) - pydas.session.communicator.perform_upload(upload_token, filename, itemid=item_id, filepath=filepath) - # set the output item as an output for the job - method = 'midas.pyslicer.add.job.output.item' - parameters = {} - parameters['token'] = pydas.session.token - parameters['job_id'] = self.jobId - parameters['item_id'] = item_id - print parameters - pydas.session.communicator.request(method, parameters) - return item_id - - - def process(self): - self.processImpl() - - def processImpl(self): - pass - - def reportStatus(self, eventType, message=None): - # find the event - match = None - for event in self.events_map.values(): - if event.eventType == eventType and event.message == message: - match = event - if match is None: - print 'reportStatus', eventType, 
message - print "NO MATCH" - exit(1) - import time - timestamp = time.time() - match.timestamp = timestamp - method = 'midas.pyslicer.notify.jobstatus' - parameters = {} - parameters['token'] = pydas.session.token - parameters['jobstatus_id'] = match.jobstatus_id - parameters['notify_date'] = timestamp - pydas.session.communicator.request(method, parameters) - - - def reportProcessStatus(self, message=None): - self.reportStatus(self.event_process, message) - - def uploadOutput(self): - self.reportStatus(self.event_uploadoutput) - self.uploadOutputImpl() - - def uploadOutputImpl(self): - pass - - - def reportMidasStatus(self, status, condition=None): - # TODO add these methods to pydas - # TODO add condition to api call - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - method = 'midas.pyslicer.update.job' - parameters = {} - parameters['token'] = pydas.session.token - parameters['job_id'] = self.jobId - parameters['status'] = status - if condition is not None: parameters['condition'] = condition - print parameters - pydas.session.communicator.request(method, parameters) - - - def execute(self): - try: - self.tracker.start() - # send pydas pipeline started - self.reportMidasStatus(self.midasstatus_started) - self.reportStatus(self.event_pipelinestart) - self.createTmpDir() - self.downloadInput() - self.process() - self.uploadOutput() - self.removeTmpDir() - self.reportStatus(self.event_pipelineend) - # send pydas pipeline finished - self.reportMidasStatus(self.midasstatus_done) - self.tracker.finish() - except Exception as exception: - # TODO where to do exceptions status and conditions - # TODO send this through status tracker - self.log.exception(exception) - self.tracker.reportStatus(self.event_exception) - import traceback - etype, value, tb = sys.exc_info() - emsg = repr(traceback.format_exception(etype, value, tb)) - self.reportMidasStatus(self.midasstatus_exception, emsg) - exit(1) - + reactor.spawnProcess(self, 
'xvfb-run', cmd, env=os.environ, usePTY=True) diff --git a/library/twserver.cfg b/library/twserver.cfg index 727e368..0885ce3 100644 --- a/library/twserver.cfg +++ b/library/twserver.cfg @@ -1,3 +1,4 @@ slicer_path=/Applications/Slicer.app/Contents/MacOS/Slicer +proxy_url=http://localhost:8880/ diff --git a/library/twserver.py b/library/twserver.py index 2ee12e7..646a0bb 100644 --- a/library/twserver.py +++ b/library/twserver.py @@ -1,69 +1,135 @@ -from twisted.internet.task import deferLater from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from twisted.internet import reactor +from twisted.internet import reactor, defer from twisted.web import server, resource - - - -#TODO clean imports +import os from slicerprocess import SlicerProcess, SlicerProcessJobManager +from pipeline import PipelineFactory class ServerRoot(Resource): + """This class implements a twisted Resource Object - server root""" def getChild(self, name, request): if name == '': return self return Resource.getChild(self, name, request) def render_GET(self, request): - return "Server Root, the following paths have service: slicerjob, status" + return "Server Root, the following paths have service: slicerjob" class Slicerjob(Resource): + """This class implements a twisted Resource Object - /slicerjob URL segment""" def getChild(self, name, request): if name == '': return self return Resource.getChild(self, name, request) def render_GET(self, request): - return "slicerjob root, the following paths have service: init, status" + return "slicerjob root, the following paths have service: init, finish, reportstatus" -class SlicerjobStatus(Resource): +class SlicerjobReportStatus(Resource): + """This class implements a twisted Resource Object - /reportstatus URL segment""" isLeaf = True - def __init__(self, jobManager): - self.jobManager = jobManager - - def render_GET(self, request): - if 'job_id' in request.args: - return 
str(self.jobManager.getStatus(jobId=request.args['job_id'][0])) + + def _report_status(self, request): + print request.args + print "SlicerjobReportStatus" + if 'pipeline' in request.args and 'job_id' in request.args: + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + tmpDir = os.getcwd(); + pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) + pipelinefactory = PipelineFactory() + pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + pipeline.get_events() + if 'message'in request.args: + pipeline.reportStatus(request.args['event_type'][0], request.args['message'][0]) + request.write("\nreport job status:" + str(request.args['event_type'][0]) + str(request.args['message'][0])) + else: + pipeline.reportStatus(request.args['event_type'][0]) + request.write("\nreport job status:" + str(request.args['event_type'][0])) + request.finish() else: - return '' -# else: -# return str(self.jobManager.getStatus()) - + request.finish() + def render_GET(self, request): + reactor.callLater(0, self._report_status, request) + return NOT_DONE_YET class SlicerjobInit(Resource): + """This class implements a twisted Resource Object - /init URL segment""" isLeaf = True def __init__(self, jobManager): self.jobManager = jobManager + + def _download_process(self, request): + print request.args + print "SlicerjobInit download" + request.write('init job') + if 'pipeline' in request.args and 'job_id' in request.args: + print "YES" + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER starting SlicerProcess" + tmpDir = os.getcwd(); + pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) + print pydasParams; + request.write("\nstarted job " + str(jobId)) + request.write("\nstarted downloading item(s)") + pipelinefactory = PipelineFactory() + pipeline = 
pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + (self.dataDir, self.outDir, self.inputfiles) = pipeline.executeDownload() + request.write("\nfinished downloading item(s)") + request.args['proxyurl'] = [self.jobManager.proxyurl] + request.args['data_dir'] = [self.dataDir] + request.args['out_dir'] = [self.outDir] + for k, v in self.inputfiles.items(): + request.args[k] = [v] # save value in a list as other reqeust parameters + request.write("\nstarted processing item(s) ") + slicerJob = SlicerProcess(self.jobManager, jobId, pipelineName, request.args) + d = defer.Deferred() + reactor.callLater(0, d.callback, None) + d.addCallback(lambda ignored: slicerJob.run()) + request.finish() + else: + request.finish() def render_GET(self, request): + reactor.callLater(0, self._download_process, request) + return NOT_DONE_YET + + +class SlicerjobFinish(Resource): + """This class implements a twisted Resource Object - /finish URL segment""" + isLeaf = True + + def _upload(self, request): print request.args - print "SlicerjobInit" - response = 'job:' + print "SlicerjobFinish" + request.write('Upload output') if 'pipeline' in request.args and 'job_id' in request.args: print "YES" - job_id = request.args['job_id'][0] - pipeline = request.args['pipeline'][0] - print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER starting SlicerProcess" - slicerJob = SlicerProcess(jobManager, job_id, pipeline, request.args) - slicerJob.run() - response = "started job " + str(job_id) - return response - + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER finishing SlicerProcess" + tmpDir = os.getcwd(); + print tmpDir + pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) + response = "\nstarted uploading output item" + pipelinefactory = PipelineFactory() + pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + 
pipeline.executeUpload() + request.write("\nfinished uploading output item") + request.write("\nfinished job " + str(jobId)) + request.finish() + else: + request.finish() + + def render_GET(self, request): + reactor.callLater(0, self._upload, request) + return NOT_DONE_YET # check status like this: #http://localhost:8880/slicerjob/status/?jobid=122 @@ -73,26 +139,31 @@ def render_GET(self, request): if __name__ == '__main__': # read the config file for slicer_path - config_file = open('twserver.cfg') + configFile = open('twserver.cfg') config = {} - for line in config_file: + for line in configFile: line = line.strip() if line is not None and line != '': cols = line.split('=') config[cols[0]] = cols[1] - config_file.close() + configFile.close() import os if 'slicer_path' not in config or not os.path.isfile(config['slicer_path']): print "You must specify the path to the Slicer exe as slicer_path in twserver.cfg" exit(1) + if 'proxy_url' not in config: + print "You must specify the Slicer Proxy Server URL" + exit(1) # set current dir as temp working dir - jobManager = SlicerProcessJobManager(os.getcwd(), config['slicer_path']) + jobManager = SlicerProcessJobManager(os.getcwd(), config['slicer_path'], config['proxy_url']) root = ServerRoot() slicerjobRoot = Slicerjob() slicerjobInit = SlicerjobInit(jobManager) - slicerjobStatus = SlicerjobStatus(jobManager) + slicerjobFinish = SlicerjobFinish() + slicerjobReportStatus = SlicerjobReportStatus() root.putChild('slicerjob', slicerjobRoot) slicerjobRoot.putChild('init', slicerjobInit) - slicerjobRoot.putChild('status', slicerjobStatus) + slicerjobRoot.putChild('finish', slicerjobFinish) + slicerjobRoot.putChild('reportstatus', slicerjobReportStatus) reactor.listenTCP(8880, server.Site(root)) reactor.run() diff --git a/public/js/process/process.status.js b/public/js/process/process.status.js index 61f2d7b..a541b48 100644 --- a/public/js/process/process.status.js +++ b/public/js/process/process.status.js @@ -67,7 +67,7 @@ 
midas.pyslicer.process.updateJobStatus = function() { } $.each(columns, function (col_index, column) { var colval = statusrow[column]; - if(colval == null) { + if(!colval) { if(column == 'notify_date') { colval = emptyNotifyDateColVal; } diff --git a/public/js/process/process.statuslist.js b/public/js/process/process.statuslist.js index b863f71..0eca0a1 100644 --- a/public/js/process/process.statuslist.js +++ b/public/js/process/process.statuslist.js @@ -14,8 +14,8 @@ $(document).ready(function(){ midas.showDialog('Browse for the Image to be Segmented'); midas.pyslicer.statuslist.itemSelectionCallback = function(name, id) { - var redirectUrl = $('.webroot').val() + '/visualize/paraview/slice?itemId=' + id; - redirectUrl += '&operations=pointSelect&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/visualize.pointSelect.js'; + var redirectUrl = $('.webroot').val() + '/pvw/paraview/slice?itemId=' + id; + redirectUrl += '&operations=pointSelect&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.pointSelect.js'; window.location = redirectUrl; }; }); diff --git a/views/index/index.phtml b/views/index/index.phtml index 8149fc8..5ce014e 100644 --- a/views/index/index.phtml +++ b/views/index/index.phtml @@ -16,7 +16,7 @@ $this->headScript()->appendFile($this->moduleWebroot . '/public/js/index/pyslice echo '"; diff --git a/views/process/statuslist.phtml b/views/process/statuslist.phtml index 4dd4d75..d33760e 100644 --- a/views/process/statuslist.phtml +++ b/views/process/statuslist.phtml @@ -25,7 +25,7 @@ $this->headScript()->appendFile($this->moduleWebroot . '/public/js/process/proce

Start a new pipeline job

From 9d97c966501f8bf1d968a2629fb5b1e46ac98766 Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Wed, 5 Jun 2013 11:47:35 -0400 Subject: [PATCH 2/9] STYLE: clean original code and add more comments. --- controllers/components/ApiComponent.php | 15 +- controllers/components/PipelineComponent.php | 46 +++--- library/pipeline.py | 156 ++++++++++++------- library/reg_slicerjob.py | 44 ++++-- library/seg_slicerjob.py | 39 +++-- library/slicerjob.py | 5 + library/slicerprocess.py | 17 +- library/twserver.py | 42 +++-- 8 files changed, 230 insertions(+), 134 deletions(-) diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index c7937cb..fb95ffa 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -593,10 +593,11 @@ public function addJobstatuses($args) * along with any related jobstatus objects for that job. * @param job_id the id of the job to query status for. * @param status_only (Optional) If set, will only return the array of jobstatus objects - * @return array ('job' => the job object, - 'jobstatuses' => the array of jobstatus objects, - 'condition_rows' => array of job condition lines, if any, - 'output_links' => array of output links for this job, if any). 
+ * @return if status_only is not set: array ('job' => the job object, + * 'jobstatuses' => the array of jobstatus objects, + * 'condition_rows' => array of job condition lines, if any, + * 'output_links' => array of output links for this job, if any); + * if status_only is set: the array of jobstatus objects */ public function getJobstatus($args) { @@ -622,7 +623,7 @@ public function getJobstatus($args) // get the status details $jobstatuses = $jobstatusModel->getForJob($job); - + if(array_key_exists('status_only', $args)) { return $jobstatuses; @@ -632,7 +633,9 @@ public function getJobstatus($args) $pipelineComponent = MidasLoader::loadComponent('Pipeline', 'pyslicer'); $conditionRows = $pipelineComponent->formatJobCondition($job->getCondition()); $inputsAndOutputs = $pipelineComponent->resolveInputsAndOutputs($job); - return array('job' => $job, 'jobstatuses' => $jobstatuses, 'condition_rows' => $conditionRows, 'output_links' => $inputsAndOutputs['outputs']); + return array('job' => $job, 'jobstatuses' => $jobstatuses, + 'condition_rows' => $conditionRows, + 'output_links' => $inputsAndOutputs['outputs']); } } diff --git a/controllers/components/PipelineComponent.php b/controllers/components/PipelineComponent.php index 2f3ca71..b8cb770 100644 --- a/controllers/components/PipelineComponent.php +++ b/controllers/components/PipelineComponent.php @@ -33,7 +33,7 @@ class Pyslicer_PipelineComponent extends AppComponent MIDAS_REMOTEPROCESSING_STATUS_STARTED => 'midas_pyslicer_started', MIDAS_REMOTEPROCESSING_STATUS_DONE => 'midas_pyslicer_done', MIDAS_PYSLICER_REMOTEPROCESSING_JOB_EXCEPTION => 'midas_pyslicer_error'); - + protected $pipelines = array( MIDAS_PYSLICER_SEGMENTATION_PIPELINE => array( @@ -46,16 +46,16 @@ class Pyslicer_PipelineComponent extends AppComponent MIDAS_PYSLICER_EXPECTED_OUTPUTS => MIDAS_PYSLICER_REGISTRATION_OUTPUT_COUNT, MIDAS_PYSLICER_INPUT_GENERATOR => 'registrationInputLinks', MIDAS_PYSLICER_OUTPUT_GENERATOR => 'registrationOutputLinks')); 
- + protected $missingInputs = array( array ('text' => 'Error: missing input', 'url' => '')); protected $missingOutputs = array( array ('text' => 'Error: missing output', 'url' => '')); - + /** init method */ function init() { } - + function segmentationInputLinks($job, $inputs, $outputs, $midasPath) { $inputItemId = $inputs[0]->getItemId(); @@ -70,16 +70,20 @@ function segmentationOutputLinks($job, $inputs, $outputs, $midasPath) { $inputItemId = $inputs[0]->getItemId(); $outputItemId = $outputs[0]->getItemId(); - - $meshView = $midasPath . '/pvw/paraview/surface?itemId='.$outputItemId; - $sliceView = $midasPath . '/pvw/paraview/slice?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; - $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; - + + $meshView = $midasPath . '/pvw/paraview/surface?itemId=' . $outputItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId=' . $inputItemId . + '&meshes=' . $outputItemId . '&jsImports=' . $midasPath . + '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $volumeView = $midasPath . '/pvw/paraview/volume?itemId=' . $inputItemId . + '&meshes=' . $outputItemId . '&jsImports=' . $midasPath . 
+ '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + return array( array ('text' => 'model mesh view', 'url' => $meshView), array ('text' => 'slice view', 'url' => $sliceView), array ('text' => 'volume view', 'url' => $volumeView)); } - + function registrationInputLinks($job, $inputs, $outputs, $midasPath) { $fixedItemId = $inputs[0]->getItemId(); @@ -94,7 +98,7 @@ function registrationOutputLinks($job, $inputs, $outputs, $midasPath) { $params = JsonComponent::decode($job->getParams()); $fixedItemId = $params['fixed_item_id']; - + // we need to get the output volume, but there are two outputs // we know the fact that the output volume is created first // and the outputs here are returned in reverse order of creation, but @@ -108,17 +112,17 @@ function registrationOutputLinks($job, $inputs, $outputs, $midasPath) $outputVolumeId = $output->getItemId(); } } - + $outputLink = $midasPath . '/visualize/paraview/dual?left='.$fixedItemId; $outputLink .= '&right=' . $outputVolumeId; $outputLink .= '&jsImports=' . 
$midasPath.'/modules/pyslicer/public/js/lib/visualize.regOutput.js'; - $outputLinkText = 'View'; + $outputLinkText = 'View'; return array( array ('text' => $outputLinkText, 'url' => $outputLink)); } - + public function resolveInputsAndOutputs($job) { - $midasPath = Zend_Registry::get('webroot'); + $midasPath = Zend_Registry::get('webroot'); $inputs = array(); $outputs = array(); $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); @@ -134,7 +138,7 @@ public function resolveInputsAndOutputs($job) $outputs[] = $item; } } - + // generate inputs $expectedInputs = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_EXPECTED_INPUTS]; $inputGenerator = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_INPUT_GENERATOR]; @@ -147,7 +151,7 @@ public function resolveInputsAndOutputs($job) { $inputLinks = call_user_func_array(array($this, $inputGenerator), array($job, $inputs, $outputs, $midasPath)); } - + // generate outputs if done $expectedOutputs = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_EXPECTED_OUTPUTS]; $outputGenerator = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_OUTPUT_GENERATOR]; @@ -162,7 +166,7 @@ public function resolveInputsAndOutputs($job) { if(sizeof($inputs) < $expectedInputs) { - $outputLinks = $this->missingInputs; + $outputLinks = $this->missingInputs; } else { @@ -172,8 +176,8 @@ public function resolveInputsAndOutputs($job) } return array('inputs' => $inputLinks, 'outputs' => $outputLinks); } - - + + public function formatJobCondition($condition) { $splitLines = array(); @@ -190,7 +194,7 @@ public function formatJobCondition($condition) } return $splitLines; } - + } // end class diff --git a/library/pipeline.py b/library/pipeline.py index 854f54f..33258db 100755 --- a/library/pipeline.py +++ b/library/pipeline.py @@ -2,12 +2,13 @@ import logging import os import sys -import pydas import shutil import json +import pydas + class PipelineStatusEvent(): - """This class implements pipeline status events and is called by pyslicer's 
own process manager.""" + """This class implements pipeline status events.""" statuseventpattern = 'status&remoteprocessing_job_id=%s&event_id=%s×tamp=%s&event_type=%s' statuseventmessagepattern = statuseventpattern + '&message=%s' @@ -21,13 +22,16 @@ def __init__(self, jobId, eventId, timestamp, eventType, message=None): def __repr__(self): if self.message is not None: - string = self.statuseventmessagepattern % (self.jobId, self.eventId, self.timestamp, self.eventType, self.message) + string = self.statuseventmessagepattern % (self.jobId, self.eventId, + self.timestamp, self.eventType, self.message) else: - string = self.statuseventpattern % (self.jobId, self.eventId, self.timestamp, self.eventType) + string = self.statuseventpattern % (self.jobId, self.eventId, + self.timestamp, self.eventType) return string @staticmethod def parseEvent(data): + """Create a status event if the input data matches status event pattern""" anychargroup = '(.*)' # first search for pattern with message as it is a superset of messageless pattern pattern = PipelineStatusEvent.statuseventmessagepattern @@ -36,8 +40,7 @@ def parseEvent(data): m = re.search(regex, data) message = None if m is not None: - match = True - #print "Match:", m.groups() + match = True (jobId, eventId, timestamp, eventType, message) = m.groups() return PipelineStatusEvent(jobId, eventId, timestamp, eventType, message) else: @@ -45,15 +48,17 @@ def parseEvent(data): regex = pattern % tuple([anychargroup] * pattern.count('%s')) m = re.search(regex, data) if m is not None: - match = True - #print "Match:", m.groups() + match = True (jobId, eventId, timestamp, eventType) = m.groups() return PipelineStatusEvent(jobId, eventId, timestamp, eventType) return None + class PipelineFactory(): - """This class implements an interface to get pipeline and the python script running within Slicer.""" + """This class implements an interface to get pipeline and the python script + running within Slicer.""" def getPipeline(self, 
pipelineName, jobId, pydasParams, tmpDirRoot, args): + """Return a specific pipeline based on input parameters""" if pipelineName == 'segmentation': return SegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) elif pipelineName == 'registration': @@ -62,6 +67,8 @@ def getPipeline(self, pipelineName, jobId, pydasParams, tmpDirRoot, args): return None def getSlicerScript(self, pipelineName): + """Return the name of a python script running within Slicer's python + environment.""" if pipelineName == 'segmentation': return 'seg_slicerjob.py' elif pipelineName == 'registration': @@ -69,6 +76,7 @@ def getSlicerScript(self, pipelineName): else: return None + class Pipeline(): """This class implements the base class for Pyslicer's pipelines.""" event_pipelinestart = "PipelineStart" @@ -88,11 +96,12 @@ def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot): self.pydasParams = pydasParams self.tmpDirRoot = tmpDirRoot self.eventIdCounter = 0 - #TODO something better with logging + # TODO something better with logging logging.basicConfig(level=logging.WARNING) self.log = logging.getLogger('example') def create_event(self, eventType, message=None): + """Create a pipeline status event""" eventId = self.eventIdCounter self.eventIdCounter = self.eventIdCounter + 1 timestamp = 0 @@ -103,12 +112,16 @@ def create_process_event(self, message): return self.create_event(self.event_process, message) def define_events(self): + """Define all the status events for this pipeline""" self.eventsMap = {} - events = [self.create_event(eventType) for eventType in [self.event_pipelinestart, self.event_downloadinput]] + events = [self.create_event(eventType) + for eventType in [self.event_pipelinestart, self.event_downloadinput]] events = events + self.define_process_events() - events = events + [self.create_event(eventType) for eventType in [self.event_uploadoutput, self.event_pipelineend]] + events = events + [self.create_event(eventType) + for eventType in 
[self.event_uploadoutput, self.event_pipelineend]] for event in events: self.eventsMap[event.eventId] = event + # Keep original comments as below # then when it is their time to nofify, call notify passing in jobstatu_id and timestamp # need an imple method for subclasses to list their process events # maybe a map of event types to event, then a submap for process events? @@ -116,7 +129,8 @@ def define_events(self): # and maybe there is no reason to print them in the tracker anymore def register_events(self): - # get all the events, register them with the midas server + """"Get all the status events of this pipeline, register them with the + Midas server""" self.define_events() events = self.eventsMap.values() method = 'midas.pyslicer.add.jobstatuses' @@ -131,9 +145,9 @@ def register_events(self): for (eventId, jobstatusId) in eventId_to_jobstatusId.items(): event = self.eventsMap[eventId] event.jobstatusId = jobstatusId - + def get_events(self): - # get all the events for a given job + """Get all the registered status events for a given pipeline job""" self.define_events() method = 'midas.pyslicer.get.job.status' parameters = {} @@ -149,57 +163,66 @@ def get_events(self): event.jobstatusId = jobstatus['jobstatus_id'] def define_process_events(self): - # should be overwritten in the subclass + """Define the process events for a pipeline. 
It should be overwritten + in the subclass""" return [] def createTmpDir(self): + """Create a temporary directory for a pipeline job""" self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) # clear it out if it already exists if(os.path.exists(self.tmpdirpath)): self.removeTmpDir() os.mkdir(self.tmpdirpath) - # create a data dir + # create a directory for input data self.dataDir = os.path.join(self.tmpdirpath, 'data') os.mkdir(self.dataDir) - # create an out dir + # create a directory for output results self.outDir = os.path.join(self.tmpdirpath, 'out') os.mkdir(self.outDir) - + def downloadInput(self): + """Report download input event and then call the actual download function""" self.reportStatus(self.event_downloadinput) self.downloadInputImpl() - + def downloadInputImpl(self): + """Download one input file. It should be overwritten by subclasses if + they need more input data""" self.tempfiles = {} self.tempfiles['inputfile'] = self.downloadItem(self.itemId) print self.tempfiles def downloadItem(self, itemId): + """Download a single item from the Midas server using pydas""" (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) pydas.api._download_item(itemId, self.dataDir) - # unzip any zipped files + # Unzip any zipped files for filename in os.listdir(self.dataDir): if filename.endswith('.zip'): filepath = os.path.join(self.dataDir, filename) zip = zipfile.ZipFile(filepath) zip.extractall(self.dataDir) zip.close() - # return the path to the name of the item + # Return the path to the name of the item item = pydas.session.communicator.item_get(pydas.session.token, itemId) return item['name'] - + def executeDownload(self): + """Register a pipeline's events, start the pipeline as a Midas job, + create the temporary directory for this job, and then download + the input file(s)""" try: self.register_events() - # send pydas pipeline started + # Send pydas pipeline started event 
self.reportMidasStatus(self.midasstatus_started) self.reportStatus(self.event_pipelinestart) self.createTmpDir() self.downloadInput() print self.tempfiles return (self.dataDir, self.outDir, self.tempfiles) - except Exception as exception: + except StandardError as exception: # TODO where to do exceptions status and conditions self.log.exception(exception) print self.event_exception @@ -208,36 +231,43 @@ def executeDownload(self): emsg = repr(traceback.format_exception(etype, value, tb)) self.reportMidasStatus(self.midasstatus_exception, emsg) exit(1) - + def setTmpDir(self): + """Set temporary directory""" self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) self.dataDir = os.path.join(self.tmpdirpath, 'data') self.outDir = os.path.join(self.tmpdirpath, 'out') def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, itemDescription=None): - # read everything in the srcDir and upload it as a single item - # create a new item - # need a folder id + """Read everything in the srcDir and upload it as a single item. 
If + outFile is set, only upload that file""" (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) + # Create a new item (fold_id is required) if itemDescription is not None: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId, description=itemDescription) + item = pydas.session.communicator.create_item(pydas.session.token, + itemName, outputFolderId, description=itemDescription) else: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId) + item = pydas.session.communicator.create_item(pydas.session.token, + itemName, outputFolderId) itemId = item['item_id'] if srcDir is None: srcDir = self.outDir if outFile is not None: - # only upload this one file - uploadToken = pydas.session.communicator.generate_upload_token(pydas.session.token, itemId, outFile) + # Only upload this one file + uploadToken = pydas.session.communicator.generate_upload_token( + pydas.session.token, itemId, outFile) filepath = os.path.join(srcDir, outFile) - pydas.session.communicator.perform_upload(uploadToken, outFile, itemid=itemId, filepath=filepath) + pydas.session.communicator.perform_upload(uploadToken, outFile, + itemid=itemId, filepath=filepath) else: for filename in os.listdir(srcDir): - uploadToken = pydas.session.communicator.generate_upload_token(pydas.session.token, itemId, filename) + uploadToken = pydas.session.communicator.generate_upload_token( + pydas.session.token, itemId, filename) filepath = os.path.join(srcDir, filename) - pydas.session.communicator.perform_upload(uploadToken, filename, itemid=itemId, filepath=filepath) - # set the output item as an output for the job + pydas.session.communicator.perform_upload(uploadToken, filename, + itemid=itemId, filepath=filepath) + # Set the output item as an output for the job method = 'midas.pyslicer.add.job.output.item' parameters = {} parameters['token'] = pydas.session.token @@ -246,24 +276,30 @@ def 
uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, itemDe print parameters pydas.session.communicator.request(method, parameters) return itemId - + def uploadOutput(self): + """Report upload output event and then call the actual upload function""" self.reportStatus(self.event_uploadoutput) self.uploadOutputImpl() - + def uploadOutputImpl(self): + """Upload output file(s) of the pipeline job. It should be overwritten + by subclasses""" pass def executeUpload(self): + """Get registered events for this pipeline, get the temporary directory + for this job, upload the output file(s), delete the temporary directory, + and then end the pipeline""" try: self.get_events() self.setTmpDir() self.uploadOutput() self.removeTmpDir() self.reportStatus(self.event_pipelineend) - # send pydas pipeline finished + # Send pydas pipeline finished event self.reportMidasStatus(self.midasstatus_done) - except Exception as exception: + except StandardError as exception: # TODO where to do exceptions status and conditions self.log.exception(exception) print self.event_exception @@ -272,15 +308,17 @@ def executeUpload(self): emsg = repr(traceback.format_exception(etype, value, tb)) self.reportMidasStatus(self.midasstatus_exception, emsg) exit(1) - + def removeTmpDir(self): + """delete the temporary directory and its sub-directories and files""" shutil.rmtree(self.tmpdirpath) def reportProcessStatus(self, message=None): self.reportStatus(self.event_process, message) def reportStatus(self, eventType, message=None): - # find the event + """Report event status to the Midas server""" + # Find the event match = None for event in self.eventsMap.values(): if event.eventType == eventType and event.message == message: @@ -298,8 +336,10 @@ def reportStatus(self, eventType, message=None): parameters['jobstatus_id'] = match.jobstatusId parameters['notify_date'] = timestamp pydas.session.communicator.request(method, parameters) - + def reportMidasStatus(self, status, condition=None): + 
"""Report the pipeline job status (started, done, exception) to the + Midas server""" # TODO add these methods to pydas # TODO add condition to api call (email, apiKey, url) = self.pydasParams @@ -331,31 +371,40 @@ def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): if 'outputitemname' in jsonArgs: self.outputItemName = jsonArgs['outputitemname'][0] self.outputFolderId = jsonArgs['outputfolderid'][0] - # + def define_process_events(self): - process_events = [self.loaded_input_volume, self.started_segmentation, self.finished_segmentation, self.started_modelmaker, self.finished_modelmaker, self.wrote_model_output] + """Define the process events for simple region growing segmentation.""" + process_events = [self.loaded_input_volume, self.started_segmentation, + self.finished_segmentation, self.started_modelmaker, + self.finished_modelmaker, self.wrote_model_output] process_events = [self.create_process_event(eventType) for eventType in process_events] print process_events return process_events def uploadOutputImpl(self): - #print "segmodeluploadoutputimpl" + """Upload output item (surface model) to the Midas server.""" self.outFile = self.outputItemName + '.vtp' itemId = self.uploadItem(self.outFile, self.outputFolderId) (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) # TODO move metadata to superclass - # set metadata on the output item + # Set metadata on the output item method = 'midas.item.setmultiplemetadata' parameters = {} parameters['token'] = pydas.session.token parameters['itemid'] = itemId parameters['count'] = 2 + # Use red to display the input seed and the contour of the output + # surface model in ParaView parameters['element_1'] = 'ParaView' - parameters['element_2'] = 'ParaView' parameters['qualifier_1'] = 'DiffuseColor' - parameters['qualifier_2'] = 'Orientation' parameters['value_1'] = '[1.0,0.0,0.0]' + # In ParaView, use [180, 180, 0] orientation to display the surface model + # which 
is generated by Slicer's model maker + # TODO: make sure this hard-coded orientation on the ModelMaker output + # is always correct + parameters['element_2'] = 'ParaView' + parameters['qualifier_2'] = 'Orientation' parameters['value_2'] = '[180.0,180.0,0.0]' print parameters pydas.session.communicator.request(method, parameters) @@ -383,20 +432,21 @@ def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): self.outputTransformName = jsonArgs['output_transform_name'][0] def define_process_events(self): + """Define the process events for simple region growing registration.""" process_events = [self.loaded_input_volumes, self.finished_registration, self.wrote_transformed_volume, self.wrote_transform] process_events = [self.create_process_event(eventType) for eventType in process_events] print process_events return process_events - + def downloadInputImpl(self): - print "regmodeldownloadinputimpl" + """Download input items.""" self.tempfiles = {} self.tempfiles['fixed_volume_file'] = self.downloadItem(self.fixedItemId) self.tempfiles['moving_volume_file'] = self.downloadItem(self.movingItemId) print self.tempfiles def uploadOutputImpl(self): - #print "regmodeluploadoutputimpl" + """Upload output items to the Midas server.""" (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) folder = pydas.session.communicator.create_folder(pydas.session.token, 'output_'+self.jobId, self.outputFolderId) diff --git a/library/reg_slicerjob.py b/library/reg_slicerjob.py index a284e87..5c1db69 100644 --- a/library/reg_slicerjob.py +++ b/library/reg_slicerjob.py @@ -1,21 +1,26 @@ -from __main__ import vtk, slicer import os import json + +import vtk, slicer + from slicerjob import SlicerJob import slicer_utils class SlicerReg(SlicerJob): - """This class implements a job executed in Slicer's Python environment - simple region growing registration""" + """This class implements a job executed in Slicer's Python environment: + simple 
region growing registration""" loaded_input_volumes = "Loaded Input Volumes" finished_registration = "Finished Registration" wrote_transformed_volume = "Wrote Transformed Volume" wrote_transform = "Wrote Transform" - - def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, - fixedVolumeFile, movingVolumeFile, fixedItemId, movingItemId, - fixedFiducialsList, movingFiducialsList, transformType, - outputFolderId, outputVolumeName, outputTransformName): - SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, fixedVolumeFile, movingVolumeFile, + fixedItemId, movingItemId, fixedFiducialsList, + movingFiducialsList, transformType, outputFolderId, + outputVolumeName, outputTransformName): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, + dataDir, outDir, proxyurl) self.fixedVolumeFile = fixedVolumeFile self.movingVolumeFile = movingVolumeFile self.fixedItemId = fixedItemId @@ -27,7 +32,7 @@ def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir self.outputVolumeName = outputVolumeName self.outputTransformName = outputTransformName - def parseSeedpointsList(self, seedpointsList): + def parseSeedpointsList(self, seedpointsList): print 'parseSeedpointsList' #print seedpointsList #print type(seedpointsList) @@ -41,6 +46,7 @@ def parseSeedpointsList(self, seedpointsList): #types = [type(seed) for seed in seedpointsList] #print types # TODO something better, email from jc + # Keep original comments as above seedpoints = [(-1 * x, -1 * y, z) for (x, y, z) in seedpointsList] return seedpoints @@ -92,15 +98,19 @@ def jobEndingNotification(self, args=None): print 'reg pipeline', sys.argv (script, jobId, tmpDirRoot, jsonArgs) = sys.argv argMap = json.loads(jsonArgs) - #print argMap + pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) - 
(fixedItemId, movingItemId, fixedFiducialsList, movingFiducialsList, - transformType, outputFolderId, outputVolumeName, outputTransformName) = \ - (argMap['fixed_volume_file'][0], argMap['moving_volume_file'][0], - argMap['fixed_item_id'][0], argMap['moving_item_id'][0], - json.loads(argMap['fixed_fiducials'][0]), json.loads(argMap['moving_fiducials'][0]), - argMap['transform_type'][0], argMap['output_folder_id'][0], - argMap['output_volume_name'][0], argMap['output_transform_name'][0]) + + fixedVolumeFile = argMap['fixed_volume_file'][0] + movingVolumeFile = argMap['moving_volume_file'][0] + fixedItemId = argMap['fixed_volume_file'][0] + movingItemId = argMap['moving_volume_file'][0] + fixedFiducialsList = json.loads(argMap['fixed_fiducials'][0]) + movingFiducialsList = json.loads(argMap['moving_fiducials'][0]) + transformType = argMap['transform_type'][0] + outputFolderId = argMap['output_folder_id'][0] + outputVolumeName = argMap['output_volume_name'][0] + outputTransformName = argMap['output_transform_name'][0] print fixed_item_id, moving_item_id rp = SlicerReg(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, diff --git a/library/seg_slicerjob.py b/library/seg_slicerjob.py index 751854e..f24149b 100644 --- a/library/seg_slicerjob.py +++ b/library/seg_slicerjob.py @@ -1,6 +1,7 @@ -from __main__ import vtk, slicer import os import json + +import vtk, slicer from slicerjob import SlicerJob class SlicerSeg(SlicerJob): @@ -19,15 +20,15 @@ def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir self.outputItemName = outputItemName self.outputFolderId = outputFolderId - def process(self): - print "segmodelprocessimpl" - # take first two coords and multiply by -1 + """Execute simple region growing segmentation """ + print "start simple region growing segmentation" + # Take first two coords and multiply by -1 # TODO something much more systematic dealing with coords (x, y, z) = [float(coord) for coord in self.seed.split(',')] 
seedPointCoords = (-1 * x, -1 * y, z) - - # create the path to the desired output file + + # Create the path to the desired output file outFile = self.outputItemName + '.vtp' outPath = os.path.join(self.outDir, outFile) print outPath @@ -35,7 +36,7 @@ def process(self): print inputPath (status, inputVolume) = slicer.util.loadVolume(inputPath, returnNode=True) self.report_status(self.event_process, self.loaded_input_volume) - + # Run simple region segmentation in Slicer outVolume = slicer.vtkMRMLScalarVolumeNode() slicer.mrmlScene.AddNode(outVolume) fiducialNode = slicer.vtkMRMLAnnotationFiducialNode() @@ -48,7 +49,7 @@ def process(self): cliNode = slicer.cli.run(slicer.modules.simpleregiongrowingsegmentation, None, params , wait_for_completion=True) self.report_status(self.event_process, self.finished_segmentation) - # make a model, only param is name of output file + # Call Slicer's model make to create a model, the only parameter is name of output file modelmaker = slicer.modules.modelmaker mhn = slicer.vtkMRMLModelHierarchyNode() slicer.mrmlScene.AddNode(mhn) @@ -57,7 +58,7 @@ def process(self): cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) self.report_status(self.event_process, self.finished_modelmaker) - # output the model + # Export the model to local disk # TODO change to method without memory leaks outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() modelStorage = outputModelNode.CreateDefaultStorageNode() @@ -67,8 +68,8 @@ def process(self): self.outFile = outFile self.report_status(self.event_process, self.wrote_model_output) - def jobEndingNotification(self, args=None): + """Send job ending notification to Twisted Server""" if args is not None: reqArgs = args.copy() else: @@ -78,6 +79,7 @@ def jobEndingNotification(self, args=None): SlicerJob.jobEndingNotification(self, reqArgs) def execute(self): + """Wrapper function to execute the entire slice job""" try: self.process() slicer.app.exit() @@ -97,10 
+99,17 @@ def execute(self): (script, jobId, tmpDirRoot, jsonArgs) = sys.argv argMap = json.loads(jsonArgs) pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) - (pipelineName, dataDir, outDir, proxyurl, inputFile, coords, outputItemName, outputFolderId) \ - = (argMap['pipeline'][0], argMap['data_dir'][0], argMap['out_dir'][0], - argMap['proxyurl'][0], argMap['inputfile'][0], argMap['coords'][0], - argMap['outputitemname'][0], argMap['outputfolderid'][0]) + + pipelineName = argMap['pipeline'][0] + dataDir = argMap['data_dir'][0] + outDir = argMap['out_dir'][0] + proxyurl = argMap['proxyurl'][0] + inputFile = argMap['inputfile'][0] + coords = argMap['coords'][0] + outputItemName = argMap['outputitemname'][0] + outputFolderId = argMap['outputfolderid'][0] + sp = SlicerSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, - outDir, proxyurl, inputFile, coords, outputItemName, outputFolderId) + outDir, proxyurl, inputFile, coords, outputItemName, + outputFolderId) sp.execute() diff --git a/library/slicerjob.py b/library/slicerjob.py index bdc0afd..5cb1577 100644 --- a/library/slicerjob.py +++ b/library/slicerjob.py @@ -17,18 +17,23 @@ def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir self.proxyurl = proxyurl def report_status(self, eventType, message): + """Send the pipeline status event information to the Twisted Server and + let it report the status event to the Midas server.""" args = {} args['pipeline'] = self.pipelineName args['job_id'] = self.jobId (args['email'], args['apikey'], args['url']) = self.pydasParams args['event_type'] = eventType args['message'] = message + # By default, Python requests module is not available in Slicer's python environment data = urllib.urlencode(args) request = urllib2.Request(self.proxyurl + "slicerjob/reportstatus?" 
+ data) response = urllib2.urlopen(request) print response def jobEndingNotification(self, args=None): + """Send the pipeline ending information to the Twisted Server, and let + it report the status event and upload the output to the Midas server.""" if args is not None: reqArgs = args.copy() else: diff --git a/library/slicerprocess.py b/library/slicerprocess.py index 3f4cd9d..f3740d8 100755 --- a/library/slicerprocess.py +++ b/library/slicerprocess.py @@ -6,13 +6,15 @@ from pipeline import PipelineStatusEvent, PipelineFactory class SlicerProcessJobManager(): - """This class should implement Pyslicer's own process manager. Pyslicer sitll uses Slicer's process manager as of now.""" + """This class should implement Pyslicer's own process manager. Pyslicer still uses Slicer's process manager as of now.""" + # Keep the original code of this class as of now def __init__(self, tmpDirRoot, slicerPath, proxyurl): self.jobs = {} #self.processCount = 0 self.tmpDirRoot = tmpDirRoot self.slicerPath = slicerPath self.proxyurl = proxyurl + # def getNextJobId(self): # # could be problematic if multithreaded # jobId = self.processCount @@ -41,7 +43,8 @@ def getStatus(self, jobId=None): return status class SlicerProcess(protocol.ProcessProtocol): - """This class implements a twisted process which runs a python script within Slicer's Python environment.""" + """This class implements a twisted process which runs a python script within + Slicer's Python environment.""" def __init__(self, jobManager, jobId, pipelineName, requestArgs): self.jobManager = jobManager self.jobId = jobId @@ -94,18 +97,22 @@ def processEnded(self, reason): print str(self.jobId) + "processEnded, status %d" % (reason.value.exitCode,) def run(self): + """Trigger the slicer job by running a python script in Slicer's environment""" xvfbLogfile = os.path.join(self.jobManager.tmpDirRoot, 'xvfb.log') xvfbCmdParts = ['xvfb-run', '-a', '-e', xvfbLogfile] slicerArgs = ['--no-main-window', '--python-script'] + # Get name 
of the python script for the pipeline pipelinefactory = PipelineFactory() slicerPythonScript = pipelinefactory.getSlicerScript(self.pipelineName) + # Input parameters from the HTTP request jsonArgs = json.dumps(self.requestArgs) - print jsonArgs - print [jsonArgs] print [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs] - slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + [slicerPythonScript] + [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs]#['slicerjob'] + # Slicer job in command line + slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + \ + [slicerPythonScript] + [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs] cmd = xvfbCmdParts + slicerCmdParts print str(self.jobId) + " run: " + str(cmd) print ">>>>>>>>>>>>>>>SlicerProcess running:",str(cmd) + # TODO: Ensure that the proper xvfb-run is invoked reactor.spawnProcess(self, 'xvfb-run', cmd, env=os.environ, usePTY=True) diff --git a/library/twserver.py b/library/twserver.py index 646a0bb..e346a57 100644 --- a/library/twserver.py +++ b/library/twserver.py @@ -34,12 +34,13 @@ class SlicerjobReportStatus(Resource): isLeaf = True def _report_status(self, request): + """Callback function to report pipeline status event to the Midas server""" print request.args print "SlicerjobReportStatus" if 'pipeline' in request.args and 'job_id' in request.args: jobId = request.args['job_id'][0] pipelineName = request.args['pipeline'][0] - tmpDir = os.getcwd(); + tmpDir = os.getcwd() pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) pipelinefactory = PipelineFactory() pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) @@ -55,6 +56,7 @@ def _report_status(self, request): request.finish() def render_GET(self, request): + """Handle report job status request asynchronously """ reactor.callLater(0, self._report_status, request) return NOT_DONE_YET @@ -65,19 +67,21 @@ def __init__(self, jobManager): self.jobManager = 
jobManager def _download_process(self, request): + """Callback function to download input file(s) from the Midas server, + and then start the slicer job""" print request.args print "SlicerjobInit download" request.write('init job') if 'pipeline' in request.args and 'job_id' in request.args: - print "YES" jobId = request.args['job_id'][0] pipelineName = request.args['pipeline'][0] print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER starting SlicerProcess" - tmpDir = os.getcwd(); + tmpDir = os.getcwd() pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) - print pydasParams; + print pydasParams request.write("\nstarted job " + str(jobId)) request.write("\nstarted downloading item(s)") + # Call pipeline's executeDownload function to do the real download pipelinefactory = PipelineFactory() pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) (self.dataDir, self.outDir, self.inputfiles) = pipeline.executeDownload() @@ -85,9 +89,11 @@ def _download_process(self, request): request.args['proxyurl'] = [self.jobManager.proxyurl] request.args['data_dir'] = [self.dataDir] request.args['out_dir'] = [self.outDir] + # Add the input files into the parameters for k, v in self.inputfiles.items(): - request.args[k] = [v] # save value in a list as other reqeust parameters + request.args[k] = [v] request.write("\nstarted processing item(s) ") + # Create a new process for the Slicer job asynchronously slicerJob = SlicerProcess(self.jobManager, jobId, pipelineName, request.args) d = defer.Deferred() reactor.callLater(0, d.callback, None) @@ -97,6 +103,7 @@ def _download_process(self, request): request.finish() def render_GET(self, request): + """Handle job init request asynchronously""" reactor.callLater(0, self._download_process, request) return NOT_DONE_YET @@ -106,18 +113,19 @@ class SlicerjobFinish(Resource): isLeaf = True def _upload(self, request): + """Callback function to upload output file(s) to the Midas server, 
+ after the slicer job finishes""" print request.args print "SlicerjobFinish" request.write('Upload output') if 'pipeline' in request.args and 'job_id' in request.args: - print "YES" jobId = request.args['job_id'][0] pipelineName = request.args['pipeline'][0] print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER finishing SlicerProcess" - tmpDir = os.getcwd(); - print tmpDir + tmpDir = os.getcwd() pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) response = "\nstarted uploading output item" + # Call pipeline's executeUpload function to do the real upload pipelinefactory = PipelineFactory() pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) pipeline.executeUpload() @@ -128,6 +136,7 @@ def _upload(self, request): request.finish() def render_GET(self, request): + """Handle job ending request asynchronously""" reactor.callLater(0, self._upload, request) return NOT_DONE_YET @@ -139,22 +148,20 @@ def render_GET(self, request): if __name__ == '__main__': # read the config file for slicer_path - configFile = open('twserver.cfg') config = {} - for line in configFile: - line = line.strip() - if line is not None and line != '': - cols = line.split('=') - config[cols[0]] = cols[1] - configFile.close() - import os + with open('twserver.cfg') as config_file: + for line in config_file.readlines(): + line = line.strip() + if line is not None and line != '': + cols = line.split('=') + config[cols[0]] = cols[1] if 'slicer_path' not in config or not os.path.isfile(config['slicer_path']): print "You must specify the path to the Slicer exe as slicer_path in twserver.cfg" exit(1) if 'proxy_url' not in config: print "You must specify the Slicer Proxy Server URL" exit(1) - # set current dir as temp working dir + # Set current directory as temporary working directory jobManager = SlicerProcessJobManager(os.getcwd(), config['slicer_path'], config['proxy_url']) root = ServerRoot() slicerjobRoot = Slicerjob()
+172,6 @@ def render_GET(self, request): slicerjobRoot.putChild('init', slicerjobInit) slicerjobRoot.putChild('finish', slicerjobFinish) slicerjobRoot.putChild('reportstatus', slicerjobReportStatus) + # Start Twisted server reactor.listenTCP(8880, server.Site(root)) reactor.run() From f2240da44dc547d81a30997833eb173efce45395 Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Wed, 5 Jun 2013 13:16:28 -0400 Subject: [PATCH 3/9] Add TubeTK PDF segmentation pipeline. --- constant/module.php | 10 +- controllers/components/ApiComponent.php | 188 +++++++++++++++++-- controllers/components/PipelineComponent.php | 60 +++++- library/pdfseg_slicerjob.py | 148 +++++++++++++++ library/pipeline.py | 84 ++++++++- library/reg_slicerjob.py | 1 + library/seg_slicerjob.py | 2 +- public/js/lib/pvw.paint.js | 56 ++++++ public/js/process/process.statuslist.js | 17 +- views/index/index.phtml | 1 + views/process/statuslist.phtml | 1 + 11 files changed, 541 insertions(+), 27 deletions(-) create mode 100644 library/pdfseg_slicerjob.py create mode 100644 public/js/lib/pvw.paint.js diff --git a/constant/module.php b/constant/module.php index 151b4d8..4e73ef9 100644 --- a/constant/module.php +++ b/constant/module.php @@ -24,11 +24,19 @@ define('MIDAS_PYSLICER_SEGMENTATION_PIPELINE', 'segmentation'); define('MIDAS_PYSLICER_VOLUMERENDERING_PIPELINE', 'volumerendering'); define('MIDAS_PYSLICER_REGISTRATION_PIPELINE', 'registration'); - +define('MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE', 'pdfsegmentation'); + +define('MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM', 100); +define('MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP', 101); +define('MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL', 102); +define('MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP', 103); + define('MIDAS_PYSLICER_SEGMENTATION_INPUT_COUNT', 1); define('MIDAS_PYSLICER_SEGMENTATION_OUTPUT_COUNT', 1); define('MIDAS_PYSLICER_REGISTRATION_INPUT_COUNT', 2); define('MIDAS_PYSLICER_REGISTRATION_OUTPUT_COUNT', 2); 
+define('MIDAS_PYSLICER_PDFSEGMENTATION_INPUT_COUNT', 1); +define('MIDAS_PYSLICER_PDFSEGMENTATION_OUTPUT_COUNT', 2); define('MIDAS_PYSLICER_EXPECTED_INPUTS', 'expectedinputs'); define('MIDAS_PYSLICER_EXPECTED_OUTPUTS', 'expectedoutputs'); diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index fb95ffa..baab9c5 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -108,14 +108,7 @@ public function startItemProcessing($args) throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); } - $userEmail = $userDao->getEmail(); - // get an api key for this user - $userApiModel = MidasLoader::loadModel('Userapi'); - $userApiDao = $userApiModel->getByAppAndUser('Default', $userDao); - if(!$userApiDao) - { - throw new Zend_Exception('You need to create a web-api key for this user for application: Default'); - } + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); // TODO store remote processing job info $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); @@ -128,7 +121,7 @@ public function startItemProcessing($args) // TODO json encode params set $job->setParams(JsonComponent::encode($seed)); $jobModel->save($job); - $jobModel->addItemRelation($job, $itemDao, MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT); + $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); if(isset($args['job_name'])) { @@ -148,9 +141,6 @@ public function startItemProcessing($args) // TODO switch to different pipeline types $jobInitPath = "/slicerjob/init/"; - $midasPath = Zend_Registry::get('webroot'); - $midasUrl = 'http://' . $_SERVER['HTTP_HOST'] . 
$midasPath; - $apiKey = $userApiDao->getApikey(); $parentFolderId = $parentFolder->getFolderId(); // TODO probably a security hole to put the email and api key in the url @@ -209,6 +199,165 @@ public function startItemProcessing($args) } } + /** + * Get Pydas required parameters from current session + * @return array(email => pydas_email, + * apikey => pydas_apikey, + * url => midas_url) + */ + public function getPydasParams() + { + $userDao = Zend_Registry::get('userSession')->Dao; + if(!$userDao) + { + throw new Exception('Anonymous users cannot use Pydas', MIDAS_PYSLICER_INVALID_POLICY); + } + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); + return array('email' => $userEmail, + 'apikey' => $apiKey, + 'url' => $midasUrl); + } + + /** + * Start TubeTK PDF segmentation on an item using the initial labelmap + * @param item_id The id of the item to be segmented + * @param labelmap_item_id The id of input label map item + * @param output_item_name The name of the created output surface model + * @param output_labelmap The name of the created output label map + * @param output_folder_id (optional) The id of the folder where the output + * items will be created; if not supplied, the first parent folder found on the + * input item will be used as the output folder. + * @param job_name (optional) The name of the processing job, if not supplied, + * will be given a name like "Slicer Job X" where x is the job id. 
+ * @return array(redirect => Url to show this job's status) + */ + public function startPdfsegmentation($args) + { + $this->_checkKeys(array('item_id', 'labelmap_item_id', 'output_item_name', + 'output_labelmap'), $args); + $userDao = $this->_getUser($args); + if(!$userDao) + { + throw new Exception('Anonymous users may not process items', MIDAS_PYSLICER_INVALID_POLICY); + } + + $itemModel = MidasLoader::loadModel('Item'); + $folderModel = MidasLoader::loadModel('Folder'); + + $itemId = $args['item_id']; + $labelmapItemId = $args['labelmap_item_id']; + $outputItemName = $args['output_item_name']; + $outputLabelmap = $args['output_labelmap']; + + // Check the input item + $itemDao = $itemModel->load($itemId); + if($itemDao === false) + { + throw new Zend_Exception('This item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($itemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on this item required.', MIDAS_PYSLICER_INVALID_POLICY); + } + // Check the input labelmap item + $labelmapItemDao = $itemModel->load($labelmapItemId); + if($labelmapItemDao === false) + { + throw new Zend_Exception('This input labelmap item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($labelmapItemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on the input labelmap item required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + // Check the output folder + if(isset($args['output_folder_id'])) + { + $outputFolderId = $args['output_folder_id']; + $parentFolder = $folderModel->load($outputFolderId); + } + else + { + $parentFolders = $itemDao->getFolders(); + $parentFolder = $parentFolders[0]; + } + if($parentFolder === false) + { + throw new Zend_Exception('This output folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$folderModel->policyCheck($parentFolder, $userDao, MIDAS_POLICY_WRITE)) + { + throw new Zend_Exception('Write access on this 
folder required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + // Get pydas required parameters + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); + + // Create a job and add input items + $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); + $job = MidasLoader::newDao('JobDao', 'remoteprocessing'); + $job->setCreatorId($userDao->getUserId()); + $job->setStatus(MIDAS_REMOTEPROCESSING_STATUS_WAIT); + $segmentationPipeline = 'pdfsegmentation'; + $job->setScript($segmentationPipeline); + $jobModel->save($job); + $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); + $jobModel->addItemRelation($job, $labelmapItemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP); + + if(isset($args['job_name'])) + { + $jobName = $args['job_name']; + } + else + { + $jobName = 'Slicer Job ' . $job->getKey(); + } + $job->setName($jobName); + $jobModel->save($job); + + $settingModel = MidasLoader::loadModel('Setting'); + $twistedServerUrl = $settingModel->getValueByName('slicerProxyUrl', 'pyslicer'); + + $jobInitPath = "/slicerjob/init/"; + $parentFolderId = $parentFolder->getFolderId(); + + // Construct URL to be sent to Twisted server + $slicerjobParams = array('pipeline' => $segmentationPipeline, + 'url' => $midasUrl, + 'email' => $userEmail, + 'apikey' => $apiKey, + 'inputitemid' => $itemId, + 'inputlabelmapid' => $labelmapItemId, + 'outputfolderid' => $parentFolderId, + 'outputitemname' => $outputItemName, + 'outputlabelmap' => $outputLabelmap, + 'job_id' => $job->getKey()); + $requestParams = ""; + $ind = 0; + foreach ($slicerjobParams as $name => $value) + { + if ($ind > 0) + { + $requestParams .= "&"; + } + $requestParams .= $name . '=' . $value; + $ind++; + } + + $url = $twistedServerUrl . $jobInitPath . '?' . 
$requestParams; + // Send request to Twisted Server + $data = file_get_contents($url); + if($data === false) + { + throw new Zend_Exception("Cannot connect with Slicer Server."); + } + else + { + $redirectURL = $midasUrl . '/pyslicer/process/status?jobId='.$job->getJobId(); + return array('redirect' => $redirectURL); + } + } + protected function _loadValidItem($userDao, $itemId, $paramName) { @@ -254,7 +403,7 @@ protected function _getConnectionParams($userDao) $midasPath = Zend_Registry::get('webroot'); $midasUrl = 'http://' . $_SERVER['HTTP_HOST'] . $midasPath; - $userApiModel = MidasLoader::loadModel('Userapi', 'api'); + $userApiModel = MidasLoader::loadModel('Userapi'); $userApiDao = $userApiModel->getByAppAndUser('Default', $userDao); if(!$userApiDao) { @@ -279,7 +428,7 @@ protected function _createJob($userDao, $script, $params, $inputItems, $synthesi $jobModel->save($job); foreach($inputItems as $itemDao) { - $jobModel->addItemRelation($job, $itemDao, MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT); + $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); } // now that a job id is defined... @@ -645,6 +794,8 @@ public function getJobstatus($args) * add an output item to a job * @param job_id the id of the job to update * @param item_id the id of the item to set as an output of the job + * @param type(Optional) output item types + * (currently only support two types: 'surface_model' (default), 'labelmap') * @return array('success' => 'true') if successful. 
*/ public function addJobOutputItem($args) @@ -676,7 +827,14 @@ public function addJobOutputItem($args) throw new Zend_Exception('This item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); } - $jobModel->addItemRelation($job, $item, MIDAS_REMOTEPROCESSING_RELATION_TYPE_OUPUT); + if(array_key_exists('type', $args) && $args['type'] == 'labelmap') + { + $jobModel->addItemRelation($job, $item, MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP); + } + else + { + $jobModel->addItemRelation($job, $item, MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL); + } return array('success' => 'true'); } diff --git a/controllers/components/PipelineComponent.php b/controllers/components/PipelineComponent.php index b8cb770..16d397c 100644 --- a/controllers/components/PipelineComponent.php +++ b/controllers/components/PipelineComponent.php @@ -45,7 +45,12 @@ class Pyslicer_PipelineComponent extends AppComponent MIDAS_PYSLICER_EXPECTED_INPUTS => MIDAS_PYSLICER_REGISTRATION_INPUT_COUNT, MIDAS_PYSLICER_EXPECTED_OUTPUTS => MIDAS_PYSLICER_REGISTRATION_OUTPUT_COUNT, MIDAS_PYSLICER_INPUT_GENERATOR => 'registrationInputLinks', - MIDAS_PYSLICER_OUTPUT_GENERATOR => 'registrationOutputLinks')); + MIDAS_PYSLICER_OUTPUT_GENERATOR => 'registrationOutputLinks'), + MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE => array( + MIDAS_PYSLICER_EXPECTED_INPUTS => MIDAS_PYSLICER_PDFSEGMENTATION_INPUT_COUNT, + MIDAS_PYSLICER_EXPECTED_OUTPUTS => MIDAS_PYSLICER_PDFSEGMENTATION_OUTPUT_COUNT, + MIDAS_PYSLICER_INPUT_GENERATOR => 'pdfsegmentationInputLinks', + MIDAS_PYSLICER_OUTPUT_GENERATOR => 'pdfsegmentationOutputLinks'),); protected $missingInputs = array( array ('text' => 'Error: missing input', 'url' => '')); protected $missingOutputs = array( array ('text' => 'Error: missing output', 'url' => '')); @@ -58,7 +63,7 @@ function init() function segmentationInputLinks($job, $inputs, $outputs, $midasPath) { - $inputItemId = $inputs[0]->getItemId(); + $inputItemId = 
$inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId; $sliceView = $midasPath . '/pvw/paraview/slice?itemId='.$inputItemId; @@ -120,6 +125,46 @@ function registrationOutputLinks($job, $inputs, $outputs, $midasPath) return array( array ('text' => $outputLinkText, 'url' => $outputLink)); } + function pdfsegmentationInputLinks($job, $inputs, $outputs, $midasPath) + { + $inputItemId = $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); + // Initial label map + $inputLabelmapItemId = + $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP]->getItemId(); + $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId; + $labelmapSliceView = $midasPath . '/pvw/paraview/slice?itemId=' . + $inputItemId . '&labelmaps=' . $inputLabelmapItemId; + + return array( array ('text' => 'slice view (with initial labelmap)', 'url' => $labelmapSliceView), + array ('text' => 'volume view', 'url' => $volumeView)); + } + + function pdfsegmentationOutputLinks($job, $inputs, $outputs, $midasPath) + { + $inputItemId = $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); + // Surface model of output label map + $outputModelItemId = + $outputs[MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL]->getItemId(); + // Output label map + $outputLabelmapItemId = + $outputs[MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP]->getItemId(); + + $meshView = $midasPath . '/pvw/paraview/surface?itemId=' . $outputModelItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId=' . $inputItemId . + '&meshes=' . $outputModelItemId . '&jsImports=' . + $midasPath . '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $volumeView = $midasPath . '/pvw/paraview/volume?itemId=' . $inputItemId . + '&meshes=' . $outputModelItemId . '&jsImports=' . + $midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $labelmapSliceView = $midasPath . '/pvw/paraview/slice?itemId=' . + $inputItemId . 
'&labelmaps=' . $outputLabelmapItemId; + + return array( array ('text' => 'surface model mesh view', 'url' => $meshView), + array ('text' => 'surface model contour slice view', 'url' => $sliceView), + array ('text' => 'surface model volume view', 'url' => $volumeView), + array ('text' => 'label map slice view', 'url' => $labelmapSliceView)); + } + public function resolveInputsAndOutputs($job) { $midasPath = Zend_Registry::get('webroot'); @@ -129,13 +174,16 @@ public function resolveInputsAndOutputs($job) $relatedItems = $jobModel->getRelatedItems($job); foreach($relatedItems as $item) { - if($item->getType() == MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT) + $itemType = $item->getType(); + if($itemType == MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM || + $itemType == MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP) { - $inputs[] = $item; + $inputs[$itemType] = $item; } - elseif($item->getType() == MIDAS_REMOTEPROCESSING_RELATION_TYPE_OUPUT) + elseif($itemType == MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP || + $itemType == MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL) { - $outputs[] = $item; + $outputs[$itemType] = $item; } } diff --git a/library/pdfseg_slicerjob.py b/library/pdfseg_slicerjob.py new file mode 100644 index 0000000..39bc886 --- /dev/null +++ b/library/pdfseg_slicerjob.py @@ -0,0 +1,148 @@ +import os +import json + +import vtk, slicer + +from slicerjob import SlicerJob + +class SlicerPdfSeg(SlicerJob): + """This class implements a job executed in Slicer's Python environment - + TubeTK PDF segmentation""" + loaded_input_volume = "Loaded Input Volume" + loaded_label_map = "Loaded Input Label Map" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + wrote_segmentation_output = "Wrote Segmentation Output" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, 
dataDir, + outDir, proxyurl, inputFile, inputLabelMap, outputItemName, + outputLabelMap, outputFolderId): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, + dataDir, outDir, proxyurl) + self.inputFile = inputFile + self.inputLabelMap = inputLabelMap + self.outputItemName = outputItemName + self.outputLabelMap = outputLabelMap + self.outputFolderId = outputFolderId + + def process(self): + """Execute TubeTK PDF segmentation """ + # Create the path to the desired output surface model file + outFile = self.outputItemName + '.vtp' + outPath = os.path.join(self.outDir, outFile) + print outPath + # Create the path to the desired output labelmap file + outLabelMap = self.outputLabelMap + '.mha' + outLabelMapPath = os.path.join(self.outDir, outLabelMap) + print outLabelMapPath + # Load input item and the initial labelmap + inputPath = os.path.join(self.dataDir, self.inputFile) + print inputPath + (input_status, inputVolume) = slicer.util.loadVolume(inputPath, returnNode=True) + self.report_status(self.event_process, self.loaded_input_volume) + inputLabelMapPath = os.path.join(self.dataDir, self.inputLabelMap) + print inputLabelMapPath + (labelmap_status, labelMapVolume) = slicer.util.loadVolume(inputLabelMapPath, returnNode=True) + self.report_status(self.event_process, self.loaded_label_map) + # Set parameters for PDF segmentation + outVolume = slicer.vtkMRMLScalarVolumeNode() + slicer.mrmlScene.AddNode(outVolume) + # use 0 as voidId + voidId = 0 + params = {'inputVolume1': inputVolume.GetID(), 'labelmap': labelMapVolume.GetID(), 'outputVolume': outVolume.GetID(), 'voidId': voidId} + # Get objectId from the initial label map + accum = vtk.vtkImageAccumulate() + accum.SetInput(labelMapVolume.GetImageData()) + accum.UpdateWholeExtent() + data = accum.GetOutput() + data.Update() + numBins = accum.GetComponentExtent()[1] + labels = [] + for i in range(0, numBins + 1): + numVoxels = data.GetScalarComponentAsDouble(i, 0, 0, 0) + if (numVoxels != 0): + 
labels.append(i) + if voidId in labels: + labels.remove(voidId) + params["objectId"] = labels + print labels + + self.report_status(self.event_process, self.started_segmentation) + # Run PDF segmentation in Slicer + cliNode = slicer.cli.run(slicer.modules.segmentconnectedcomponentsusingparzenpdfs, None, params, wait_for_completion=True) + self.report_status(self.event_process, self.finished_segmentation) + + # Export output label map to local disk + save_node_params = {'fileType': 'mha'} + slicer.util.saveNode(outVolume, outLabelMapPath, save_node_params) + self.report_status(self.event_process, self.wrote_segmentation_output) + + # Call model maker to create a surface model for the output labelmap + modelmaker = slicer.modules.modelmaker + mhn = slicer.vtkMRMLModelHierarchyNode() + slicer.mrmlScene.AddNode(mhn) + parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} + self.report_status(self.event_process, self.started_modelmaker) + cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) + self.report_status(self.event_process, self.finished_modelmaker) + + # Export output surface model to local disk + # TODO change to method without memory leaks + outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() + modelStorage = outputModelNode.CreateDefaultStorageNode() + slicer.mrmlScene.AddNode(modelStorage) + modelStorage.SetFileName(outPath) + modelStorage.WriteData(outputModelNode) + self.outFile = outFile + self.report_status(self.event_process, self.wrote_model_output) + + def jobEndingNotification(self, args=None): + """Send job ending notification to Twisted Server""" + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['outputitemname'] = self.outputItemName + reqArgs['outputlabelmap'] = self.outputLabelMap + reqArgs['outputfolderid'] = self.outputFolderId + SlicerJob.jobEndingNotification(self, reqArgs) + + def execute(self): + """Wrapper function to execute the entire slice 
job""" + try: + self.process() + slicer.app.exit() + self.jobEndingNotification() + except StandardError as exception: + # TODO where to do exceptions status and conditions + # self.log.exception(exception) + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + print emsg + self.report_status(self.event_exception, emsg) + exit(1) + + +if __name__ == '__main__': + (script, jobId, tmpDirRoot, json_args) = sys.argv + arg_map = json.loads(json_args) + + pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) + pipelineName = arg_map['pipeline'][0] + dataDir = arg_map['data_dir'][0] + outDir = arg_map['out_dir'][0] + proxyurl = arg_map['proxyurl'][0] + inputFile = arg_map['inputfile'][0] + inputLabelMap = arg_map['inputlabelmap'][0] + outputItemName = arg_map['outputitemname'][0] + outputLabelMap = arg_map['outputlabelmap'][0] + outputFolderId = arg_map['outputfolderid'][0] + + sp = SlicerPdfSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, inputFile, inputLabelMap, + outputItemName, outputLabelMap, outputFolderId) + sp.execute() diff --git a/library/pipeline.py b/library/pipeline.py index 33258db..b6fdf70 100755 --- a/library/pipeline.py +++ b/library/pipeline.py @@ -63,6 +63,9 @@ def getPipeline(self, pipelineName, jobId, pydasParams, tmpDirRoot, args): return SegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) elif pipelineName == 'registration': return RegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + elif pipelineName == 'pdfsegmentation': + return PdfSegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + else: return None @@ -73,6 +76,8 @@ def getSlicerScript(self, pipelineName): return 'seg_slicerjob.py' elif pipelineName == 'registration': return 'reg_slicerjob.py' + elif pipelineName == 'pdfsegmentation': + return 'pdfseg_slicerjob.py' else: return None @@ -238,7 +243,8 @@ def setTmpDir(self): self.dataDir = 
os.path.join(self.tmpdirpath, 'data') self.outDir = os.path.join(self.tmpdirpath, 'out') - def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, itemDescription=None): + def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, + itemDescription=None, type=None): """Read everything in the srcDir and upload it as a single item. If outFile is set, only upload that file""" (email, apiKey, url) = self.pydasParams @@ -273,6 +279,8 @@ def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, itemDe parameters['token'] = pydas.session.token parameters['job_id'] = self.jobId parameters['item_id'] = itemId + if type is not None: + parameters['type'] = type print parameters pydas.session.communicator.request(method, parameters) return itemId @@ -454,3 +462,77 @@ def uploadOutputImpl(self): itemId = self.uploadItem(self.outputVolumeName, folderId, self.transformed_volume, itemDescription='output volume') itemId = self.uploadItem(self.outputTransformName, folderId, self.transform, itemDescription='output transform') + +class PdfSegPipeline(Pipeline): + """This class implements a pipeline for TubeTK PDF segmentation.""" + loaded_input_volume = "Loaded Input Volume" + loaded_label_map = "Loaded Input Label Map" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + wrote_segmentation_output = "Wrote Segmentation Output" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, json_args): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print json_args + if 'inputitemid' in json_args: + self.itemId = json_args['inputitemid'][0] + self.labelMapItemId = json_args['inputlabelmapid'][0] + if 'outputitemname' in json_args: + self.outputItemName = json_args['outputitemname'][0] + self.outputLabelMap = 
json_args['outputlabelmap'][0] + self.outputFolderId = json_args['outputfolderid'][0] + + def define_process_events(self): + """Define the process events for TubeTK PDF segmentation.""" + process_events = [self.loaded_input_volume, self.loaded_label_map, + self.started_segmentation, self.finished_segmentation, + self.wrote_segmentation_output, self.started_modelmaker, + self.finished_modelmaker, self.wrote_model_output] + process_events = [self.create_process_event(event_type) for event_type in process_events] + print process_events + return process_events + + def downloadInputImpl(self): + """Download input item and initial label map from the Midas server.""" + self.tempfiles = {} + self.tempfiles['inputfile'] = self.downloadItem(self.itemId) + self.tempfiles['inputlabelmap'] = self.downloadItem(self.labelMapItemId) + print self.tempfiles + + def uploadOutputImpl(self): + """Upload output labelmap and its surface model to the Midas server.""" + # Upload output labelmap file (its type is 'labelmap') + self.outLabelMapFile = self.outputLabelMap + '.mha' + os.mkdir(os.path.join(self.outDir, "labelmap")) + labelMapTmpDir = os.path.join(self.outDir, "labelmap") + shutil.copy(os.path.join(self.outDir, self.outLabelMapFile), labelMapTmpDir) + labelmapItemId = self.uploadItem(self.outLabelMapFile, self.outputFolderId, srcDir=labelMapTmpDir, type='labelmap') + # Upload output surface model file (default type) + self.outFile = self.outputItemName + '.vtp' + os.mkdir(os.path.join(self.outDir, "outfile")) + outFileTmpDir = os.path.join(self.outDir, "outfile") + shutil.copy(os.path.join(self.outDir, self.outFile), outFileTmpDir) + itemId = self.uploadItem(self.outFile, self.outputFolderId, srcDir=outFileTmpDir) + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + # Set metadata on the output item + method = 'midas.item.setmultiplemetadata' + parameters = {} + parameters['token'] = pydas.session.token + parameters['itemid'] = itemId + 
parameters['count'] = 2 + # Use red to display the input seed and the contour of the output + # surface model in ParaView + parameters['element_1'] = 'ParaView' + parameters['qualifier_1'] = 'DiffuseColor' + parameters['value_1'] = '[1.0,0.0,0.0]' + # In ParaView, use [180, 180, 0] orientation to display the surface model + # which is generated by Slicer's model maker + parameters['element_2'] = 'ParaView' + parameters['qualifier_2'] = 'Orientation' + parameters['value_2'] = '[180.0,180.0,0.0]' + print parameters + pydas.session.communicator.request(method, parameters) diff --git a/library/reg_slicerjob.py b/library/reg_slicerjob.py index 5c1db69..06cbbc8 100644 --- a/library/reg_slicerjob.py +++ b/library/reg_slicerjob.py @@ -9,6 +9,7 @@ class SlicerReg(SlicerJob): """This class implements a job executed in Slicer's Python environment: simple region growing registration""" + # TODO run this pipeline using pvw module instead of visualize module loaded_input_volumes = "Loaded Input Volumes" finished_registration = "Finished Registration" wrote_transformed_volume = "Wrote Transformed Volume" diff --git a/library/seg_slicerjob.py b/library/seg_slicerjob.py index f24149b..3211a97 100644 --- a/library/seg_slicerjob.py +++ b/library/seg_slicerjob.py @@ -84,7 +84,7 @@ def execute(self): self.process() slicer.app.exit() self.jobEndingNotification() - except Exception as exception: + except StandardError as exception: # TODO where to do exceptions status and conditions #self.log.exception(exception) import traceback diff --git a/public/js/lib/pvw.paint.js b/public/js/lib/pvw.paint.js new file mode 100644 index 0000000..b0a517d --- /dev/null +++ b/public/js/lib/pvw.paint.js @@ -0,0 +1,56 @@ +var midas = midas || {}; +midas.pvw = midas.pvw || {}; + +/** + * Activate paint mode as soon as we finished initialization + */ +midas.pvw.postInitCallback = function () { + $('button.paintButton').click(); +}; + +/** + * Callback handler to trigger pdf segmentation pipeline + */ 
+midas.pvw.handlePDFSegmentation = function (labelmapItemId) { + $('div.MainDialog').dialog('close'); + // Get output item name + var html = '


'; + html += ''; + html += '
'; + html += ''; + html += ''; + html += '
'; + midas.showDialogWithContent('Choose name for output item', html, false); + $('#processItemSlicerOutputName').focus(); + $('#processItemSlicerOutputName').select(); + + $('button.processItemSlicerYes').unbind('click').click(function () { + var outputItemName = $('#processItemSlicerOutputName').val(); + var outputLabelmap = $('#processItemSlicerOutputName').val() + '-label'; + + $('#processingPleaseWait').show(); + ajaxWebApi.ajax({ + method: 'midas.pyslicer.start.pdfsegmentation', + args: 'item_id=' + json.pvw.item.item_id + '&labelmap_item_id=' + labelmapItemId + '&output_item_name=' + outputItemName + '&output_labelmap=' + outputLabelmap, + success: function (results) { + $('div.MainDialog').dialog('close'); + $('#processingPleaseWait').hide(); + if (results.data.redirect) { + // Open job status url in another window so that users can continue paiting + window.open(results.data.redirect); + } + }, + error: function (XMLHttpRequest, textStatus, errorThrown) { + midas.createNotice(XMLHttpRequest.message, '4000', 'error'); + $('#processingPleaseWait').hide(); + } + }); + + }); + + $('button.processItemSlicerNo').unbind('click').click(function () { + $('div.MainDialog').dialog('close'); + }); +}; diff --git a/public/js/process/process.statuslist.js b/public/js/process/process.statuslist.js index 0eca0a1..98c1453 100644 --- a/public/js/process/process.statuslist.js +++ b/public/js/process/process.statuslist.js @@ -8,7 +8,7 @@ $(document).ready(function(){ $('a.paramsLink').click(function () { midas.showDialogWithContent('Job Parameters', $(this).attr('qtip'), false); }); - + $('a#segmentation').click(function() { midas.loadDialog("selectitem_volumeRendering","/browse/selectitem"); midas.showDialog('Browse for the Image to be Segmented'); @@ -19,8 +19,8 @@ $(document).ready(function(){ window.location = redirectUrl; }; }); - - + + $('a#registration').click(function() { midas.loadDialog("selectitem_registration_fixed","/browse/selectitem"); midas.showDialog('Browse 
for the Registration Fixed Image'); @@ -37,6 +37,17 @@ $(document).ready(function(){ }; }); + $('a#pdfsegmentation').click(function() { + midas.loadDialog("selectitem_volumeRendering","/browse/selectitem"); + midas.showDialog('Browse for the Image to be Segmented'); + + midas.pyslicer.statuslist.itemSelectionCallback = function(name, id) { + var redirectUrl = $('.webroot').val() + '/pvw/paraview/slice?itemId=' + id; + redirectUrl += '&operations=paint&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.paint.js'; + window.location = redirectUrl; + }; + }); + }); itemSelectionCallback = function(name, id) { diff --git a/views/index/index.phtml b/views/index/index.phtml index 5ce014e..49cd169 100644 --- a/views/index/index.phtml +++ b/views/index/index.phtml @@ -18,6 +18,7 @@ $this->headScript()->appendFile($this->moduleWebroot . '/public/js/index/pyslice echo '
  • Volume Rendering
  • '; echo '
  • Simple Region Growing Segmentation
  • '; echo '
  • Registration
  • '; + echo '
  • Parzen PDFs Segmentation (TubeTK)
  • '; echo ""; ?> diff --git a/views/process/statuslist.phtml b/views/process/statuslist.phtml index d33760e..5e59dc6 100644 --- a/views/process/statuslist.phtml +++ b/views/process/statuslist.phtml @@ -27,6 +27,7 @@ $this->headScript()->appendFile($this->moduleWebroot . '/public/js/process/proce From a4adf7328754754bbe99722d9e48a331f91320dc Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Thu, 6 Jun 2013 02:11:04 -0400 Subject: [PATCH 4/9] Support more colors for painting --- public/js/process/process.statuslist.js | 2 +- public/js/simplecolorpicker.js | 348 ++++++++++++++++++++++++ 2 files changed, 349 insertions(+), 1 deletion(-) create mode 100644 public/js/simplecolorpicker.js diff --git a/public/js/process/process.statuslist.js b/public/js/process/process.statuslist.js index 98c1453..03d53e4 100644 --- a/public/js/process/process.statuslist.js +++ b/public/js/process/process.statuslist.js @@ -43,7 +43,7 @@ $(document).ready(function(){ midas.pyslicer.statuslist.itemSelectionCallback = function(name, id) { var redirectUrl = $('.webroot').val() + '/pvw/paraview/slice?itemId=' + id; - redirectUrl += '&operations=paint&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.paint.js'; + redirectUrl += '&operations=paint&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.paint.js;' + $('.webroot').val() + '/modules/pyslicer/public/js/simplecolorpicker.js'; window.location = redirectUrl; }; }); diff --git a/public/js/simplecolorpicker.js b/public/js/simplecolorpicker.js new file mode 100644 index 0000000..5a88810 --- /dev/null +++ b/public/js/simplecolorpicker.js @@ -0,0 +1,348 @@ +/** + * Really Simple Color Picker in jQuery + * + * Licensed under the MIT (MIT-LICENSE.txt) licenses. 
+ * + * Copyright (c) 2008-2012 + * Lakshan Perera (www.laktek.com) & Daniel Lacy (daniellacy.com) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +(function ($) { + /** + * Create a couple private variables. + **/ + var selectorOwner, + activePalette, + cItterate = 0, + templates = { + control : $(''), + palette : $('
    '), + swatch : $('
     
    '), + hexLabel: $(''), + hexField: $('') + }, + transparent = "transparent", + lastColor; + + /** + * Create our simpleColorPicker function + **/ + $.fn.simpleColorPicker = function (options) { + + return this.each(function () { + // Setup time. Clone new elements from our templates, set some IDs, make shortcuts, jazzercise. + var element = $(this), + opts = $.extend({}, $.fn.simpleColorPicker.defaults, options), + defaultColor = $.fn.simpleColorPicker.toHex( + (element.val().length > 0) ? element.val() : opts.pickerDefault + ), + newControl = templates.control.clone(), + newPalette = templates.palette.clone().attr('id', 'simpleColorPicker_palette-' + cItterate), + newHexLabel = templates.hexLabel.clone(), + newHexField = templates.hexField.clone(), + paletteId = newPalette[0].id, + swatch, controlText; + + + /** + * Build a color palette. + **/ + $.each(opts.colors, function (i) { + swatch = templates.swatch.clone(); + + if (opts.colors[i] === transparent) { + swatch.addClass(transparent).text('X'); + $.fn.simpleColorPicker.bindPalette(newHexField, swatch, transparent); + } else { + swatch.css("background-color", "#" + this); + $.fn.simpleColorPicker.bindPalette(newHexField, swatch); + } + swatch.appendTo(newPalette); + }); + + + newHexLabel.attr('for', 'simpleColorPicker_hex-' + cItterate); + + newHexField.attr({ + 'id' : 'simpleColorPicker_hex-' + cItterate, + 'value' : defaultColor + }); + + newHexField.bind("keydown", function (event) { + if (event.keyCode === 13) { + var hexColor = $.fn.simpleColorPicker.toHex($(this).val()); + $.fn.simpleColorPicker.changeColor(hexColor ? hexColor : element.val()); + } + if (event.keyCode === 27) { + $.fn.simpleColorPicker.hidePalette(); + } + }); + + newHexField.bind("keyup", function (event) { + var hexColor = $.fn.simpleColorPicker.toHex($(event.target).val()); + $.fn.simpleColorPicker.previewColor(hexColor ? hexColor : element.val()); + }); + + $('
    ').append(newHexLabel).appendTo(newPalette); + + newPalette.find('.simpleColorPicker_hexWrap').append(newHexField); + if (opts.showHexField === false) { + newHexField.hide(); + newHexLabel.hide(); + } + + $("body").append(newPalette); + + newPalette.hide(); + + + /** + * Build replacement interface for original color input. + **/ + newControl.css("background-color", defaultColor); + + newControl.bind("click", function () { + if( element.is( ':not(:disabled)' ) ) { + $.fn.simpleColorPicker.togglePalette($('#' + paletteId), $(this)); + } + }); + + if( options && options.onColorChange ) { + newControl.data('onColorChange', options.onColorChange); + } else { + newControl.data('onColorChange', function() {} ); + } + + if (controlText = element.data('text')) + newControl.html(controlText) + + element.after(newControl); + + element.bind("change", function () { + element.next(".simpleColorPicker-picker").css( + "background-color", $.fn.simpleColorPicker.toHex($(this).val()) + ); + }); + + element.val(defaultColor); + + // Hide the original input. + if (element[0].tagName.toLowerCase() === 'input') { + element.each(function () { this.type = 'hidden' }); + } else { + element.hide(); + } + + cItterate++; + }); + }; + + /** + * Extend simpleColorPicker with... all our functionality. + **/ + $.extend(true, $.fn.simpleColorPicker, { + /** + * Return a Hex color, convert an RGB value and return Hex, or return false. + * + * Inspired by http://code.google.com/p/jquery-color-utils + **/ + toHex : function (color) { + // If we have a standard or shorthand Hex color, return that value. + if (color.match(/[0-9A-F]{6}|[0-9A-F]{3}$/i)) { + return (color.charAt(0) === "#") ? color : ("#" + color); + + // Alternatively, check for RGB color, then convert and return it as Hex. 
+ } else if (color.match(/^rgb\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*\)$/)) { + var c = ([parseInt(RegExp.$1, 10), parseInt(RegExp.$2, 10), parseInt(RegExp.$3, 10)]), + pad = function (str) { + if (str.length < 2) { + for (var i = 0, len = 2 - str.length; i < len; i++) { + str = '0' + str; + } + } + + return str; + }; + + if (c.length === 3) { + var r = pad(c[0].toString(16)), + g = pad(c[1].toString(16)), + b = pad(c[2].toString(16)); + + return '#' + r + g + b; + } + + // Otherwise we wont do anything. + } else { + return false; + + } + }, + + /** + * Check whether user clicked on the selector or owner. + **/ + checkMouse : function (event, paletteId) { + var selector = activePalette, + selectorParent = $(event.target).parents("#" + selector.attr('id')).length; + + if (event.target === $(selector)[0] || event.target === selectorOwner[0] || selectorParent > 0) { + return; + } + + $.fn.simpleColorPicker.hidePalette(); + }, + + /** + * Hide the color palette modal. + **/ + hidePalette : function () { + $(document).unbind("mousedown", $.fn.simpleColorPicker.checkMouse); + + $('.simpleColorPicker-palette').hide(); + }, + + /** + * Show the color palette modal. + **/ + showPalette : function (palette) { + var hexColor = selectorOwner.prev("input").val(); + + palette.css({ + top: selectorOwner.offset().top + (selectorOwner.outerHeight()), + left: selectorOwner.offset().left + }); + + $("#color_value").val(hexColor); + + palette.show(); + + $(document).bind("mousedown", $.fn.simpleColorPicker.checkMouse); + }, + + /** + * Toggle visibility of the simpleColorPicker palette. + **/ + togglePalette : function (palette, origin) { + // selectorOwner is the clicked .simpleColorPicker-picker. 
+ if (origin) { + selectorOwner = origin; + } + + activePalette = palette; + + if (activePalette.is(':visible')) { + $.fn.simpleColorPicker.hidePalette(); + + } else { + $.fn.simpleColorPicker.showPalette(palette); + + } + }, + + /** + * Update the input with a newly selected color. + **/ + changeColor : function (value) { + selectorOwner.css("background-color", value); + selectorOwner.prev("input").val(value).change(); + + $.fn.simpleColorPicker.hidePalette(); + + selectorOwner.data('onColorChange').call(selectorOwner, $(selectorOwner).prev("input").attr("id"), value); + }, + + + /** + * Preview the input with a newly selected color. + **/ + previewColor : function (value) { + selectorOwner.css("background-color", value); + }, + + /** + * Bind events to the color palette swatches. + */ + bindPalette : function (paletteInput, element, color) { + color = color ? color : $.fn.simpleColorPicker.toHex(element.css("background-color")); + + element.bind({ + click : function (ev) { + lastColor = color; + + $.fn.simpleColorPicker.changeColor(color); + }, + mouseover : function (ev) { + lastColor = paletteInput.val(); + + $(this).css("border-color", "#598FEF"); + + paletteInput.val(color); + + $.fn.simpleColorPicker.previewColor(color); + }, + mouseout : function (ev) { + $(this).css("border-color", "#000"); + + paletteInput.val(selectorOwner.css("background-color")); + + paletteInput.val(lastColor); + + $.fn.simpleColorPicker.previewColor(lastColor); + } + }); + } + }); + + /** + * Default simpleColorPicker options. 
+ * + * These are publibly available for global modification using a setting such as: + * + * $.fn.simpleColorPicker.defaults.colors = ['151337', '111111'] + * + * They can also be applied on a per-bound element basis like so: + * + * $('#element1').simpleColorPicker({pickerDefault: 'efefef', transparency: true}); + * $('#element2').simpleColorPicker({pickerDefault: '333333', colors: ['333333', '111111']}); + * + **/ + $.fn.simpleColorPicker.defaults = { + // simpleColorPicker default selected color. + pickerDefault : "FFFFFF", + + // Default color set. + colors : [ + '000000', '993300', '333300', '000080', '333399', '333333', '800000', 'FF6600', + '808000', '008000', '008080', '0000FF', '666699', '808080', 'FF0000', 'FF9900', + '99CC00', '339966', '33CCCC', '3366FF', '800080', '999999', 'FF00FF', 'FFCC00', + 'FFFF00', '00FF00', '00FFFF', '00CCFF', '993366', 'C0C0C0', 'FF99CC', 'FFCC99', + 'FFFF99', 'CCFFFF', '99CCFF', 'FFFFFF' + ], + + // If we want to simply add more colors to the default set, use addColors. 
+ addColors : [], + + // Show hex field + showHexField: true + }; + +})(jQuery); From 72158ccdd55798d20e742c54ffd3011acddf117b Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Thu, 6 Jun 2013 17:44:39 -0400 Subject: [PATCH 5/9] Update input parameters to be synchronized with the new TubeTK release --- controllers/components/ApiComponent.php | 4 ++ controllers/components/PipelineComponent.php | 8 ++-- library/pdfseg_slicerjob.py | 49 ++++++++++++++------ library/pipeline.py | 6 ++- public/js/lib/pvw.paint.js | 9 ++-- 5 files changed, 51 insertions(+), 25 deletions(-) diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index baab9c5..44143eb 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -222,6 +222,7 @@ public function getPydasParams() * Start TubeTK PDF segmenation on an item using the initial labelmap * @param item_id The id of the item to be segmented * @param labelmap_item_id The id of input label map item + * @param object_id Array of object label values * @param output_item_name The name of the created output surface model * @param output_labelmap The name of the created output label map * @param output_folder_id (optional) The id of the folder where the output @@ -248,6 +249,7 @@ public function startPdfsegmentation($args) $labelmapItemId = $args['labelmap_item_id']; $outputItemName = $args['output_item_name']; $outputLabelmap = $args['output_labelmap']; + $objectId = JsonComponent::decode($args['object_id']); // Check the input item $itemDao = $itemModel->load($itemId); @@ -300,6 +302,7 @@ public function startPdfsegmentation($args) $job->setStatus(MIDAS_REMOTEPROCESSING_STATUS_WAIT); $segmentationPipeline = 'pdfsegmentation'; $job->setScript($segmentationPipeline); + $job->setParams('objectId: ' . 
JsonComponent::encode($objectId)); $jobModel->save($job); $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); $jobModel->addItemRelation($job, $labelmapItemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP); @@ -328,6 +331,7 @@ public function startPdfsegmentation($args) 'apikey' => $apiKey, 'inputitemid' => $itemId, 'inputlabelmapid' => $labelmapItemId, + 'objectid' => $objectId[0] . ',' . $objectId[1], 'outputfolderid' => $parentFolderId, 'outputitemname' => $outputItemName, 'outputlabelmap' => $outputLabelmap, diff --git a/controllers/components/PipelineComponent.php b/controllers/components/PipelineComponent.php index 16d397c..58178f9 100644 --- a/controllers/components/PipelineComponent.php +++ b/controllers/components/PipelineComponent.php @@ -159,10 +159,10 @@ function pdfsegmentationOutputLinks($job, $inputs, $outputs, $midasPath) $labelmapSliceView = $midasPath . '/pvw/paraview/slice?itemId=' . $inputItemId . '&labelmaps=' . $outputLabelmapItemId; - return array( array ('text' => 'surface model mesh view', 'url' => $meshView), - array ('text' => 'surface model contour slice view', 'url' => $sliceView), - array ('text' => 'surface model volume view', 'url' => $volumeView), - array ('text' => 'label map slice view', 'url' => $labelmapSliceView)); + return array( array ('text' => 'mesh view', 'url' => $meshView), + array ('text' => 'slice view (with surface model contour)', 'url' => $sliceView), + array ('text' => 'volume view', 'url' => $volumeView), + array ('text' => 'slice view (with output label map)', 'url' => $labelmapSliceView)); } public function resolveInputsAndOutputs($job) diff --git a/library/pdfseg_slicerjob.py b/library/pdfseg_slicerjob.py index 39bc886..aae8178 100644 --- a/library/pdfseg_slicerjob.py +++ b/library/pdfseg_slicerjob.py @@ -18,12 +18,13 @@ class SlicerPdfSeg(SlicerJob): wrote_model_output = "Wrote Model Output" def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, - outDir, 
proxyurl, inputFile, inputLabelMap, outputItemName, - outputLabelMap, outputFolderId): + outDir, proxyurl, inputFile, inputLabelMap, objectId, + outputItemName, outputLabelMap, outputFolderId): SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) self.inputFile = inputFile self.inputLabelMap = inputLabelMap + self.objectId = map(int, objectId.split(',')) self.outputItemName = outputItemName self.outputLabelMap = outputLabelMap self.outputFolderId = outputFolderId @@ -52,7 +53,12 @@ def process(self): slicer.mrmlScene.AddNode(outVolume) # use 0 as voidId voidId = 0 - params = {'inputVolume1': inputVolume.GetID(), 'labelmap': labelMapVolume.GetID(), 'outputVolume': outVolume.GetID(), 'voidId': voidId} + params = {'inputVolume1': inputVolume.GetID(), + 'labelmap': labelMapVolume.GetID(), + 'outputVolume': outVolume.GetID(), + 'voidId': voidId, + 'reclassifyObjectMask': False, + 'reclassifyNotObjectMask': False} # Get obejctId from the intial label map accum = vtk.vtkImageAccumulate() accum.SetInput(labelMapVolume.GetImageData()) @@ -60,15 +66,18 @@ def process(self): data = accum.GetOutput() data.Update() numBins = accum.GetComponentExtent()[1] - labels = [] - for i in range(0, numBins + 1): - numVoxels = data.GetScalarComponentAsDouble(i, 0, 0, 0) - if (numVoxels != 0): - labels.append(i) - if voidId in labels: - labels.remove(voidId) - params["objectId"] = labels - print labels + if self.objectId: + params["objectId"] = self.objectId + else: + labels = [] + for i in range(0, numBins + 1): + numVoxels = data.GetScalarComponentAsDouble(i, 0, 0, 0) + if (numVoxels != 0): + labels.append(i) + if voidId in labels: + labels.remove(voidId) + params["objectId"] = labels + print labels self.report_status(self.event_process, self.started_segmentation) # Run PDF segmentation in Slicer @@ -80,11 +89,20 @@ def process(self): slicer.util.saveNode(outVolume, outLabelMapPath, save_node_params) self.report_status(self.event_process, 
self.wrote_segmentation_output) - # Call model maker to create a surface model for the output labelmap + # Call model maker to create a surface model for foreground label only modelmaker = slicer.modules.modelmaker mhn = slicer.vtkMRMLModelHierarchyNode() slicer.mrmlScene.AddNode(mhn) - parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} + parameters = {'InputVolume': outVolume.GetID(), + 'ModelSceneFile': mhn.GetID(), + 'FilterType': "Sinc", + 'GenerateAll': False, + 'StartLabel': params["objectId"][0], # foreground label + 'EndLabel': params["objectId"][0], # foreground label + 'SplitNormals': True, + 'PointNormals': True, + 'SkipUnNamed': True + } self.report_status(self.event_process, self.started_modelmaker) cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) self.report_status(self.event_process, self.finished_modelmaker) @@ -141,8 +159,9 @@ def execute(self): outputItemName = arg_map['outputitemname'][0] outputLabelMap = arg_map['outputlabelmap'][0] outputFolderId = arg_map['outputfolderid'][0] + objectId = arg_map['objectid'][0] sp = SlicerPdfSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, - outDir, proxyurl, inputFile, inputLabelMap, + outDir, proxyurl, inputFile, inputLabelMap, objectId, outputItemName, outputLabelMap, outputFolderId) sp.execute() diff --git a/library/pipeline.py b/library/pipeline.py index b6fdf70..e679db8 100755 --- a/library/pipeline.py +++ b/library/pipeline.py @@ -225,7 +225,6 @@ def executeDownload(self): self.reportStatus(self.event_pipelinestart) self.createTmpDir() self.downloadInput() - print self.tempfiles return (self.dataDir, self.outDir, self.tempfiles) except StandardError as exception: # TODO where to do exceptions status and conditions @@ -499,7 +498,10 @@ def downloadInputImpl(self): """Download input item and initial label map from the Midas server.""" self.tempfiles = {} self.tempfiles['inputfile'] = self.downloadItem(self.itemId) - 
self.tempfiles['inputlabelmap'] = self.downloadItem(self.labelMapItemId) + # In case user saves initial label map with the same name multiple times + itemName = self.downloadItem(self.labelMapItemId) + bitstreamName = os.path.splitext(itemName)[0] + '.mha' + self.tempfiles['inputlabelmap'] = bitstreamName print self.tempfiles def uploadOutputImpl(self): diff --git a/public/js/lib/pvw.paint.js b/public/js/lib/pvw.paint.js index b0a517d..6d39a4c 100644 --- a/public/js/lib/pvw.paint.js +++ b/public/js/lib/pvw.paint.js @@ -5,13 +5,13 @@ midas.pvw = midas.pvw || {}; * Activate paint mode as soon as we finished initialization */ midas.pvw.postInitCallback = function () { - $('button.paintButton').click(); + midas.pvw.paintMode(); }; /** * Callback handler to trigger pdf segmentation pipeline */ -midas.pvw.handlePDFSegmentation = function (labelmapItemId) { +midas.pvw.handlePDFSegmentation = function (labelmapItemId, objectLabels) { $('div.MainDialog').dialog('close'); // Get output item name var html = ' From 2caece218b21ddc0d2c09e90bc22564fc89046dd Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Thu, 27 Jun 2013 17:36:25 -0400 Subject: [PATCH 8/9] Add parameter presets. 
--- controllers/components/ApiComponent.php | 111 ++++++++++++++++++++ library/pdfseg_slicerjob.py | 25 +++-- library/pipeline.py | 6 ++ public/js/lib/pvw.paint.js | 57 ----------- public/js/lib/pvw.pdfSegmenter.js | 130 ++++++++++++++++++++++++ public/js/process/process.statuslist.js | 39 ++++++- views/process/statuslist.phtml | 4 +- 7 files changed, 305 insertions(+), 67 deletions(-) delete mode 100644 public/js/lib/pvw.paint.js create mode 100644 public/js/lib/pvw.pdfSegmenter.js diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index 44143eb..f10deb1 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -271,6 +271,19 @@ public function startPdfsegmentation($args) { throw new Zend_Exception('Read access on the input labelmap item required.', MIDAS_PYSLICER_INVALID_POLICY); } + // Check the PDF preset json item + if (array_key_exists('preset_item_id', $args)) { + $presetItemId = $args['preset_item_id']; + $presetItemDao = $itemModel->load($presetItemId); + if($presetItemDao === false) + { + throw new Zend_Exception('This preset json item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($presetItemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on the preset json item required.', MIDAS_PYSLICER_INVALID_POLICY); + } + } // Check the output folder if(isset($args['output_folder_id'])) @@ -336,6 +349,10 @@ public function startPdfsegmentation($args) 'outputitemname' => $outputItemName, 'outputlabelmap' => $outputLabelmap, 'job_id' => $job->getKey()); + if(isset($presetItemId)) + { + $slicerjobParams['presetitemid'] = $presetItemId; + } $requestParams = ""; $ind = 0; foreach ($slicerjobParams as $name => $value) @@ -362,6 +379,100 @@ public function startPdfsegmentation($args) } } + /** + * Create folder for TubeTK PDF segmenation based on the root folder + * @param root_folder_id The id of the root folder. 
+ * @return array('dataFolderId' => dataFolderId, 'presetsFolderId' => presetsFolderId, + * 'outputFolderId' => outputFolderId) + */ + public function createPdfsegmentationFolders($args) + { + $this->_checkKeys(array('root_folder_id'), $args); + $userDao = $this->_getUser($args); + if(!$userDao) + { + throw new Exception('Anonymous users may not create folders.', MIDAS_PYSLICER_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + $rootFolderId = $args['root_folder_id']; + + // Check the input root folder + $rootFolderDao = $folderModel->load($rootFolderId); + if($rootFolderDao === false) + { + throw new Zend_Exception('This folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$folderModel->policyCheck($rootFolderDao, $userDao, MIDAS_POLICY_WRITE)) + { + throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + $dataFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'data', 'input data directory for PDF segmenter'); + $presetFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'preset', 'parameter preset directory for PDF segmenter'); + $outputFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'output', 'output results directory for PDF segmenter'); + return array('dataFolderId' => $dataFolderId, + 'presetFolderId' => $presetFolderId, + 'outputFolderId' => $outputFolderId); + } + + /** + * Create a child folder or use the existing one if it has the same name. 
+ * @param type $userDao + * @param type $parentId parent folder Id + * @param type $name name of the child folder to be created + * @param type $description description of the child folder to be created + * @return id of newly created folder or the exsisting folder with the same name + * @throws Exception + */ + protected function _createChildFolder($userDao, $parentId, $name, $description='') + { + if($userDao == false) + { + throw new Exception('Cannot create folder anonymously', MIDAS_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + $record = false; + $uuid = ''; + if($parentId == -1) //top level user folder being created + { + $new_folder = $folderModel->createFolder($name, $description, $userDao->getFolderId(), $uuid); + } + else //child of existing folder + { + $folder = $folderModel->load($parentId); + if(($existing = $folderModel->getFolderExists($name, $folder))) + { + $returnArray = $existing->toArray(); + return $returnArray['folder_id']; + } + $new_folder = $folderModel->createFolder($name, $description, $folder, $uuid); + if($new_folder === false) + { + throw new Exception('Create folder failed', MIDAS_INTERNAL_ERROR); + } + $policyGroup = $folder->getFolderpolicygroup(); + $policyUser = $folder->getFolderpolicyuser(); + $folderpolicygroupModel = MidasLoader::loadModel('Folderpolicygroup'); + $folderpolicyuserModel = MidasLoader::loadModel('Folderpolicyuser'); + foreach($policyGroup as $policy) + { + $folderpolicygroupModel->createPolicy($policy->getGroup(), $new_folder, $policy->getPolicy()); + } + foreach($policyUser as $policy) + { + $folderpolicyuserModel->createPolicy($policy->getUser(), $new_folder, $policy->getPolicy()); + } + if(!$folderModel->policyCheck($new_folder, $userDao, MIDAS_POLICY_ADMIN)) + { + $folderpolicyuserModel->createPolicy($userDao, $new_folder, MIDAS_POLICY_ADMIN); + } + } + // reload folder to get up to date privacy status + $new_folder = $folderModel->load($new_folder->getFolderId()); + 
$returnArray = $new_folder->toArray(); + return $returnArray['folder_id']; + } + protected function _loadValidItem($userDao, $itemId, $paramName) { diff --git a/library/pdfseg_slicerjob.py b/library/pdfseg_slicerjob.py index 009bff3..07e9f88 100644 --- a/library/pdfseg_slicerjob.py +++ b/library/pdfseg_slicerjob.py @@ -19,7 +19,7 @@ class SlicerPdfSeg(SlicerJob): def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, inputFile, inputLabelMap, objectId, - outputItemName, outputLabelMap, outputFolderId): + outputItemName, outputLabelMap, outputFolderId, presetFile): SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) self.inputFile = inputFile @@ -28,6 +28,7 @@ def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, self.outputItemName = outputItemName self.outputLabelMap = outputLabelMap self.outputFolderId = outputFolderId + self.presetFile = presetFile def process(self): """Execute TubeTK PDF segmentation """ @@ -56,10 +57,18 @@ def process(self): params = {'inputVolume1': inputVolume.GetID(), 'labelmap': labelMapVolume.GetID(), 'outputVolume': outVolume.GetID(), - 'voidId': voidId, - 'reclassifyObjectMask': False, - 'reclassifyNotObjectMask': False} - # Get obejctId from the intial label map + 'voidId': voidId} + if not self.presetFile is None: + presetPath = os.path.join(self.dataDir, self.presetFile) + with open(presetPath) as preset_file: + data = json.load(preset_file) + params['erodeRadius'] = data['pdf_segmenter_parameters']['erosion_radius'] + params['holeFillIterations'] = data['pdf_segmenter_parameters']['hole_fill_iterations'] + params['probSmoothingStdDev'] = data['pdf_segmenter_parameters']['probability_smoothing_standard_deviation'] + params['reclassifyObjectMask'] = data['pdf_segmenter_parameters']['reclassify_foreground_mask'] + params['reclassifyNotObjectMask'] = data['pdf_segmenter_parameters']['reclassify_barrier_mask'] + 
params['forceClassification'] = data['pdf_segmenter_parameters']['force_classification'] + # Get objectId from the initial label map + accum = vtk.vtkImageAccumulate() + accum.SetInput(labelMapVolume.GetImageData()) + accum.UpdateWholeExtent() @@ -178,8 +187,12 @@ def execute(self): outputLabelMap = arg_map['outputlabelmap'][0] outputFolderId = arg_map['outputfolderid'][0] objectId = arg_map['objectid'][0] + if 'presetfile' in arg_map.keys(): + presetFile = arg_map['presetfile'][0] + else: + presetFile = None sp = SlicerPdfSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, inputFile, inputLabelMap, objectId, - outputItemName, outputLabelMap, outputFolderId) + outputItemName, outputLabelMap, outputFolderId, presetFile) sp.execute() diff --git a/library/pipeline.py b/library/pipeline.py index e679db8..32f40de 100755 --- a/library/pipeline.py +++ b/library/pipeline.py @@ -483,6 +483,10 @@ def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, json_args): self.outputItemName = json_args['outputitemname'][0] self.outputLabelMap = json_args['outputlabelmap'][0] self.outputFolderId = json_args['outputfolderid'][0] + if 'presetitemid' in json_args: + self.presetItemId = json_args['presetitemid'][0] + else: + self.presetItemId = None def define_process_events(self): """Define the process events for TubeTK PDF segmentation.""" @@ -502,6 +506,8 @@ def downloadInputImpl(self): itemName = self.downloadItem(self.labelMapItemId) bitstreamName = os.path.splitext(itemName)[0] + '.mha' self.tempfiles['inputlabelmap'] = bitstreamName + if not self.presetItemId is None: + self.tempfiles['presetfile'] = self.downloadItem(self.presetItemId) print self.tempfiles def uploadOutputImpl(self): diff --git a/public/js/lib/pvw.paint.js b/public/js/lib/pvw.paint.js deleted file mode 100644 index 6d39a4c..0000000 --- a/public/js/lib/pvw.paint.js +++ /dev/null @@ -1,57 +0,0 @@ -var midas = midas || {}; -midas.pvw = midas.pvw || {}; - -/** - * Activate paint mode as 
soon as we finished initialization - */ -midas.pvw.postInitCallback = function () { - midas.pvw.paintMode(); -}; - -/** - * Callback handler to trigger pdf segmentation pipeline - */ -midas.pvw.handlePDFSegmentation = function (labelmapItemId, objectLabels) { - $('div.MainDialog').dialog('close'); - // Get output item name - var html = '


    '; - html += ''; - html += '
    '; - html += ''; - html += ''; - html += '
    '; - midas.showDialogWithContent('Choose name for output item', html, false); - $('#processItemSlicerOutputName').focus(); - $('#processItemSlicerOutputName').select(); - - $('button.processItemSlicerYes').unbind('click').click(function () { - var outputItemName = $('#processItemSlicerOutputName').val(); - var outputLabelmap = $('#processItemSlicerOutputName').val() + '-label'; - - $('#processingPleaseWait').show(); - var objectId = '['+objectLabels[0]+', '+objectLabels[1]+']'; // serialize objectId value - ajaxWebApi.ajax({ - method: 'midas.pyslicer.start.pdfsegmentation', - args: 'item_id=' + json.pvw.item.item_id + '&labelmap_item_id=' + labelmapItemId + '&object_id=' + objectId + '&output_item_name=' + outputItemName + '&output_labelmap=' + outputLabelmap, - success: function (results) { - $('div.MainDialog').dialog('close'); - $('#processingPleaseWait').hide(); - if (results.data.redirect) { - // Open job status url in another window so that users can continue paiting - window.open(results.data.redirect, '_blank', 'resizable=yes,scrollbars=yes,toolbar=yes,menubar=no,location=yes,directories=no,status=yes'); - } - }, - error: function (XMLHttpRequest, textStatus, errorThrown) { - midas.createNotice(XMLHttpRequest.message, '4000', 'error'); - $('#processingPleaseWait').hide(); - } - }); - - }); - - $('button.processItemSlicerNo').unbind('click').click(function () { - $('div.MainDialog').dialog('close'); - }); -}; diff --git a/public/js/lib/pvw.pdfSegmenter.js b/public/js/lib/pvw.pdfSegmenter.js new file mode 100644 index 0000000..1fa7f7b --- /dev/null +++ b/public/js/lib/pvw.pdfSegmenter.js @@ -0,0 +1,130 @@ +var midas = midas || {}; +midas.pvw = midas.pvw || {}; + +/** + * Activate paint mode as soon as we finished initialization + */ +midas.pvw.postInitCallback = function () { + midas.pvw.paintMode(); + midas.pvw.pdfSegmenterPresets(); +}; + +/** + * Get the list of pdf segmenter preset json files + */ +midas.pvw.getPdfSegmenterPresets = function (id) { + 
ajaxWebApi.ajax({ + method: 'midas.folder.children', + args: 'id=' + id, + success: function (results) { + $("#selectedPresetFile").empty(); + $("#selectedPresetFile").append(''); + for (var key in results.data.items) { + var item = results.data.items[key]; + if (/\.json$/.test(item.name)) { // only care about json files + var optionHtml = ''; + $("#selectedPresetFile").append(optionHtml); + } + } + }, + error: function (XMLHttpRequest, textStatus, errorThrown) { + midas.createNotice(XMLHttpRequest.message, '4000', 'error'); + } + }); +} + +/* + * Drop down list to select preset parameters (json file) for PDF segmenter + */ +midas.pvw.pdfSegmenterPresets = function () { + midas.pvw.pdfSegmenterSelectedPresetItemId = ''; + midas.pvw.pdfSegmenterPresetFolderId = typeof(json.pvw.pdfSegmenterPresetFolderId) === 'undefined' ? '' : json.pvw.pdfSegmenterPresetFolderId; + midas.pvw.pdfSegmenterOutputFolderId = typeof(json.pvw.pdfSegmenterOutputFolderId) === 'undefined' ? '' : json.pvw.pdfSegmenterOutputFolderId; + + var html = '
    '; + html += '

    Change PDF Segmenter Presets Folder

    '; + html += '

    PDF Segmenter Presets

    '; + html += ''; + html += '
    '; + $(".viewSideBar").prepend(html); + if (midas.pvw.pdfSegmenterPresetFolderId) { + midas.pvw.getPdfSegmenterPresets(midas.pvw.pdfSegmenterPresetFolderId); + } + + $("h1#updatePdfPresets").hover( + function(){ + $("h1#updatePdfPresets").css("background-color","#E5E5E5"); + }, + function(){ + $("h1#updatePdfPresets").css("background-color","white"); + }); + + $('a#pdfSegmenterPresetsFolder').click(function() { + midas.loadDialog("selecfolder_pdfpresets","/browse/selectfolder?policy=read"); + midas.showDialog('Browse for the Folder Containing PDF Segmenter Preset JSON Files'); + midas.pvw.folderSelectionCallback = function(name, id) { + midas.pvw.getPdfSegmenterPresets(id); + midas.pvw.pdfSegmenterPresetFolderId = id; + }; + }); + + $('#selectedPresetFile').change(function(){ + midas.pvw.pdfSegmenterSelectedPresetItemId = $(this).val(); + }) +} + +/** + * Callback handler to trigger PDF segmentation pipeline + */ +midas.pvw.handlePDFSegmentation = function (labelmapItemId, objectLabels) { + $('div.MainDialog').dialog('close'); + // Get output item name + var html = '


    '; + html += ''; + html += '
    '; + html += ''; + html += ''; + html += '
    '; + midas.showDialogWithContent('Choose name for output item', html, false); + $('#processItemSlicerOutputName').focus(); + $('#processItemSlicerOutputName').select(); + + $('button.processItemSlicerYes').unbind('click').click(function () { + var outputItemName = $('#processItemSlicerOutputName').val(); + var outputLabelmap = $('#processItemSlicerOutputName').val() + '-label'; + + $('#processingPleaseWait').show(); + var objectId = '['+objectLabels[0]+', '+objectLabels[1]+']'; // serialize objectId value + var presetItemIdArg = midas.pvw.pdfSegmenterSelectedPresetItemId ? '&preset_item_id=' + midas.pvw.pdfSegmenterSelectedPresetItemId : ''; + var outputFolderIdArg = midas.pvw.pdfSegmenterOutputFolderId ? '&output_folder_id=' + midas.pvw.pdfSegmenterOutputFolderId : ''; + ajaxWebApi.ajax({ + method: 'midas.pyslicer.start.pdfsegmentation', + args: 'item_id=' + json.pvw.item.item_id + '&labelmap_item_id=' + labelmapItemId + '&object_id=' + objectId + '&output_item_name=' + outputItemName + '&output_labelmap=' + outputLabelmap + presetItemIdArg + outputFolderIdArg, + success: function (results) { + $('div.MainDialog').dialog('close'); + $('#processingPleaseWait').hide(); + if (results.data.redirect) { + // Open job status url in another window so that users can continue painting + window.open(results.data.redirect, '_blank', 'resizable=yes,scrollbars=yes,toolbar=yes,menubar=no,location=yes,directories=no,status=yes'); + } + }, + error: function (XMLHttpRequest, textStatus, errorThrown) { + midas.createNotice(XMLHttpRequest.message, '4000', 'error'); + $('#processingPleaseWait').hide(); + } + }); + }); + + $('button.processItemSlicerNo').unbind('click').click(function () { + $('div.MainDialog').dialog('close'); + }); +} + +folderSelectionCallback = function(name, id) { + midas.pvw.folderSelectionCallback(name, id); + return; +} \ No newline at end of file diff --git a/public/js/process/process.statuslist.js b/public/js/process/process.statuslist.js index 
03d53e4..eab64e8 100644 --- a/public/js/process/process.statuslist.js +++ b/public/js/process/process.statuslist.js @@ -2,6 +2,11 @@ var midas = midas || {}; midas.pyslicer = midas.pyslicer || {}; midas.pyslicer.statuslist = midas.pyslicer.statuslist || {}; +midas.pyslicer.statuslist.pdfSegmenterRootFolderId = ''; +midas.pyslicer.statuslist.pdfSegmenterDataFolderId = ''; +midas.pyslicer.statuslist.pdfSegmenterPresetFolderId = ''; +midas.pyslicer.statuslist.pdfSegmenterOutputFolderId = ''; + $(document).ready(function(){ @@ -41,16 +46,44 @@ $(document).ready(function(){ midas.loadDialog("selectitem_volumeRendering","/browse/selectitem"); midas.showDialog('Browse for the Image to be Segmented'); + var pdfSegmenterFolderIdsArg = midas.pyslicer.statuslist.pdfSegmenterRootFolderId ? '&pdfSegmenterRootFolderId=' + midas.pyslicer.statuslist.pdfSegmenterRootFolderId : ''; + pdfSegmenterFolderIdsArg += midas.pyslicer.statuslist.pdfSegmenterDataFolderId ? '&pdfSegmenterDataFolderId=' + midas.pyslicer.statuslist.pdfSegmenterDataFolderId : ''; + pdfSegmenterFolderIdsArg += midas.pyslicer.statuslist.pdfSegmenterPresetFolderId ? '&pdfSegmenterPresetFolderId=' + midas.pyslicer.statuslist.pdfSegmenterPresetFolderId : ''; + pdfSegmenterFolderIdsArg += midas.pyslicer.statuslist.pdfSegmenterOutputFolderId ? 
'&pdfSegmenterOutputFolderId=' + midas.pyslicer.statuslist.pdfSegmenterOutputFolderId : ''; midas.pyslicer.statuslist.itemSelectionCallback = function(name, id) { - var redirectUrl = $('.webroot').val() + '/pvw/paraview/slice?itemId=' + id; - redirectUrl += '&operations=paint&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.paint.js;' + $('.webroot').val() + '/modules/pyslicer/public/js/simplecolorpicker.js'; + var redirectUrl = $('.webroot').val() + '/pvw/paraview/slice?itemId=' + id + pdfSegmenterFolderIdsArg; + redirectUrl += '&operations=paint&jsImports=' + $('.webroot').val() + '/modules/pyslicer/public/js/lib/pvw.pdfSegmenter.js;' + $('.webroot').val() + '/modules/pyslicer/public/js/simplecolorpicker.js'; window.location = redirectUrl; }; }); - + + $('a#createPdfSegmenterFolders').click(function() { + midas.loadDialog("selecfolder_pdfpresets","/browse/selectfolder?policy=read"); + midas.showDialog('Browse for PDF Segmentation Root Folder'); + midas.pyslicer.statuslist.folderSelectionCallback = function(name, id) { + midas.pyslicer.statuslist.pdfSegmenterRootId = id; + ajaxWebApi.ajax({ + method: 'midas.pyslicer.create.pdfsegmentation.folders', + args: 'root_folder_id=' + id, + success: function (results) { + midas.pyslicer.statuslist.pdfSegmenterDataFolderId = results.data.dataFolderId; + midas.pyslicer.statuslist.pdfSegmenterPresetFolderId = results.data.presetFolderId; + midas.pyslicer.statuslist.pdfSegmenterOutputFolderId = results.data.outputFolderId; + }, + error: function (XMLHttpRequest, textStatus, errorThrown) { + midas.createNotice(XMLHttpRequest.message, '4000', 'error'); + } + }); + }; + }); }); itemSelectionCallback = function(name, id) { midas.pyslicer.statuslist.itemSelectionCallback(name, id); return; +} + +folderSelectionCallback = function(name, id) { + midas.pyslicer.statuslist.folderSelectionCallback(name, id); + return; } \ No newline at end of file diff --git a/views/process/statuslist.phtml 
b/views/process/statuslist.phtml index e61148a..d6e5fd7 100644 --- a/views/process/statuslist.phtml +++ b/views/process/statuslist.phtml @@ -27,7 +27,9 @@ $this->headScript()->appendFile($this->moduleWebroot . '/public/js/process/proce
    From 12b68649a5ac0d70aec12e50677adc356e5014cf Mon Sep 17 00:00:00 2001 From: Yuzheng Zhou Date: Fri, 5 Jul 2013 18:58:09 -0400 Subject: [PATCH 9/9] Suppor default folders for PDF segmenter. --- Notification.php | 23 ++-- controllers/UserController.php | 150 +++++++++++++++++++++++ controllers/components/ApiComponent.php | 95 --------------- database/upgrade/0.1.0.php | 37 ++++++ models/base/UserModelBase.php | 155 ++++++++++++++++++++++++ models/dao/UserDao.php | 28 +++++ models/pdo/UserModel.php | 64 ++++++++++ public/css/user/user.index.css | 50 ++++++++ public/js/lib/pvw.pdfSegmenter.js | 18 --- public/js/process/process.statuslist.js | 42 +++---- public/js/user/user.index.js | 82 +++++++++++++ views/process/statuslist.phtml | 2 +- views/user/index.phtml | 53 ++++++++ 13 files changed, 653 insertions(+), 146 deletions(-) create mode 100644 controllers/UserController.php create mode 100644 database/upgrade/0.1.0.php create mode 100644 models/base/UserModelBase.php create mode 100644 models/dao/UserDao.php create mode 100644 models/pdo/UserModel.php create mode 100644 public/css/user/user.index.css create mode 100644 public/js/user/user.index.js create mode 100644 views/user/index.phtml diff --git a/Notification.php b/Notification.php index 514cc04..897022d 100644 --- a/Notification.php +++ b/Notification.php @@ -7,18 +7,19 @@ class Pyslicer_Notification extends ApiEnabled_Notification { public $moduleName = 'pyslicer'; public $_moduleComponents=array('Api'); - - + + /** Register callbacks */ public function init() { - $this->enableWebAPI($this->moduleName); + $this->enableWebAPI($this->moduleName); $fc = Zend_Controller_Front::getInstance(); $this->moduleWebroot = $fc->getBaseUrl().'/modules/'.$this->moduleName; $this->coreWebroot = $fc->getBaseUrl().'/core'; $this->addCallBack('CALLBACK_CORE_GET_FOOTER_HEADER', 'getHeader'); $this->addCallBack('CALLBACK_CORE_GET_LEFT_LINKS', 'getLeftLink'); $this->addCallBack('CALLBACK_CORE_GET_FOOTER_LAYOUT', 'getFooter'); 
+ $this->addCallBack('CALLBACK_CORE_GET_CONFIG_TABS', 'getUserTabs'); } /** get layout header */ @@ -34,7 +35,7 @@ public function getFooter() $footer .= ''; return $footer; } - + /** *@method getLeftLink * will generate a link for this module to be displayed in the main view. @@ -45,7 +46,7 @@ public function getLeftLink() $baseURL = $fc->getBaseUrl(); $moduleWebroot = $baseURL . '/'.$this->moduleName; - + if(isset($this->userSession->Dao)) { $apiComponent = MidasLoader::loadComponent('Api', $this->moduleName); @@ -55,10 +56,18 @@ public function getLeftLink() } else { - return array(); + return array(); } } - + /** User account tabs */ + public function getUserTabs($params) + { + $user = $params['user']; + $fc = Zend_Controller_Front::getInstance(); + $moduleWebroot = $fc->getBaseUrl().'/'.$this->moduleName; + return array('Pyslicer' => $moduleWebroot.'/user?userId='.$user->getKey()); + } + } //end class ?> diff --git a/controllers/UserController.php b/controllers/UserController.php new file mode 100644 index 0000000..30cc731 --- /dev/null +++ b/controllers/UserController.php @@ -0,0 +1,150 @@ +disableLayout(); + $useAjax = $this->_getParam('useAjax'); + + $userDao = Zend_Registry::get('userSession')->Dao; + $userId = $this->_getParam('userId'); + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 404); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 403); + } + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + + $pyslicerUserDao = $this->Pyslicer_User->getByUser($userDao, $pipeline); + if (isset($useAjax) && $useAjax == true) + { + $this->disableView(); + $pyslicerUserArr = array(); + if ($pyslicerUserDao) + { + $pyslicerUserArr = $pyslicerUserDao->toArray(); + } + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'Get Pyslicer User settings', 
'pyslicerUser' => $pyslicerUserArr)); + } + else + { + $this->view->pyslicerUser = $pyslicerUserDao; + } + } + + /** + * Create folders for the given pipeline or return existing folders if rootFolderId are same. + * @param userId Id of the user. Must be session user itself. + * @param rootFolderId root folder id for the given pipeline. + * @param pipeline Pipeline name. + */ + function createfoldersAction() + { + $this->disableLayout(); + $this->disableView(); + + $rootFolderId = $this->_getParam('rootFolderId'); + $userId = $this->_getParam('userId'); + $userDao = Zend_Registry::get('userSession')->Dao; + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 400); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 401); + } + + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + $pyslicerUserDao = $this->Pyslicer_User->createFolders($userDao, $rootFolderId, $pipeline); + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'Folders created', 'pyslicerUser' => $pyslicerUserDao)); + } + + /** + * Delete default Pyslicer settings for this user + * @param userId Id of the user. Must be session user itself. + * @param pipeline Pipeline name. 
+ */ + function deleteAction() + { + $this->disableLayout(); + $this->disableView(); + + $userId = $this->_getParam('userId'); + $userDao = Zend_Registry::get('userSession')->Dao; + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 400); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 401); + } + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + + $pyslicerUserDao = $this->Pyslicer_User->getByUser($userDao, $pipeline); + if ($pyslicerUserDao) + { + $this->Pyslicer_User->delete($pyslicerUserDao); + } + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'No default Folders.')); + } + } // end class +?> diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index f10deb1..932b9de 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -379,101 +379,6 @@ public function startPdfsegmentation($args) } } - /** - * Create folder for TubeTK PDF segmenation based on the root folder - * @param root_folder_id The id of the root folder. 
- * @return array('dataFolderId' => dataFolderId, 'presetsFolderId' => presetsFolderId, - * 'outputFolderId' => outputFolderId) - */ - public function createPdfsegmentationFolders($args) - { - $this->_checkKeys(array('root_folder_id'), $args); - $userDao = $this->_getUser($args); - if(!$userDao) - { - throw new Exception('Anonymous users may not create folders.', MIDAS_PYSLICER_INVALID_POLICY); - } - $folderModel = MidasLoader::loadModel('Folder'); - $rootFolderId = $args['root_folder_id']; - - // Check the input root folder - $rootFolderDao = $folderModel->load($rootFolderId); - if($rootFolderDao === false) - { - throw new Zend_Exception('This folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); - } - if(!$folderModel->policyCheck($rootFolderDao, $userDao, MIDAS_POLICY_WRITE)) - { - throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); - } - - $dataFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'data', 'input data directory for PDF segmenter'); - $presetFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'preset', 'parameter preset directory for PDF segmenter'); - $outputFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'output', 'output results directory for PDF segmenter'); - return array('dataFolderId' => $dataFolderId, - 'presetFolderId' => $presetFolderId, - 'outputFolderId' => $outputFolderId); - } - - /** - * Create a child folder or use the existing one if it has the same name. 
- * @param type $userDao - * @param type $parentId parent folder Id - * @param type $name name of the child folder to be created - * @param type $description description of the child folder to be created - * @return id of newly created folder or the exsisting folder with the same name - * @throws Exception - */ - protected function _createChildFolder($userDao, $parentId, $name, $description='') - { - if($userDao == false) - { - throw new Exception('Cannot create folder anonymously', MIDAS_INVALID_POLICY); - } - $folderModel = MidasLoader::loadModel('Folder'); - $record = false; - $uuid = ''; - if($parentId == -1) //top level user folder being created - { - $new_folder = $folderModel->createFolder($name, $description, $userDao->getFolderId(), $uuid); - } - else //child of existing folder - { - $folder = $folderModel->load($parentId); - if(($existing = $folderModel->getFolderExists($name, $folder))) - { - $returnArray = $existing->toArray(); - return $returnArray['folder_id']; - } - $new_folder = $folderModel->createFolder($name, $description, $folder, $uuid); - if($new_folder === false) - { - throw new Exception('Create folder failed', MIDAS_INTERNAL_ERROR); - } - $policyGroup = $folder->getFolderpolicygroup(); - $policyUser = $folder->getFolderpolicyuser(); - $folderpolicygroupModel = MidasLoader::loadModel('Folderpolicygroup'); - $folderpolicyuserModel = MidasLoader::loadModel('Folderpolicyuser'); - foreach($policyGroup as $policy) - { - $folderpolicygroupModel->createPolicy($policy->getGroup(), $new_folder, $policy->getPolicy()); - } - foreach($policyUser as $policy) - { - $folderpolicyuserModel->createPolicy($policy->getUser(), $new_folder, $policy->getPolicy()); - } - if(!$folderModel->policyCheck($new_folder, $userDao, MIDAS_POLICY_ADMIN)) - { - $folderpolicyuserModel->createPolicy($userDao, $new_folder, MIDAS_POLICY_ADMIN); - } - } - // reload folder to get up to date privacy status - $new_folder = $folderModel->load($new_folder->getFolderId()); - 
$returnArray = $new_folder->toArray(); - return $returnArray['folder_id']; - } - - protected function _loadValidItem($userDao, $itemId, $paramName) { $itemModel = MidasLoader::loadModel('Item'); diff --git a/database/upgrade/0.1.0.php b/database/upgrade/0.1.0.php new file mode 100644 index 0000000..3c174a8 --- /dev/null +++ b/database/upgrade/0.1.0.php @@ -0,0 +1,37 @@ +db->query("CREATE TABLE IF NOT EXISTS `pyslicer_user` ( + `pyslicer_user_id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_id` bigint(20) NOT NULL, + `pipeline` varchar(255) NOT NULL, + `root_folder_id` bigint(20) NOT NULL, + `data_folder_id` bigint(20) NULL DEFAULT NULL, + `preset_folder_id` bigint(20) NULL DEFAULT NULL, + `output_folder_id` bigint(20) NULL DEFAULT NULL, + PRIMARY KEY (`pyslicer_user_id`) + )"); + } +} +?> diff --git a/models/base/UserModelBase.php b/models/base/UserModelBase.php new file mode 100644 index 0000000..71bbf9a --- /dev/null +++ b/models/base/UserModelBase.php @@ -0,0 +1,155 @@ +_name = 'pyslicer_user'; + $this->_daoName = 'UserDao'; + $this->_key = 'pyslicer_user_id'; + + $this->_mainData = array( + 'pyslicer_user_id' => array('type' => MIDAS_DATA), + 'user_id' => array('type' => MIDAS_ONE_TO_ONE, 'model' => 'User', 'parent_column' => 'user_id', 'child_column' => 'user_id'), + 'pipeline' => array('type' => MIDAS_DATA), + 'root_folder_id' => array('type' => MIDAS_DATA), + 'data_folder_id' => array('type' => MIDAS_DATA), + 'preset_folder_id' => array('type' => MIDAS_DATA), + 'output_folder_id' => array('type' => MIDAS_DATA) + ); + $this->initialize(); // required + } // end __construct() + + public abstract function deleteByUser($userDao, $pipeline); + public abstract function getByUser($userDao, $pipeline); + + /** + * Helper function to create a child folder or use the existing one if it has the same name. 
+ * @param type $userDao + * @param type $parentId parent folder Id + * @param type $name name of the child folder to be created + * @param type $description description of the child folder to be created + * @return id of newly created folder or the existing folder with the same name + * @throws Exception + */ + private function _createChildFolder($userDao, $parentId, $name, $description='') + { + if($userDao == false) + { + throw new Exception('Cannot create folder anonymously', MIDAS_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + $record = false; + $uuid = ''; + if($parentId == -1) //top level user folder being created + { + $new_folder = $folderModel->createFolder($name, $description, $userDao->getFolderId(), $uuid); + } + else //child of existing folder + { + $folder = $folderModel->load($parentId); + if(($existing = $folderModel->getFolderExists($name, $folder))) + { + $returnArray = $existing->toArray(); + return $returnArray['folder_id']; + } + $new_folder = $folderModel->createFolder($name, $description, $folder, $uuid); + if($new_folder === false) + { + throw new Exception('Create folder failed', MIDAS_INTERNAL_ERROR); + } + $policyGroup = $folder->getFolderpolicygroup(); + $policyUser = $folder->getFolderpolicyuser(); + $folderpolicygroupModel = MidasLoader::loadModel('Folderpolicygroup'); + $folderpolicyuserModel = MidasLoader::loadModel('Folderpolicyuser'); + foreach($policyGroup as $policy) + { + $folderpolicygroupModel->createPolicy($policy->getGroup(), $new_folder, $policy->getPolicy()); + } + foreach($policyUser as $policy) + { + $folderpolicyuserModel->createPolicy($policy->getUser(), $new_folder, $policy->getPolicy()); + } + if(!$folderModel->policyCheck($new_folder, $userDao, MIDAS_POLICY_ADMIN)) + { + $folderpolicyuserModel->createPolicy($userDao, $new_folder, MIDAS_POLICY_ADMIN); + } + } + // reload folder to get up to date privacy status + $new_folder = $folderModel->load($new_folder->getFolderId()); + $returnArray 
= $new_folder->toArray(); + return $returnArray['folder_id']; + } + + /** + * Create folders for a given pipeline or return existing folders if rootFolderId are same. + * @param userDao The core user + * @param rootFolderId The id of the root folder. + * @return The pyslicer user dao that was created + */ + public function createFolders($userDao, $rootFolderId, $pipeline=MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE) + { + if(!$userDao) + { + throw new Exception('Anonymous users may not create folders.', MIDAS_PYSLICER_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + // Check input root folder + $rootFolderDao = $folderModel->load($rootFolderId); + if($rootFolderDao === false) + { + throw new Zend_Exception('This folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$folderModel->policyCheck($rootFolderDao, $userDao, MIDAS_POLICY_WRITE)) + { + throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + $existingPyslicerUserDao = $this->getByUser($userDao, $pipeline); + if ($existingPyslicerUserDao) { + // return existing one if rootFolderId are same + if ($existingPyslicerUserDao->getRootFolderId() == $rootFolderId) + { + return $existingPyslicerUserDao; + } + else + { + // every user can only have at most one set of default folders + $this->deleteByUser($userDao, $pipeline); + } + } + $pyslicerUserDao = MidasLoader::newDao('UserDao', 'pyslicer'); + $pyslicerUserDao->setUserId($userDao->getKey()); + $pyslicerUserDao->setPipeline($pipeline); + $pyslicerUserDao->setRootFolderId($rootFolderId); + $dataFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'data', 'input data directory'); + $pyslicerUserDao->setDataFolderId($dataFolderId); + $presetFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'presets', 'parameter preset directory'); + $pyslicerUserDao->setPresetFolderId($presetFolderId); + $outputFolderId = $this->_createChildFolder($userDao, $rootFolderId, 
'output', 'output results directory'); + $pyslicerUserDao->setOutputFolderId($outputFolderId); + $this->save($pyslicerUserDao); + return $pyslicerUserDao; + } + +} // end class Pyslicer_UserModelBase diff --git a/models/dao/UserDao.php b/models/dao/UserDao.php new file mode 100644 index 0000000..06334b9 --- /dev/null +++ b/models/dao/UserDao.php @@ -0,0 +1,28 @@ +database->getDB()->delete('pyslicer_user', array( + 'user_id = ?' => $userDao->getKey(), + 'pipeline = ?' => $pipeline + )); + } + + /** + * Returns the pyslicer_user corresponding to the core user, or false if the + * user is not an pyslicer_user. + * @param userDao The core user + * @param pipeline Pipeline name. + */ + public function getByUser($userDao, $pipeline) + { + $sql = $this->database->select() + ->where('user_id = ?', $userDao->getKey()) + ->where('pipeline = ?', $pipeline); + $row = $this->database->fetchRow($sql); + $dao = $this->initDao('User', $row, 'pyslicer'); + if($dao) + { + return $dao; + } + else + { + return false; + } + } +} diff --git a/public/css/user/user.index.css b/public/css/user/user.index.css new file mode 100644 index 0000000..bf3d452 --- /dev/null +++ b/public/css/user/user.index.css @@ -0,0 +1,50 @@ +div.sectionHeader { + padding-top: 3px; + margin-bottom: 5px; + font-size: 16px; + color: #666; + display: inline; + float: left; +} + +a.createPdfSegmenterFolders { + padding-top: 3px; + margin-left: 10px; + font-size: 14px; + float: left; +} + +div.sectionContainer { + margin-bottom: 30px; + padding-top: 6px; +} + +.pyslicerUserTable th { + background-color: #dde4e4; + padding: 3px 10px 3px 5px; + font-weight: normal; +} + +.pyslicerUserTable td { + padding: 3px; + background-color: #fbfbfb; +} + +.pyslicerUserTable { + width: 100%; +} + +.noFoldersMessage { + color: #888888; + font-style: italic; + padding-left: 5px; +} + +div.bottomButtons { + float: right; + margin-top: 25px; +} + +div.bottomButtons input { + margin-left: 10px; +} diff --git 
a/public/js/lib/pvw.pdfSegmenter.js b/public/js/lib/pvw.pdfSegmenter.js index 1fa7f7b..077d7b3 100644 --- a/public/js/lib/pvw.pdfSegmenter.js +++ b/public/js/lib/pvw.pdfSegmenter.js @@ -42,7 +42,6 @@ midas.pvw.pdfSegmenterPresets = function () { midas.pvw.pdfSegmenterOutputFolderId = typeof(json.pvw.pdfSegmenterOutputFolderId) === 'undefined' ? '' : json.pvw.pdfSegmenterOutputFolderId; var html = '
    '; - html += '

    Change PDF Segmenter Presets Folder

    '; html += '

    PDF Segmenter Presets

    '; html += ' + +
    +
    +