diff --git a/Notification.php b/Notification.php index 514cc04..897022d 100644 --- a/Notification.php +++ b/Notification.php @@ -7,18 +7,19 @@ class Pyslicer_Notification extends ApiEnabled_Notification { public $moduleName = 'pyslicer'; public $_moduleComponents=array('Api'); - - + + /** Register callbacks */ public function init() { - $this->enableWebAPI($this->moduleName); + $this->enableWebAPI($this->moduleName); $fc = Zend_Controller_Front::getInstance(); $this->moduleWebroot = $fc->getBaseUrl().'/modules/'.$this->moduleName; $this->coreWebroot = $fc->getBaseUrl().'/core'; $this->addCallBack('CALLBACK_CORE_GET_FOOTER_HEADER', 'getHeader'); $this->addCallBack('CALLBACK_CORE_GET_LEFT_LINKS', 'getLeftLink'); $this->addCallBack('CALLBACK_CORE_GET_FOOTER_LAYOUT', 'getFooter'); + $this->addCallBack('CALLBACK_CORE_GET_CONFIG_TABS', 'getUserTabs'); } /** get layout header */ @@ -34,7 +35,7 @@ public function getFooter() $footer .= ''; return $footer; } - + /** *@method getLeftLink * will generate a link for this module to be displayed in the main view. @@ -45,7 +46,7 @@ public function getLeftLink() $baseURL = $fc->getBaseUrl(); $moduleWebroot = $baseURL . 
'/'.$this->moduleName; - + if(isset($this->userSession->Dao)) { $apiComponent = MidasLoader::loadComponent('Api', $this->moduleName); @@ -55,10 +56,18 @@ public function getLeftLink() } else { - return array(); + return array(); } } - + /** User account tabs */ + public function getUserTabs($params) + { + $user = $params['user']; + $fc = Zend_Controller_Front::getInstance(); + $moduleWebroot = $fc->getBaseUrl().'/'.$this->moduleName; + return array('Pyslicer' => $moduleWebroot.'/user?userId='.$user->getKey()); + } + } //end class ?> diff --git a/README.md b/README.md index 3b45205..12d6272 100644 --- a/README.md +++ b/README.md @@ -4,65 +4,37 @@ pyslicer midas viewable python console to interact with slicer -To install and run the twisted based python server that will accept http requests from Midas and -create Slicer jobs in response, first you will need to install a python2.6. +1) To install and run the twisted based python server that will accept http requests from Midas and +create Slicer jobs in response, first you will need to install python (2.6+). +2) Download and extract a Slicer nightly binary, the directory where this is extracted to is SLICER_DIR (where the Slicer exe should be contained in). - - -Download and extract a Slicer nightly binary, the directory where this is extracted to is SLICER_DIR (where the Slicer exe should be contained in). - -Also, in the library/twserver.cfg file, set the path to your slicer exe like so: +3) Update the library/twserver.cfg file: +3a) Set the path to your slicer exe like so: slicer_path=SLICER_DIR/Slicer -Now change directories into your SLICER_DIR. - - -cd SLICER_DIR - - -As of Slicer Nightly Build 10-16-2012, you will no longer need to manually create the include path or copy over pyconfig.h - -We will need a pyconfig.h header for python26 in order to install pip. 
You can get this from an ubuntu python system install of 2.6 at -/usr/include/python2.6 - - - -Since we will also be compiling twisted, we will need all headers here, so go ahead and copy them over. - -mkdir lib/Python/include -mkdir lib/Python/include/python2.6 -cp /usr/include/python2.6/*.h lib/Python/include/python2.6/ - - - - - -Download and install distribute and pip, you will have to replace the path to the python exe with -the path to your installed python 2.6 interpreter exe. - -curl http://python-distribute.org/distribute_setup.py > distribute_setup.py -./Slicer --launch /usr/bin/python distribute_setup.py -curl https://raw.github.com/pypa/pip/master/contrib/get-pip.py > get_pip.py -./Slicer --launch /usr/bin/python get_pip.py - -Now that pip is installed in your Slicer Python, you can run pip to install pydas and the pydas depedencies. - -./Slicer --launch SLICER_DIR/lib/Python/bin/pip install pydas +3b) Set the Slicer proxy server URL like so: -You will need to install twisted, but I had trouble with this step: +proxy_url=YOUR_PROXY_URL -./Slicer --launch SLICER_DIR/lib/Python/bin/pip install twisted +4) Install pydas and twisted +4a) Download and install pip -An alternate approach is to download the source of twisted, extract it yourself to TWISTED_DIR, then run this command there: +curl http://python-distribute.org/distribute_setup.py > distribute_setup.py +[sudo] python distribute_setup.py +curl https://raw.github.com/pypa/pip/master/contrib/get-pip.py > get_pip.py +[sudo] python get_pip.py -SLICER_DIR/Slicer --no-main-window --python-script setup.py install +4b) Run pip to install pydas and the pydas dependencies. 
+pip install pydas +4c) Run pip to install twisted: +pip install twisted -Now you could run your twserver.py using your Slicer Python like this : +5) Now you could start twisted server like this : -SLICER_DIR/Slicer --no-main-window --python-script twserver.py +python library/twserver.py diff --git a/constant/module.php b/constant/module.php index 151b4d8..4e73ef9 100644 --- a/constant/module.php +++ b/constant/module.php @@ -24,11 +24,19 @@ define('MIDAS_PYSLICER_SEGMENTATION_PIPELINE', 'segmentation'); define('MIDAS_PYSLICER_VOLUMERENDERING_PIPELINE', 'volumerendering'); define('MIDAS_PYSLICER_REGISTRATION_PIPELINE', 'registration'); - +define('MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE', 'pdfsegmentation'); + +define('MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM', 100); +define('MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP', 101); +define('MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL', 102); +define('MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP', 103); + define('MIDAS_PYSLICER_SEGMENTATION_INPUT_COUNT', 1); define('MIDAS_PYSLICER_SEGMENTATION_OUTPUT_COUNT', 1); define('MIDAS_PYSLICER_REGISTRATION_INPUT_COUNT', 2); define('MIDAS_PYSLICER_REGISTRATION_OUTPUT_COUNT', 2); +define('MIDAS_PYSLICER_PDFSEGMENTATION_INPUT_COUNT', 1); +define('MIDAS_PYSLICER_PDFSEGMENTATION_OUTPUT_COUNT', 2); define('MIDAS_PYSLICER_EXPECTED_INPUTS', 'expectedinputs'); define('MIDAS_PYSLICER_EXPECTED_OUTPUTS', 'expectedoutputs'); diff --git a/controllers/UserController.php b/controllers/UserController.php new file mode 100644 index 0000000..30cc731 --- /dev/null +++ b/controllers/UserController.php @@ -0,0 +1,150 @@ +disableLayout(); + $useAjax = $this->_getParam('useAjax'); + + $userDao = Zend_Registry::get('userSession')->Dao; + $userId = $this->_getParam('userId'); + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 404); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 403); + 
} + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + + $pyslicerUserDao = $this->Pyslicer_User->getByUser($userDao, $pipeline); + if (isset($useAjax) && $useAjax == true) + { + $this->disableView(); + $pyslicerUserArr = array(); + if ($pyslicerUserDao) + { + $pyslicerUserArr = $pyslicerUserDao->toArray(); + } + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'Get Pyslicer User settings', 'pyslicerUser' => $pyslicerUserArr)); + } + else + { + $this->view->pyslicerUser = $pyslicerUserDao; + } + } + + /** + * Create folders for the given pipeline or return existing folders if rootFolderId are same. + * @param userId Id of the user. Must be session user itself. + * @param rootFolderId root folder id for the given pipeline. + * @param pipeline Pipeline name. + */ + function createfoldersAction() + { + $this->disableLayout(); + $this->disableView(); + + $rootFolderId = $this->_getParam('rootFolderId'); + $userId = $this->_getParam('userId'); + $userDao = Zend_Registry::get('userSession')->Dao; + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 400); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 401); + } + + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + $pyslicerUserDao = $this->Pyslicer_User->createFolders($userDao, $rootFolderId, $pipeline); + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'Folders created', 'pyslicerUser' => $pyslicerUserDao)); + } + + /** + * Delete default Pyslicer settings for this user + * @param userId Id of the user. Must be session user itself. + * @param pipeline Pipeline name. 
+ */ + function deleteAction() + { + $this->disableLayout(); + $this->disableView(); + + $userId = $this->_getParam('userId'); + $userDao = Zend_Registry::get('userSession')->Dao; + if(isset($userId)) + { + $userDao = $this->User->load($userId); + } + if(!$userDao) + { + throw new Zend_Exception('Invalid userId', 400); + } + if(!$this->logged) + { + throw new Zend_Exception('Must be logged in', 401); + } + $pipeline = $this->_getParam('pipeline'); + if(!isset($pipeline)) + { + $pipeline = MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE; + } + + $pyslicerUserDao = $this->Pyslicer_User->getByUser($userDao, $pipeline); + if ($pyslicerUserDao) + { + $this->Pyslicer_User->delete($pyslicerUserDao); + } + echo JsonComponent::encode(array('status' => 'ok', 'message' => 'No default Folders.')); + } + } // end class +?> diff --git a/controllers/components/ApiComponent.php b/controllers/components/ApiComponent.php index 306501a..932b9de 100644 --- a/controllers/components/ApiComponent.php +++ b/controllers/components/ApiComponent.php @@ -108,14 +108,7 @@ public function startItemProcessing($args) throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); } - $userEmail = $userDao->getEmail(); - // get an api key for this user - $userApiModel = MidasLoader::loadModel('Userapi'); - $userApiDao = $userApiModel->getByAppAndUser('Default', $userDao); - if(!$userApiDao) - { - throw new Zend_Exception('You need to create a web-api key for this user for application: Default'); - } + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); // TODO store remote processing job info $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); @@ -128,7 +121,7 @@ public function startItemProcessing($args) // TODO json encode params set $job->setParams(JsonComponent::encode($seed)); $jobModel->save($job); - $jobModel->addItemRelation($job, $itemDao, MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT); + $jobModel->addItemRelation($job, 
$itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); if(isset($args['job_name'])) { @@ -148,9 +141,6 @@ public function startItemProcessing($args) // TODO switch to different pipeline types $jobInitPath = "/slicerjob/init/"; - $midasPath = Zend_Registry::get('webroot'); - $midasUrl = 'http://' . $_SERVER['HTTP_HOST'] . $midasPath; - $apiKey = $userApiDao->getApikey(); $parentFolderId = $parentFolder->getFolderId(); // TODO probably a security hole to put the email and api key in the url @@ -209,6 +199,185 @@ public function startItemProcessing($args) } } + /** + * Get Pydas required parameters from current session + * @return array(email => pydas_email, + * apikey => pydas_apikey, + * url => midas_url) + */ + public function getPydasParams() + { + $userDao = Zend_Registry::get('userSession')->Dao; + if(!$userDao) + { + throw new Exception('Anonymous users cannot use Pydas', MIDAS_PYSLICER_INVALID_POLICY); + } + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); + return array('email' => $userEmail, + 'apikey' => $apiKey, + 'url' => $midasUrl); + } + + /** + * Start TubeTK PDF segmentation on an item using the initial labelmap + * @param item_id The id of the item to be segmented + * @param labelmap_item_id The id of input label map item + * @param object_id Array of object label values + * @param output_item_name The name of the created output surface model + * @param output_labelmap The name of the created output label map + * @param output_folder_id (optional) The id of the folder where the output + * items will be created; if not supplied, the first parent folder found on the + * input item will be used as the output folder. + * @param job_name (optional) The name of the processing job, if not supplied, + * will be given a name like "Slicer Job X" where x is the job id. 
+ * @return array(redirect => Url to show this job's status) + */ + public function startPdfsegmentation($args) + { + $this->_checkKeys(array('item_id', 'labelmap_item_id', 'output_item_name', + 'output_labelmap'), $args); + $userDao = $this->_getUser($args); + if(!$userDao) + { + throw new Exception('Anonymous users may not process items', MIDAS_PYSLICER_INVALID_POLICY); + } + + $itemModel = MidasLoader::loadModel('Item'); + $folderModel = MidasLoader::loadModel('Folder'); + + $itemId = $args['item_id']; + $labelmapItemId = $args['labelmap_item_id']; + $outputItemName = $args['output_item_name']; + $outputLabelmap = $args['output_labelmap']; + $objectId = JsonComponent::decode($args['object_id']); + + // Check the input item + $itemDao = $itemModel->load($itemId); + if($itemDao === false) + { + throw new Zend_Exception('This item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($itemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on this item required.', MIDAS_PYSLICER_INVALID_POLICY); + } + // Check the input labelmap item + $labelmapItemDao = $itemModel->load($labelmapItemId); + if($labelmapItemDao === false) + { + throw new Zend_Exception('This input labelmap item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($labelmapItemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on the input labelmap item required.', MIDAS_PYSLICER_INVALID_POLICY); + } + // Check the PDF preset json item + if (array_key_exists('preset_item_id', $args)) { + $presetItemId = $args['preset_item_id']; + $presetItemDao = $itemModel->load($presetItemId); + if($presetItemDao === false) + { + throw new Zend_Exception('This preset json item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$itemModel->policyCheck($presetItemDao, $userDao, MIDAS_POLICY_READ)) + { + throw new Zend_Exception('Read access on the preset json item required.', 
MIDAS_PYSLICER_INVALID_POLICY); + } + } + + // Check the output folder + if(isset($args['output_folder_id'])) + { + $outputFolderId = $args['output_folder_id']; + $parentFolder = $folderModel->load($outputFolderId); + } + else + { + $parentFolders = $itemDao->getFolders(); + $parentFolder = $parentFolders[0]; + } + if($parentFolder === false) + { + throw new Zend_Exception('This output folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$folderModel->policyCheck($parentFolder, $userDao, MIDAS_POLICY_WRITE)) + { + throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + // Get pydas required parameters + list($userEmail, $apiKey, $midasUrl) = $this->_getConnectionParams($userDao); + + // Create a job and add input items + $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); + $job = MidasLoader::newDao('JobDao', 'remoteprocessing'); + $job->setCreatorId($userDao->getUserId()); + $job->setStatus(MIDAS_REMOTEPROCESSING_STATUS_WAIT); + $segmentationPipeline = 'pdfsegmentation'; + $job->setScript($segmentationPipeline); + $job->setParams('objectId: ' . JsonComponent::encode($objectId)); + $jobModel->save($job); + $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); + $jobModel->addItemRelation($job, $labelmapItemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP); + + if(isset($args['job_name'])) + { + $jobName = $args['job_name']; + } + else + { + $jobName = 'Slicer Job ' . 
$job->getKey(); + } + $job->setName($jobName); + $jobModel->save($job); + + $settingModel = MidasLoader::loadModel('Setting'); + $twistedServerUrl = $settingModel->getValueByName('slicerProxyUrl', 'pyslicer'); + + $jobInitPath = "/slicerjob/init/"; + $parentFolderId = $parentFolder->getFolderId(); + + // Construct URL to be sent to Twisted server + $slicerjobParams = array('pipeline' => $segmentationPipeline, + 'url' => $midasUrl, + 'email' => $userEmail, + 'apikey' => $apiKey, + 'inputitemid' => $itemId, + 'inputlabelmapid' => $labelmapItemId, + 'objectid' => $objectId[0] . ',' . $objectId[1], + 'outputfolderid' => $parentFolderId, + 'outputitemname' => $outputItemName, + 'outputlabelmap' => $outputLabelmap, + 'job_id' => $job->getKey()); + if(isset($presetItemId)) + { + $slicerjobParams['presetitemid'] = $presetItemId; + } + $requestParams = ""; + $ind = 0; + foreach ($slicerjobParams as $name => $value) + { + if ($ind > 0) + { + $requestParams .= "&"; + } + $requestParams .= $name . '=' . $value; + $ind++; + } + + $url = $twistedServerUrl . $jobInitPath . '?' . $requestParams; + // Send request to Twisted Server + $data = file_get_contents($url); + if($data === false) + { + throw new Zend_Exception("Cannot connect with Slicer Server."); + } + else + { + $redirectURL = $midasUrl . '/pyslicer/process/status?jobId='.$job->getJobId(); + return array('redirect' => $redirectURL); + } + } protected function _loadValidItem($userDao, $itemId, $paramName) { @@ -254,7 +423,7 @@ protected function _getConnectionParams($userDao) $midasPath = Zend_Registry::get('webroot'); $midasUrl = 'http://' . $_SERVER['HTTP_HOST'] . 
$midasPath; - $userApiModel = MidasLoader::loadModel('Userapi', 'api'); + $userApiModel = MidasLoader::loadModel('Userapi'); $userApiDao = $userApiModel->getByAppAndUser('Default', $userDao); if(!$userApiDao) { @@ -279,7 +448,7 @@ protected function _createJob($userDao, $script, $params, $inputItems, $synthesi $jobModel->save($job); foreach($inputItems as $itemDao) { - $jobModel->addItemRelation($job, $itemDao, MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT); + $jobModel->addItemRelation($job, $itemDao, MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM); } // now that a job id is defined... @@ -592,10 +761,12 @@ public function addJobstatuses($args) * will return a job object for a job_id with the current status, * along with any related jobstatus objects for that job. * @param job_id the id of the job to query status for. - * @return array ('job' => the job object, - 'jobstatuses' => the array of jobstatus objects, - 'condition_rows' => array of job condition lines, if any, - 'output_links' => array of output links for this job, if any). 
+ * @param status_only (Optional) If set, will only return the array of jobstatus objects + * @return if status_only is not set: array ('job' => the job object, + * 'jobstatuses' => the array of jobstatus objects, + * 'condition_rows' => array of job condition lines, if any, + * 'output_links' => array of output links for this job, if any); + * if status_only is set: the array of jobstatus objects */ public function getJobstatus($args) { @@ -622,10 +793,19 @@ public function getJobstatus($args) // get the status details $jobstatuses = $jobstatusModel->getForJob($job); - $pipelineComponent = MidasLoader::loadComponent('Pipeline', 'pyslicer'); - $conditionRows = $pipelineComponent->formatJobCondition($job->getCondition()); - $inputsAndOutputs = $pipelineComponent->resolveInputsAndOutputs($job); - return array('job' => $job, 'jobstatuses' => $jobstatuses, 'condition_rows' => $conditionRows, 'output_links' => $inputsAndOutputs['outputs']); + if(array_key_exists('status_only', $args)) + { + return $jobstatuses; + } + else + { + $pipelineComponent = MidasLoader::loadComponent('Pipeline', 'pyslicer'); + $conditionRows = $pipelineComponent->formatJobCondition($job->getCondition()); + $inputsAndOutputs = $pipelineComponent->resolveInputsAndOutputs($job); + return array('job' => $job, 'jobstatuses' => $jobstatuses, + 'condition_rows' => $conditionRows, + 'output_links' => $inputsAndOutputs['outputs']); + } } @@ -634,6 +814,8 @@ public function getJobstatus($args) * add an output item to a job * @param job_id the id of the job to update * @param item_id the id of the item to set as an output of the job + * @param type(Optional) output item types + * (currently only support two types: 'surface_model' (default), 'labelmap') * @return array('success' => 'true') if successful. 
*/ public function addJobOutputItem($args) @@ -665,7 +847,14 @@ public function addJobOutputItem($args) throw new Zend_Exception('This item does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); } - $jobModel->addItemRelation($job, $item, MIDAS_REMOTEPROCESSING_RELATION_TYPE_OUPUT); + if(array_key_exists('type', $args) && $args['type'] == 'labelmap') + { + $jobModel->addItemRelation($job, $item, MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP); + } + else + { + $jobModel->addItemRelation($job, $item, MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL); + } return array('success' => 'true'); } diff --git a/controllers/components/PipelineComponent.php b/controllers/components/PipelineComponent.php index f94424f..58178f9 100644 --- a/controllers/components/PipelineComponent.php +++ b/controllers/components/PipelineComponent.php @@ -33,7 +33,7 @@ class Pyslicer_PipelineComponent extends AppComponent MIDAS_REMOTEPROCESSING_STATUS_STARTED => 'midas_pyslicer_started', MIDAS_REMOTEPROCESSING_STATUS_DONE => 'midas_pyslicer_done', MIDAS_PYSLICER_REMOTEPROCESSING_JOB_EXCEPTION => 'midas_pyslicer_error'); - + protected $pipelines = array( MIDAS_PYSLICER_SEGMENTATION_PIPELINE => array( @@ -45,22 +45,27 @@ class Pyslicer_PipelineComponent extends AppComponent MIDAS_PYSLICER_EXPECTED_INPUTS => MIDAS_PYSLICER_REGISTRATION_INPUT_COUNT, MIDAS_PYSLICER_EXPECTED_OUTPUTS => MIDAS_PYSLICER_REGISTRATION_OUTPUT_COUNT, MIDAS_PYSLICER_INPUT_GENERATOR => 'registrationInputLinks', - MIDAS_PYSLICER_OUTPUT_GENERATOR => 'registrationOutputLinks')); - + MIDAS_PYSLICER_OUTPUT_GENERATOR => 'registrationOutputLinks'), + MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE => array( + MIDAS_PYSLICER_EXPECTED_INPUTS => MIDAS_PYSLICER_PDFSEGMENTATION_INPUT_COUNT, + MIDAS_PYSLICER_EXPECTED_OUTPUTS => MIDAS_PYSLICER_PDFSEGMENTATION_OUTPUT_COUNT, + MIDAS_PYSLICER_INPUT_GENERATOR => 'pdfsegmentationInputLinks', + MIDAS_PYSLICER_OUTPUT_GENERATOR => 'pdfsegmentationOutputLinks'),); + protected $missingInputs = array( array 
('text' => 'Error: missing input', 'url' => '')); protected $missingOutputs = array( array ('text' => 'Error: missing output', 'url' => '')); - + /** init method */ function init() { } - + function segmentationInputLinks($job, $inputs, $outputs, $midasPath) { - $inputItemId = $inputs[0]->getItemId(); - $volumeView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId; - $sliceView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId; + $inputItemId = $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); + $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId='.$inputItemId; return array( array ('text' => 'slice view', 'url' => $sliceView), array ('text' => 'volume view', 'url' => $volumeView)); @@ -70,16 +75,20 @@ function segmentationOutputLinks($job, $inputs, $outputs, $midasPath) { $inputItemId = $inputs[0]->getItemId(); $outputItemId = $outputs[0]->getItemId(); - - $meshView = $midasPath . '/visualize/paraview/surface?itemId='.$outputItemId; - $sliceView = $midasPath . '/visualize/paraview/slice?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; - $volumeView = $midasPath . '/visualize/paraview/volume?itemId='.$inputItemId.'&meshes='.$outputItemId.'&jsImports='.$midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; - + + $meshView = $midasPath . '/pvw/paraview/surface?itemId=' . $outputItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId=' . $inputItemId . + '&meshes=' . $outputItemId . '&jsImports=' . $midasPath . + '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $volumeView = $midasPath . '/pvw/paraview/volume?itemId=' . $inputItemId . + '&meshes=' . $outputItemId . '&jsImports=' . $midasPath . 
+ '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + return array( array ('text' => 'model mesh view', 'url' => $meshView), array ('text' => 'slice view', 'url' => $sliceView), array ('text' => 'volume view', 'url' => $volumeView)); } - + function registrationInputLinks($job, $inputs, $outputs, $midasPath) { $fixedItemId = $inputs[0]->getItemId(); @@ -94,7 +103,7 @@ function registrationOutputLinks($job, $inputs, $outputs, $midasPath) { $params = JsonComponent::decode($job->getParams()); $fixedItemId = $params['fixed_item_id']; - + // we need to get the output volume, but there are two outputs // we know the fact that the output volume is created first // and the outputs here are returned in reverse order of creation, but @@ -108,33 +117,76 @@ function registrationOutputLinks($job, $inputs, $outputs, $midasPath) $outputVolumeId = $output->getItemId(); } } - + $outputLink = $midasPath . '/visualize/paraview/dual?left='.$fixedItemId; $outputLink .= '&right=' . $outputVolumeId; $outputLink .= '&jsImports=' . $midasPath.'/modules/pyslicer/public/js/lib/visualize.regOutput.js'; - $outputLinkText = 'View'; + $outputLinkText = 'View'; return array( array ('text' => $outputLinkText, 'url' => $outputLink)); } - + + function pdfsegmentationInputLinks($job, $inputs, $outputs, $midasPath) + { + $inputItemId = $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); + // Initial label map + $inputLabelmapItemId = + $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP]->getItemId(); + $volumeView = $midasPath . '/pvw/paraview/volume?itemId='.$inputItemId; + $labelmapSliceView = $midasPath . '/pvw/paraview/slice?itemId=' . + $inputItemId . '&labelmaps=' . 
$inputLabelmapItemId; + + return array( array ('text' => 'slice view (with initial labelmap)', 'url' => $labelmapSliceView), + array ('text' => 'volume view', 'url' => $volumeView)); + } + + function pdfsegmentationOutputLinks($job, $inputs, $outputs, $midasPath) + { + $inputItemId = $inputs[MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM]->getItemId(); + // Surface model of output label map + $outputModelItemId = + $outputs[MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL]->getItemId(); + // Output label map + $outputLabelmapItemId = + $outputs[MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP]->getItemId(); + + $meshView = $midasPath . '/pvw/paraview/surface?itemId=' . $outputModelItemId; + $sliceView = $midasPath . '/pvw/paraview/slice?itemId=' . $inputItemId . + '&meshes=' . $outputModelItemId . '&jsImports=' . + $midasPath . '/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $volumeView = $midasPath . '/pvw/paraview/volume?itemId=' . $inputItemId . + '&meshes=' . $outputModelItemId . '&jsImports=' . + $midasPath.'/modules/pyslicer/public/js/lib/visualize.meshView.js'; + $labelmapSliceView = $midasPath . '/pvw/paraview/slice?itemId=' . + $inputItemId . '&labelmaps=' . 
$outputLabelmapItemId; + + return array( array ('text' => 'mesh view', 'url' => $meshView), + array ('text' => 'slice view (with surface model contour)', 'url' => $sliceView), + array ('text' => 'volume view', 'url' => $volumeView), + array ('text' => 'slice view (with output label map)', 'url' => $labelmapSliceView)); + } + public function resolveInputsAndOutputs($job) { - $midasPath = Zend_Registry::get('webroot'); + $midasPath = Zend_Registry::get('webroot'); $inputs = array(); $outputs = array(); $jobModel = MidasLoader::loadModel('Job', 'remoteprocessing'); $relatedItems = $jobModel->getRelatedItems($job); foreach($relatedItems as $item) { - if($item->getType() == MIDAS_REMOTEPROCESSING_RELATION_TYPE_INPUT) + $itemType = $item->getType(); + if($itemType == MIDAS_PYSLICER_RELATION_TYPE_INPUT_ITEM || + $itemType == MIDAS_PYSLICER_RELATION_TYPE_INPUT_LABELMAP) { - $inputs[] = $item; + $inputs[$itemType] = $item; } - elseif($item->getType() == MIDAS_REMOTEPROCESSING_RELATION_TYPE_OUPUT) + elseif($itemType == MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_LABELMAP || + $itemType == MIDAS_PYSLICER_RELATION_TYPE_OUTPUT_SURFACE_MODEL) { - $outputs[] = $item; + $outputs[$itemType] = $item; } } - + // generate inputs $expectedInputs = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_EXPECTED_INPUTS]; $inputGenerator = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_INPUT_GENERATOR]; @@ -147,7 +199,7 @@ public function resolveInputsAndOutputs($job) { $inputLinks = call_user_func_array(array($this, $inputGenerator), array($job, $inputs, $outputs, $midasPath)); } - + // generate outputs if done $expectedOutputs = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_EXPECTED_OUTPUTS]; $outputGenerator = $this->pipelines[$job->getScript()][MIDAS_PYSLICER_OUTPUT_GENERATOR]; @@ -162,7 +214,7 @@ public function resolveInputsAndOutputs($job) { if(sizeof($inputs) < $expectedInputs) { - $outputLinks = $this->missingInputs; + $outputLinks = $this->missingInputs; } else { @@ -172,8 +224,8 
@@ public function resolveInputsAndOutputs($job) } return array('inputs' => $inputLinks, 'outputs' => $outputLinks); } - - + + public function formatJobCondition($condition) { $splitLines = array(); @@ -190,7 +242,7 @@ public function formatJobCondition($condition) } return $splitLines; } - + } // end class diff --git a/database/upgrade/0.1.0.php b/database/upgrade/0.1.0.php new file mode 100644 index 0000000..3c174a8 --- /dev/null +++ b/database/upgrade/0.1.0.php @@ -0,0 +1,37 @@ +db->query("CREATE TABLE IF NOT EXISTS `pyslicer_user` ( + `pyslicer_user_id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_id` bigint(20) NOT NULL, + `pipeline` varchar(255) NOT NULL, + `root_folder_id` bigint(20) NOT NULL, + `data_folder_id` bigint(20) NULL DEFAULT NULL, + `preset_folder_id` bigint(20) NULL DEFAULT NULL, + `output_folder_id` bigint(20) NULL DEFAULT NULL, + PRIMARY KEY (`pyslicer_user_id`) + )"); + } +} +?> diff --git a/library/pdfseg_slicerjob.py b/library/pdfseg_slicerjob.py new file mode 100644 index 0000000..07e9f88 --- /dev/null +++ b/library/pdfseg_slicerjob.py @@ -0,0 +1,198 @@ +import os +import json + +import vtk, slicer + +from slicerjob import SlicerJob + +class SlicerPdfSeg(SlicerJob): + """This class implements a job executed in Slicer's Python environment - + TubeTK PDF segmentation""" + loaded_input_volume = "Loaded Input Volume" + loaded_label_map = "Loaded Input Label Map" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + wrote_segmentation_output = "Wrote Segmentation Output" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, inputFile, inputLabelMap, objectId, + outputItemName, outputLabelMap, outputFolderId, presetFile): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, + dataDir, outDir, proxyurl) + 
self.inputFile = inputFile + self.inputLabelMap = inputLabelMap + self.objectId = map(int, objectId.split(',')) + self.outputItemName = outputItemName + self.outputLabelMap = outputLabelMap + self.outputFolderId = outputFolderId + self.presetFile = presetFile + + def process(self): + """Execute TubeTK PDF segmentation """ + # Create the path to the desired output surface model file + outFile = self.outputItemName + '.vtp' + outPath = os.path.join(self.outDir, outFile) + print outPath + # Create the path to the desired output labelmap file + outLabelMap = self.outputLabelMap + '.mha' + outLabelMapPath = os.path.join(self.outDir, outLabelMap) + print outLabelMapPath + # Load input item and the initial labelmap + inputPath = os.path.join(self.dataDir, self.inputFile) + print inputPath + (input_status, inputVolume) = slicer.util.loadVolume(inputPath, returnNode=True) + self.report_status(self.event_process, self.loaded_input_volume) + inputLabelMapPath = os.path.join(self.dataDir, self.inputLabelMap) + print inputLabelMapPath + (labelmap_status, labelMapVolume) = slicer.util.loadVolume(inputLabelMapPath, returnNode=True) + self.report_status(self.event_process, self.loaded_label_map) + # Set parameters for PDF segmentation + outVolume = slicer.vtkMRMLScalarVolumeNode() + slicer.mrmlScene.AddNode(outVolume) + # use 0 as voidId + voidId = 0 + params = {'inputVolume1': inputVolume.GetID(), + 'labelmap': labelMapVolume.GetID(), + 'outputVolume': outVolume.GetID(), + 'voidId': voidId} + if not self.presetFile is None: + presetPath = os.path.join(self.dataDir, self.presetFile) + with open(presetPath) as preset_file: + data = json.load(preset_file) + params['erodeRadius'] = data['pdf_segmenter_parameters']['erosion_radius'] + params['holeFillIterations'] = data['pdf_segmenter_parameters']['hole_fill_iterations'] + params['probSmoothingStdDev'] = data['pdf_segmenter_parameters']['probability_smoothing_standard_deviation'] + params['reclassifyObjectMask'] = 
data['pdf_segmenter_parameters']['reclassify_foreground_mask'] + params['reclassifyNotObjectMask'] = data['pdf_segmenter_parameters']['reclassify_barrier_mask'] + params['forceClassification'] = data['pdf_segmenter_parameters']['force_classification'] + # Get obejctId from the initial label map + accum = vtk.vtkImageAccumulate() + accum.SetInput(labelMapVolume.GetImageData()) + accum.UpdateWholeExtent() + data = accum.GetOutput() + data.Update() + numBins = accum.GetComponentExtent()[1] + if self.objectId: + params["objectId"] = self.objectId + else: + labels = [] + for i in range(0, numBins + 1): + numVoxels = data.GetScalarComponentAsDouble(i, 0, 0, 0) + if (numVoxels != 0): + labels.append(i) + if voidId in labels: + labels.remove(voidId) + params["objectId"] = labels + print labels + + self.report_status(self.event_process, self.started_segmentation) + # Run PDF segmentation in Slicer + cliNode = slicer.cli.run(slicer.modules.segmentconnectedcomponentsusingparzenpdfs, None, params, wait_for_completion=True) + self.report_status(self.event_process, self.finished_segmentation) + + # Split foreground object label from output label map + foregroundObjectVolume = outVolume + if self.objectId: + thresholder = vtk.vtkImageThreshold() + thresholder.SetNumberOfThreads(1) + thresholder.SetInput(outVolume.GetImageData()) + thresholder.SetInValue(self.objectId[0]) # foreground label + thresholder.SetOutValue(0) + thresholder.ReplaceInOn() + thresholder.ReplaceOutOn() + thresholder.ThresholdBetween(self.objectId[0], self.objectId[0]) + thresholder.SetOutputScalarType(outVolume.GetImageData().GetScalarType()) + thresholder.Update() + if thresholder.GetOutput().GetScalarRange() != (0.0, 0.0): + volumesLogic = slicer.modules.volumes.logic() + foregroundObjectVolume = volumesLogic.CreateAndAddLabelVolume(outVolume, 'foregroundLabel') + foregroundObjectVolume.GetImageData().DeepCopy(thresholder.GetOutput()) + + # Export foreground object label to local disk + save_node_params = 
{'fileType': 'mha'} + slicer.util.saveNode(foregroundObjectVolume, outLabelMapPath, save_node_params) + self.report_status(self.event_process, self.wrote_segmentation_output) + + # Call model maker to create a surface model for foreground label only + modelmaker = slicer.modules.modelmaker + mhn = slicer.vtkMRMLModelHierarchyNode() + slicer.mrmlScene.AddNode(mhn) + parameters = {'InputVolume': outVolume.GetID(), + 'ModelSceneFile': mhn.GetID(), + 'FilterType': "Sinc", + 'GenerateAll': False, + 'StartLabel': params["objectId"][0], # foreground label + 'EndLabel': params["objectId"][0], # foreground label + 'SplitNormals': True, + 'PointNormals': True, + 'SkipUnNamed': True + } + self.report_status(self.event_process, self.started_modelmaker) + cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) + self.report_status(self.event_process, self.finished_modelmaker) + + # Export output surface model to local disk + # TODO change to method without memory leaks + outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() + modelStorage = outputModelNode.CreateDefaultStorageNode() + slicer.mrmlScene.AddNode(modelStorage) + modelStorage.SetFileName(outPath) + modelStorage.WriteData(outputModelNode) + self.outFile = outFile + self.report_status(self.event_process, self.wrote_model_output) + + def jobEndingNotification(self, args=None): + """Send job ending notification to Twisted Server""" + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['outputitemname'] = self.outputItemName + reqArgs['outputlabelmap'] = self.outputLabelMap + reqArgs['outputfolderid'] = self.outputFolderId + SlicerJob.jobEndingNotification(self, reqArgs) + + def execute(self): + """Wrapper function to execute the entire slice job""" + try: + self.process() + slicer.app.exit() + self.jobEndingNotification() + except StandardError as exception: + # TODO where to do exceptions status and conditions + # self.log.exception(exception) + import 
traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + print emsg + self.report_status(self.event_exception, emsg) + exit(1) + + +if __name__ == '__main__': + (script, jobId, tmpDirRoot, json_args) = sys.argv + arg_map = json.loads(json_args) + + pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) + pipelineName = arg_map['pipeline'][0] + dataDir = arg_map['data_dir'][0] + outDir = arg_map['out_dir'][0] + proxyurl = arg_map['proxyurl'][0] + inputFile = arg_map['inputfile'][0] + inputLabelMap = arg_map['inputlabelmap'][0] + outputItemName = arg_map['outputitemname'][0] + outputLabelMap = arg_map['outputlabelmap'][0] + outputFolderId = arg_map['outputfolderid'][0] + objectId = arg_map['objectid'][0] + if 'presetfile' in arg_map.keys(): + presetFile = arg_map['presetfile'][0] + else: + presetFile = None + + sp = SlicerPdfSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, inputFile, inputLabelMap, objectId, + outputItemName, outputLabelMap, outputFolderId, presetFile) + sp.execute() diff --git a/library/pipeline.py b/library/pipeline.py new file mode 100755 index 0000000..32f40de --- /dev/null +++ b/library/pipeline.py @@ -0,0 +1,546 @@ +import re +import logging +import os +import sys +import shutil +import json + +import pydas + +class PipelineStatusEvent(): + """This class implements pipeline status events.""" + statuseventpattern = 'status&remoteprocessing_job_id=%s&event_id=%s&timestamp=%s&event_type=%s' + statuseventmessagepattern = statuseventpattern + '&message=%s' + + def __init__(self, jobId, eventId, timestamp, eventType, message=None): + self.jobId = str(jobId) + self.eventId = str(eventId) + self.timestamp = str(timestamp) + self.eventType = eventType + self.message = message + self.jobstatusId = None + + def __repr__(self): + if self.message is not None: + string = self.statuseventmessagepattern % (self.jobId, self.eventId, + self.timestamp, 
self.eventType, self.message) + else: + string = self.statuseventpattern % (self.jobId, self.eventId, + self.timestamp, self.eventType) + return string + + @staticmethod + def parseEvent(data): + """Create a status event if the input data matches status event pattern""" + anychargroup = '(.*)' + # first search for pattern with message as it is a superset of messageless pattern + pattern = PipelineStatusEvent.statuseventmessagepattern + regex = pattern % tuple([anychargroup] * pattern.count('%s')) + match = False + m = re.search(regex, data) + message = None + if m is not None: + match = True + (jobId, eventId, timestamp, eventType, message) = m.groups() + return PipelineStatusEvent(jobId, eventId, timestamp, eventType, message) + else: + pattern = PipelineStatusEvent.statuseventpattern + regex = pattern % tuple([anychargroup] * pattern.count('%s')) + m = re.search(regex, data) + if m is not None: + match = True + (jobId, eventId, timestamp, eventType) = m.groups() + return PipelineStatusEvent(jobId, eventId, timestamp, eventType) + return None + + +class PipelineFactory(): + """This class implements an interface to get pipeline and the python script + running within Slicer.""" + def getPipeline(self, pipelineName, jobId, pydasParams, tmpDirRoot, args): + """Return a specific pipeline based on input parameters""" + if pipelineName == 'segmentation': + return SegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + elif pipelineName == 'registration': + return RegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + elif pipelineName == 'pdfsegmentation': + return PdfSegPipeline(pipelineName, jobId, pydasParams, tmpDirRoot, args) + + else: + return None + + def getSlicerScript(self, pipelineName): + """Return the name of a python script running within Slicer's python + environment.""" + if pipelineName == 'segmentation': + return 'seg_slicerjob.py' + elif pipelineName == 'registration': + return 'reg_slicerjob.py' + elif pipelineName == 
'pdfsegmentation': + return 'pdfseg_slicerjob.py' + else: + return None + + +class Pipeline(): + """This class implements the base class for Pyslicer's pipelines.""" + event_pipelinestart = "PipelineStart" + event_downloadinput = "DownloadInput" + event_process = "Process" + event_uploadoutput = "UploadOutput" + event_pipelineend = "PipelineEnd" + event_exception = "Exception" + + midasstatus_started = 1 + midasstatus_done = 2 + midasstatus_exception = 3 + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot): + self.pipelineName = pipelineName + self.jobId = jobId + self.pydasParams = pydasParams + self.tmpDirRoot = tmpDirRoot + self.eventIdCounter = 0 + # TODO something better with logging + logging.basicConfig(level=logging.WARNING) + self.log = logging.getLogger('example') + + def create_event(self, eventType, message=None): + """Create a pipeline status event""" + eventId = self.eventIdCounter + self.eventIdCounter = self.eventIdCounter + 1 + timestamp = 0 + event = PipelineStatusEvent(self.jobId, eventId, timestamp, eventType, message) + return event + + def create_process_event(self, message): + return self.create_event(self.event_process, message) + + def define_events(self): + """Define all the status events for this pipeline""" + self.eventsMap = {} + events = [self.create_event(eventType) + for eventType in [self.event_pipelinestart, self.event_downloadinput]] + events = events + self.define_process_events() + events = events + [self.create_event(eventType) + for eventType in [self.event_uploadoutput, self.event_pipelineend]] + for event in events: + self.eventsMap[event.eventId] = event + # Keep original comments as below + # then when it is their time to nofify, call notify passing in jobstatu_id and timestamp + # need an imple method for subclasses to list their process events + # maybe a map of event types to event, then a submap for process events? 
+ # somehow i need to keep up with all these events here + # and maybe there is no reason to print them in the tracker anymore + + def register_events(self): + """"Get all the status events of this pipeline, register them with the + Midas server""" + self.define_events() + events = self.eventsMap.values() + method = 'midas.pyslicer.add.jobstatuses' + parameters = {} + jsonEvents = json.dumps([str(event) for event in events]) + print jsonEvents + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + parameters['token'] = pydas.session.token + parameters['events'] = jsonEvents + eventId_to_jobstatusId = pydas.session.communicator.request(method, parameters) + for (eventId, jobstatusId) in eventId_to_jobstatusId.items(): + event = self.eventsMap[eventId] + event.jobstatusId = jobstatusId + + def get_events(self): + """Get all the registered status events for a given pipeline job""" + self.define_events() + method = 'midas.pyslicer.get.job.status' + parameters = {} + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['status_only'] = True + eventId_to_jobstatusId = pydas.session.communicator.request(method, parameters) + jobstatuses = pydas.session.communicator.request(method, parameters) + for jobstatus in jobstatuses: + event = self.eventsMap[jobstatus['event_id']] + event.jobstatusId = jobstatus['jobstatus_id'] + + def define_process_events(self): + """Define the process events for a pipeline. 
It should be overwritten + in the subclass""" + return [] + + def createTmpDir(self): + """Create a temporary directory for a pipeline job""" + self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) + # clear it out if it already exists + if(os.path.exists(self.tmpdirpath)): + self.removeTmpDir() + os.mkdir(self.tmpdirpath) + # create a directory for input data + self.dataDir = os.path.join(self.tmpdirpath, 'data') + os.mkdir(self.dataDir) + # create a directory for output results + self.outDir = os.path.join(self.tmpdirpath, 'out') + os.mkdir(self.outDir) + + def downloadInput(self): + """Report download input event and then call the actual download function""" + self.reportStatus(self.event_downloadinput) + self.downloadInputImpl() + + def downloadInputImpl(self): + """Download one input file. It should be overwritten by subclasses if + they need more input data""" + self.tempfiles = {} + self.tempfiles['inputfile'] = self.downloadItem(self.itemId) + print self.tempfiles + + def downloadItem(self, itemId): + """Download a single item from the Midas server using pydas""" + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + pydas.api._download_item(itemId, self.dataDir) + # Unzip any zipped files + for filename in os.listdir(self.dataDir): + if filename.endswith('.zip'): + filepath = os.path.join(self.dataDir, filename) + zip = zipfile.ZipFile(filepath) + zip.extractall(self.dataDir) + zip.close() + # Return the path to the name of the item + item = pydas.session.communicator.item_get(pydas.session.token, itemId) + return item['name'] + + def executeDownload(self): + """Register a pipeline's events, start the pipeline as a Midas job, + create the temporary directory for this job, and then download + the input file(s)""" + try: + self.register_events() + # Send pydas pipeline started event + self.reportMidasStatus(self.midasstatus_started) + self.reportStatus(self.event_pipelinestart) + self.createTmpDir() + 
self.downloadInput() + return (self.dataDir, self.outDir, self.tempfiles) + except StandardError as exception: + # TODO where to do exceptions status and conditions + self.log.exception(exception) + print self.event_exception + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + self.reportMidasStatus(self.midasstatus_exception, emsg) + exit(1) + + def setTmpDir(self): + """Set temporary directory""" + self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) + self.dataDir = os.path.join(self.tmpdirpath, 'data') + self.outDir = os.path.join(self.tmpdirpath, 'out') + + def uploadItem(self, itemName, outputFolderId, srcDir=None, outFile=None, + itemDescription=None, type=None): + """Read everything in the srcDir and upload it as a single item. If + outFile is set, only upload that file""" + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + # Create a new item (fold_id is required) + if itemDescription is not None: + item = pydas.session.communicator.create_item(pydas.session.token, + itemName, outputFolderId, description=itemDescription) + else: + item = pydas.session.communicator.create_item(pydas.session.token, + itemName, outputFolderId) + itemId = item['item_id'] + if srcDir is None: + srcDir = self.outDir + if outFile is not None: + # Only upload this one file + uploadToken = pydas.session.communicator.generate_upload_token( + pydas.session.token, itemId, outFile) + filepath = os.path.join(srcDir, outFile) + pydas.session.communicator.perform_upload(uploadToken, outFile, + itemid=itemId, filepath=filepath) + else: + for filename in os.listdir(srcDir): + uploadToken = pydas.session.communicator.generate_upload_token( + pydas.session.token, itemId, filename) + filepath = os.path.join(srcDir, filename) + pydas.session.communicator.perform_upload(uploadToken, filename, + itemid=itemId, filepath=filepath) + # Set the output item as an output for the 
job + method = 'midas.pyslicer.add.job.output.item' + parameters = {} + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['item_id'] = itemId + if type is not None: + parameters['type'] = type + print parameters + pydas.session.communicator.request(method, parameters) + return itemId + + def uploadOutput(self): + """Report upload output event and then call the actual upload function""" + self.reportStatus(self.event_uploadoutput) + self.uploadOutputImpl() + + def uploadOutputImpl(self): + """Upload output file(s) of the pipeline job. It should be overwritten + by subclasses""" + pass + + def executeUpload(self): + """Get registered events for this pipeline, get the temporary directory + for this job, upload the output file(s), delete the temporary directory, + and then end the pipeline""" + try: + self.get_events() + self.setTmpDir() + self.uploadOutput() + self.removeTmpDir() + self.reportStatus(self.event_pipelineend) + # Send pydas pipeline finished event + self.reportMidasStatus(self.midasstatus_done) + except StandardError as exception: + # TODO where to do exceptions status and conditions + self.log.exception(exception) + print self.event_exception + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + self.reportMidasStatus(self.midasstatus_exception, emsg) + exit(1) + + def removeTmpDir(self): + """delete the temporary directory and its sub-directories and files""" + shutil.rmtree(self.tmpdirpath) + + def reportProcessStatus(self, message=None): + self.reportStatus(self.event_process, message) + + def reportStatus(self, eventType, message=None): + """Report event status to the Midas server""" + # Find the event + match = None + for event in self.eventsMap.values(): + if event.eventType == eventType and event.message == message: + match = event + if match is None: + print 'reportStatus', eventType, message + print "NO MATCH" + exit(1) + import time + 
timestamp = time.time() + match.timestamp = timestamp + method = 'midas.pyslicer.notify.jobstatus' + parameters = {} + parameters['token'] = pydas.session.token + parameters['jobstatus_id'] = match.jobstatusId + parameters['notify_date'] = timestamp + pydas.session.communicator.request(method, parameters) + + def reportMidasStatus(self, status, condition=None): + """Report the pipeline job status (started, done, exception) to the + Midas server""" + # TODO add these methods to pydas + # TODO add condition to api call + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + method = 'midas.pyslicer.update.job' + parameters = {} + parameters['token'] = pydas.session.token + parameters['job_id'] = self.jobId + parameters['status'] = status + if condition is not None: parameters['condition'] = condition + print parameters + pydas.session.communicator.request(method, parameters) + + +class SegPipeline(Pipeline): + """This class implements a pipeline for simple region growing segmentation.""" + loaded_input_volume = "Loaded Input Volume" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print jsonArgs + if 'inputitemid' in jsonArgs: + self.itemId = jsonArgs['inputitemid'][0] + if 'outputitemname' in jsonArgs: + self.outputItemName = jsonArgs['outputitemname'][0] + self.outputFolderId = jsonArgs['outputfolderid'][0] + + def define_process_events(self): + """Define the process events for simple region growing segmentation.""" + process_events = [self.loaded_input_volume, self.started_segmentation, + self.finished_segmentation, self.started_modelmaker, + self.finished_modelmaker, self.wrote_model_output] 
+ process_events = [self.create_process_event(eventType) for eventType in process_events] + print process_events + return process_events + + def uploadOutputImpl(self): + """Upload output item (surface model) to the Midas server.""" + self.outFile = self.outputItemName + '.vtp' + itemId = self.uploadItem(self.outFile, self.outputFolderId) + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + # TODO move metadata to superclass + # Set metadata on the output item + method = 'midas.item.setmultiplemetadata' + parameters = {} + parameters['token'] = pydas.session.token + parameters['itemid'] = itemId + parameters['count'] = 2 + # Use red to display the input seed and the contour of the output + # surface model in ParaView + parameters['element_1'] = 'ParaView' + parameters['qualifier_1'] = 'DiffuseColor' + parameters['value_1'] = '[1.0,0.0,0.0]' + # In ParaView, use [180, 180, 0] orientation to display the surface model + # which is generated by Slicer's model maker + # TODO: make sure this hard-coded orientation on the ModelMaker output + # is always correct + parameters['element_2'] = 'ParaView' + parameters['qualifier_2'] = 'Orientation' + parameters['value_2'] = '[180.0,180.0,0.0]' + print parameters + pydas.session.communicator.request(method, parameters) + + +class RegPipeline(Pipeline): + """This class implements a pipeline for simple region growing registration.""" + loaded_input_volumes = "Loaded Input Volumes" + finished_registration = "Finished Registration" + wrote_transformed_volume = "Wrote Transformed Volume" + wrote_transform = "Wrote Transform" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, jsonArgs): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print jsonArgs + if 'fixed_item_id' in jsonArgs: + self.fixedItemId = jsonArgs['fixed_item_id'][0] + self.movingItemId = jsonArgs['moving_item_id'][0] + self.fixedFiducialsList = json.loads(argMap['fixed_fiducials'][0]) 
+ self.movingFiducialsList = json.loads(argMap['moving_fiducials'][0]) + self.transformType = jsonArgs['transformType'][0] + if 'output_folder_id' in jsonArgs: + self.outputFolderId = jsonArgs['output_folder_id'][0] + self.outputVolumeName = jsonArgs['output_volume_name'][0] + self.outputTransformName = jsonArgs['output_transform_name'][0] + + def define_process_events(self): + """Define the process events for simple region growing registration.""" + process_events = [self.loaded_input_volumes, self.finished_registration, self.wrote_transformed_volume, self.wrote_transform] + process_events = [self.create_process_event(eventType) for eventType in process_events] + print process_events + return process_events + + def downloadInputImpl(self): + """Download input items.""" + self.tempfiles = {} + self.tempfiles['fixed_volume_file'] = self.downloadItem(self.fixedItemId) + self.tempfiles['moving_volume_file'] = self.downloadItem(self.movingItemId) + print self.tempfiles + + def uploadOutputImpl(self): + """Upload output items to the Midas server.""" + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + folder = pydas.session.communicator.create_folder(pydas.session.token, 'output_'+self.jobId, self.outputFolderId) + folderId = folder['folder_id'] + itemId = self.uploadItem(self.outputVolumeName, folderId, self.transformed_volume, itemDescription='output volume') + itemId = self.uploadItem(self.outputTransformName, folderId, self.transform, itemDescription='output transform') + + +class PdfSegPipeline(Pipeline): + """This class implements a pipeline for TubeTK PDF segmentation.""" + loaded_input_volume = "Loaded Input Volume" + loaded_label_map = "Loaded Input Label Map" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + wrote_segmentation_output = "Wrote Segmentation Output" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + 
wrote_model_output = "Wrote Model Output" + + def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot, json_args): + Pipeline.__init__(self, pipelineName, jobId, pydasParams, tmpDirRoot) + print json_args + if 'inputitemid' in json_args: + self.itemId = json_args['inputitemid'][0] + self.labelMapItemId = json_args['inputlabelmapid'][0] + if 'outputitemname' in json_args: + self.outputItemName = json_args['outputitemname'][0] + self.outputLabelMap = json_args['outputlabelmap'][0] + self.outputFolderId = json_args['outputfolderid'][0] + if 'presetitemid' in json_args: + self.presetItemId = json_args['presetitemid'][0] + else: + self.presetItemId = None + + def define_process_events(self): + """Define the process events for TubeTK PDF segmentation.""" + process_events = [self.loaded_input_volume, self.loaded_label_map, + self.started_segmentation, self.finished_segmentation, + self.wrote_segmentation_output, self.started_modelmaker, + self.finished_modelmaker, self.wrote_model_output] + process_events = [self.create_process_event(event_type) for event_type in process_events] + print process_events + return process_events + + def downloadInputImpl(self): + """Download input item and initial label map from the Midas server.""" + self.tempfiles = {} + self.tempfiles['inputfile'] = self.downloadItem(self.itemId) + # In case user saves initial label map with the same name multiple times + itemName = self.downloadItem(self.labelMapItemId) + bitstreamName = os.path.splitext(itemName)[0] + '.mha' + self.tempfiles['inputlabelmap'] = bitstreamName + if not self.presetItemId is None: + self.tempfiles['presetfile'] = self.downloadItem(self.presetItemId) + print self.tempfiles + + def uploadOutputImpl(self): + """Upload output labelmap and its surface model to the Midas server.""" + # Upload output labelmap file (its type is 'labelmap') + self.outLabelMapFile = self.outputLabelMap + '.mha' + os.mkdir(os.path.join(self.outDir, "labelmap")) + labelMapTmpDir = 
os.path.join(self.outDir, "labelmap") + shutil.copy(os.path.join(self.outDir, self.outLabelMapFile), labelMapTmpDir) + labelmapItemId = self.uploadItem(self.outLabelMapFile, self.outputFolderId, srcDir=labelMapTmpDir, type='labelmap') + # Upload outpuf surface model file (default type) + self.outFile = self.outputItemName + '.vtp' + os.mkdir(os.path.join(self.outDir, "outfile")) + outFileTmpDir = os.path.join(self.outDir, "outfile") + shutil.copy(os.path.join(self.outDir, self.outFile), outFileTmpDir) + itemId = self.uploadItem(self.outFile, self.outputFolderId, srcDir=outFileTmpDir) + (email, apiKey, url) = self.pydasParams + pydas.login(email=email, api_key=apiKey, url=url) + # Set metadata on the output item + method = 'midas.item.setmultiplemetadata' + parameters = {} + parameters['token'] = pydas.session.token + parameters['itemid'] = itemId + parameters['count'] = 2 + # Use red to display the input seed and the contour of the output + # surface model in ParaView + parameters['element_1'] = 'ParaView' + parameters['qualifier_1'] = 'DiffuseColor' + parameters['value_1'] = '[1.0,0.0,0.0]' + # In ParaView, use [180, 180, 0] orientation to display the surface model + # which is generated by Slicer's model maker + parameters['element_2'] = 'ParaView' + parameters['qualifier_2'] = 'Orientation' + parameters['value_2'] = '[180.0,180.0,0.0]' + print parameters + pydas.session.communicator.request(method, parameters) diff --git a/library/reg_pipeline.py b/library/reg_pipeline.py deleted file mode 100644 index af86d5d..0000000 --- a/library/reg_pipeline.py +++ /dev/null @@ -1,119 +0,0 @@ -from __main__ import vtk, slicer -from slicerprocess import SlicerPipeline -import pydas - -import slicer_utils - - - -class SlicerRegPipeline(SlicerPipeline): - loaded_input_volumes = "Loaded Input Volumes" - finished_registration = "Finished Registration" - wrote_transformed_volume = "Wrote Transformed Volume" - wrote_transform = "Wrote Transform" - - def __init__(self, jobId, 
pydasParams, tmpDirRoot, fixedItemId, movingItemId, fixedFiducialsList, movingFiducialsList, transformType, outputFolderId, outputVolumeName, outputTransformName): - SlicerPipeline.__init__(self, 'fiducialregistration', jobId, pydasParams, tmpDirRoot) - self.fixedItemId = fixedItemId - self.movingItemId = movingItemId - print self.fixedItemId, self.movingItemId - self.fixedFiducialsList = fixedFiducialsList - self.movingFiducialsList = movingFiducialsList - self.transformType = transformType - self.outputFolderId = outputFolderId - self.outputVolumeName = outputVolumeName - self.outputTransformName = outputTransformName - - def define_process_events(self): - process_events = [self.loaded_input_volumes, self.finished_registration, self.wrote_transformed_volume, self.wrote_transform] - process_events = [self.create_process_event(event_type) for event_type in process_events] - print process_events - return process_events - - - def downloadInputImpl(self): - print "segmodeldownloadinputimpl" - print self.fixedItemId, self.movingItemId - self.fixedVolumeFile = self.downloadItem(self.fixedItemId) - self.movingVolumeFile = self.downloadItem(self.movingItemId) - print self.fixedVolumeFile, self.movingVolumeFile - - - def parseSeedpointsList(self, seedpointsList): - print 'parseSeedpointsList' - #print seedpointsList - #print type(seedpointsList) - # TODO this is pretty bad - if type(seedpointsList) == type([]) and type(seedpointsList[0]) == type([]): - #print type(seedpointsList[0][0]) - if type(seedpointsList[0][0]) != type(0): - #print "noe" - seedpointsList = [[float(p) for p in seed] for seed in seedpointsList] - #print seedpointsList - #types = [type(seed) for seed in seedpointsList] - #print types - # TODO something better, email from jc - seedpoints = [(-1 * x, -1 * y, z) for (x, y, z) in seedpointsList] - return seedpoints - - - def processImpl(self): - print "segmodelprocessimpl" - # take first two coords and multiply by -1 - # TODO something much more systematic 
dealing with coords - - # parse the seedpoints and create fiducials lists - fixedFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.fixedFiducialsList)) - movingFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.movingFiducialsList)) - - # load the volumes - print self.fixedVolumeFile - print self.movingVolumeFile - - fixedVolume = slicer_utils.load_volume(self.fixedVolumeFile) - movingVolume = slicer_utils.load_volume(self.movingVolumeFile) - self.reportProcessStatus(self.loaded_input_volumes) - outputTransform = slicer_utils.create_linear_transform() - - slicer_utils.run_fiducial_registration(fixedFiducialsList, movingFiducialsList, outputTransform, self.transformType) - self.reportProcessStatus(self.finished_registration) - - self.transformed_volume = self.outputVolumeName + '.mha' - outPath = os.path.join(self.outdir, self.transformed_volume) - # apply transform to moving image, then save volume - movingVolume.ApplyTransformMatrix(outputTransform.GetMatrixTransformToParent()) - slicer_utils.write_storable_node(movingVolume, outPath) - self.reportProcessStatus(self.wrote_transformed_volume) - - self.transform = self.outputTransformName + '.tfm' - outPath = os.path.join(self.outdir, self.transform) - slicer_utils.write_storable_node(outputTransform, outPath) - - self.reportProcessStatus(self.wrote_transform) - - def uploadOutputImpl(self): - #print "segmodeluploadoutputimpl" - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - folder = pydas.session.communicator.create_folder(pydas.session.token, 'output_'+self.jobId, self.outputFolderId) - folder_id = folder['folder_id'] - - item_id = self.uploadItem(self.outputVolumeName, folder_id, self.transformed_volume, item_description='output volume') - item_id = self.uploadItem(self.outputTransformName, folder_id, self.transform, item_description='output transform') - - - -if __name__ == '__main__': - print 'reg 
pipeline', sys.argv - (script, jobId, tmpDirRoot, json_args) = sys.argv - import json - arg_map = json.loads(json_args) - #print arg_map - pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) - (fixed_item_id, moving_item_id, fixed_fiducials, moving_fiducials, transform_type, output_folder_id, output_volume_name, output_transform_name) = (arg_map['fixed_item_id'][0], arg_map['moving_item_id'][0], json.loads(arg_map['fixed_fiducials'][0]), json.loads(arg_map['moving_fiducials'][0]), arg_map['transform_type'][0], arg_map['output_folder_id'][0], arg_map['output_volume_name'][0], arg_map['output_transform_name'][0]) - - print fixed_item_id, moving_item_id - rp = SlicerRegPipeline(jobId, pydasParams, tmpDirRoot, fixed_item_id, moving_item_id, fixed_fiducials, moving_fiducials, transform_type, output_folder_id, output_volume_name, output_transform_name) - rp.execute() - - diff --git a/library/reg_slicerjob.py b/library/reg_slicerjob.py new file mode 100644 index 0000000..06cbbc8 --- /dev/null +++ b/library/reg_slicerjob.py @@ -0,0 +1,124 @@ +import os +import json + +import vtk, slicer + +from slicerjob import SlicerJob +import slicer_utils + +class SlicerReg(SlicerJob): + """This class implements a job executed in Slicer's Python environment: + simple region growing registration""" + # TODO run this pipeline using pvw module instead of visualize module + loaded_input_volumes = "Loaded Input Volumes" + finished_registration = "Finished Registration" + wrote_transformed_volume = "Wrote Transformed Volume" + wrote_transform = "Wrote Transform" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, fixedVolumeFile, movingVolumeFile, + fixedItemId, movingItemId, fixedFiducialsList, + movingFiducialsList, transformType, outputFolderId, + outputVolumeName, outputTransformName): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, + dataDir, outDir, proxyurl) + self.fixedVolumeFile = 
fixedVolumeFile + self.movingVolumeFile = movingVolumeFile + self.fixedItemId = fixedItemId + self.movingItemId = movingItemId + self.fixedFiducialsList = fixedFiducialsList + self.movingFiducialsList = movingFiducialsList + self.transformType = transformType + self.outputFolderId = outputFolderId + self.outputVolumeName = outputVolumeName + self.outputTransformName = outputTransformName + + def parseSeedpointsList(self, seedpointsList): + print 'parseSeedpointsList' + #print seedpointsList + #print type(seedpointsList) + # TODO this is pretty bad + if type(seedpointsList) == type([]) and type(seedpointsList[0]) == type([]): + #print type(seedpointsList[0][0]) + if type(seedpointsList[0][0]) != type(0): + #print "noe" + seedpointsList = [[float(p) for p in seed] for seed in seedpointsList] + #print seedpointsList + #types = [type(seed) for seed in seedpointsList] + #print types + # TODO something better, email from jc + # Keep original comments as above + seedpoints = [(-1 * x, -1 * y, z) for (x, y, z) in seedpointsList] + return seedpoints + + def process(self): + print "regmodelprocessimpl" + # take first two coords and multiply by -1 + # TODO something much more systematic dealing with coords + + # parse the seedpoints and create fiducials lists + fixedFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.fixedFiducialsList)) + movingFiducialsList = slicer_utils.create_fiducials_list(self.parseSeedpointsList(self.movingFiducialsList)) + + # load the volumes + print self.fixedVolumeFile + print self.movingVolumeFile + + fixedVolume = slicer_utils.load_volume(self.fixedVolumeFile) + movingVolume = slicer_utils.load_volume(self.movingVolumeFile) + self.report_status(self.event_process, self.loaded_input_volumes) + outputTransform = slicer_utils.create_linear_transform() + + slicer_utils.run_fiducial_registration(fixedFiducialsList, movingFiducialsList, outputTransform, self.transformType) + self.report_status(self.event_process, 
self.finished_registration) + + self.transformed_volume = self.outputVolumeName + '.mha' + outPath = os.path.join(self.outDir, self.transformed_volume) + # apply transform to moving image, then save volume + movingVolume.ApplyTransformMatrix(outputTransform.GetMatrixTransformToParent()) + slicer_utils.write_storable_node(movingVolume, outPath) + self.report_status(self.event_process, self.wrote_transformed_volume) + + self.transform = self.outputTransformName + '.tfm' + outPath = os.path.join(self.outDir, self.transform) + slicer_utils.write_storable_node(outputTransform, outPath) + + self.report_status(self.event_process, self.wrote_transform) + + def jobEndingNotification(self, args=None): + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['output_folder_id'] = self.outputFolderId + reqArgs['output_volume_name'] = self.outputVolumeName + reqArgs['output_transform_name'] = self.outputTransformName + SlicerJob.jobEndingNotification(self, reqArgs) + +if __name__ == '__main__': + print 'reg pipeline', sys.argv + (script, jobId, tmpDirRoot, jsonArgs) = sys.argv + argMap = json.loads(jsonArgs) + + pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) + + fixedVolumeFile = argMap['fixed_volume_file'][0] + movingVolumeFile = argMap['moving_volume_file'][0] + fixedItemId = argMap['fixed_volume_file'][0] + movingItemId = argMap['moving_volume_file'][0] + fixedFiducialsList = json.loads(argMap['fixed_fiducials'][0]) + movingFiducialsList = json.loads(argMap['moving_fiducials'][0]) + transformType = argMap['transform_type'][0] + outputFolderId = argMap['output_folder_id'][0] + outputVolumeName = argMap['output_volume_name'][0] + outputTransformName = argMap['output_transform_name'][0] + + print fixed_item_id, moving_item_id + rp = SlicerReg(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, fixedVolumeFile, movingVolumeFile, + fixedItemId, movingItemId, fixedFiducialsList, + movingFiducialsList, 
transformType, outputFolderId, + outputVolumeName, outputTransformName) + rp.execute() + + diff --git a/library/seg_pipeline.py b/library/seg_pipeline.py deleted file mode 100644 index c9d651a..0000000 --- a/library/seg_pipeline.py +++ /dev/null @@ -1,118 +0,0 @@ -from __main__ import vtk, slicer -from slicerprocess import SlicerPipeline -import pydas - - - -class SlicerSegPipeline(SlicerPipeline): - loaded_input_volume = "Loaded Input Volume" - started_segmentation = "Starting Segmentation" - finished_segmentation = "Finished Segmentation" - started_modelmaker = "Starting Modelmaker" - finished_modelmaker = "Finished Modelmaker" - wrote_model_output = "Wrote Model Output" - - def __init__(self, jobId, pydasParams, tmpDirRoot, itemId, seed, outputItemName, outputFolderId ): - SlicerPipeline.__init__(self, 'segmentationmodel', jobId, pydasParams, tmpDirRoot) - self.itemId = itemId - self.seed = seed - self.outputItemName = outputItemName - self.outputFolderId = outputFolderId - - def downloadInputImpl(self): - print "segmodeldownloadinputimpl" - self.headerFile = self.downloadItem(self.itemId) - - def define_process_events(self): - process_events = [self.loaded_input_volume, self.started_segmentation, self.finished_segmentation, self.started_modelmaker, self.finished_modelmaker, self.wrote_model_output] - process_events = [self.create_process_event(event_type) for event_type in process_events] - print process_events - return process_events - - def processImpl(self): - print "segmodelprocessimpl" - # take first two coords and multiply by -1 - # TODO something much more systematic dealing with coords - (x, y, z) = [float(coord) for coord in self.seed.split(',')] - seedPointCoords = (-1 * x, -1 * y, z) - - # create the path to the desired output file - outFile = self.outputItemName + '.vtp' - outPath = os.path.join(self.outdir, outFile) - - (status, inputVolume) = slicer.util.loadVolume(self.headerFile, returnNode=True) - 
self.reportProcessStatus(self.loaded_input_volume) - - outVolume = slicer.vtkMRMLScalarVolumeNode() - slicer.mrmlScene.AddNode(outVolume) - fiducialNode = slicer.vtkMRMLAnnotationFiducialNode() - fiducialNode.SetFiducialWorldCoordinates(seedPointCoords) - fiducialNode.SetName('Seed Point') - fiducialNode.Initialize(slicer.mrmlScene) - fiducialsList = getNode('Fiducials List') - params = {'inputVolume': inputVolume.GetID(), 'outputVolume': outVolume.GetID(), 'seed' : fiducialsList.GetID(), 'iterations' : 6} - self.reportProcessStatus(self.started_segmentation) - cliNode = slicer.cli.run(slicer.modules.simpleregiongrowingsegmentation,None, params , wait_for_completion=True) - #from time import sleep - #sleep(3) - self.reportProcessStatus(self.finished_segmentation) - - # make a model, only param is name of output file - modelmaker = slicer.modules.modelmaker - mhn = slicer.vtkMRMLModelHierarchyNode() - slicer.mrmlScene.AddNode(mhn) - parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} - self.reportProcessStatus(self.started_modelmaker) - cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) - self.reportProcessStatus(self.finished_modelmaker) - - # output the model - # TODO change to method without memory leaks - outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() - modelStorage = outputModelNode.CreateDefaultStorageNode() - slicer.mrmlScene.AddNode(modelStorage) - modelStorage.SetFileName(outPath) - modelStorage.WriteData(outputModelNode) - self.outFile = outFile - #sleep(3) - self.reportProcessStatus(self.wrote_model_output) - -#TODO metadata -# Visualize DiffuseColor 1.0,0.0,0.0 -#Visualize Orientation 180.0,180.0,0 - - - - def uploadOutputImpl(self): - #print "segmodeluploadoutputimpl" - item_id = self.uploadItem(self.outFile, self.outputFolderId) - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - # TODO move metadata to superclass - # set metadata on 
the output item - method = 'midas.item.setmultiplemetadata' - parameters = {} - parameters['token'] = pydas.session.token - parameters['itemid'] = item_id - parameters['count'] = 2 - parameters['element_1'] = 'Visualize' - parameters['element_2'] = 'Visualize' - parameters['qualifier_1'] = 'DiffuseColor' - parameters['qualifier_2'] = 'Orientation' - parameters['value_1'] = '[1.0,0.0,0.0]' - parameters['value_2'] = '[180.0,180.0,0.0]' - print parameters - pydas.session.communicator.request(method, parameters) - - - - - -if __name__ == '__main__': - (script, jobId, tmpDirRoot, json_args) = sys.argv - import json - arg_map = json.loads(json_args) - pydasParams = (arg_map['email'][0], arg_map['apikey'][0], arg_map['url'][0]) - (input_item_id, coords, output_item_name, output_folder_id) = (arg_map['inputitemid'][0], arg_map['coords'][0], arg_map['outputitemname'][0], arg_map['outputfolderid'][0], ) - sp = SlicerSegPipeline(jobId, pydasParams, tmpDirRoot, input_item_id, coords, output_item_name, output_folder_id) - sp.execute() diff --git a/library/seg_slicerjob.py b/library/seg_slicerjob.py new file mode 100644 index 0000000..3211a97 --- /dev/null +++ b/library/seg_slicerjob.py @@ -0,0 +1,115 @@ +import os +import json + +import vtk, slicer +from slicerjob import SlicerJob + +class SlicerSeg(SlicerJob): + """This class implements a job executed in Slicer's Python environment - simple region growing segmentation""" + loaded_input_volume = "Loaded Input Volume" + started_segmentation = "Starting Segmentation" + finished_segmentation = "Finished Segmentation" + started_modelmaker = "Starting Modelmaker" + finished_modelmaker = "Finished Modelmaker" + wrote_model_output = "Wrote Model Output" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl, inputFile, seed, outputItemName, outputFolderId): + SlicerJob.__init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl) + self.inputFile = inputFile + self.seed 
= seed + self.outputItemName = outputItemName + self.outputFolderId = outputFolderId + + def process(self): + """Execute simple region growing segmentation """ + print "start simple region growing segmentation" + # Take first two coords and multiply by -1 + # TODO something much more systematic dealing with coords + (x, y, z) = [float(coord) for coord in self.seed.split(',')] + seedPointCoords = (-1 * x, -1 * y, z) + + # Create the path to the desired output file + outFile = self.outputItemName + '.vtp' + outPath = os.path.join(self.outDir, outFile) + print outPath + inputPath = os.path.join(self.dataDir, self.inputFile) + print inputPath + (status, inputVolume) = slicer.util.loadVolume(inputPath, returnNode=True) + self.report_status(self.event_process, self.loaded_input_volume) + # Run simple region segmentation in Slicer + outVolume = slicer.vtkMRMLScalarVolumeNode() + slicer.mrmlScene.AddNode(outVolume) + fiducialNode = slicer.vtkMRMLAnnotationFiducialNode() + fiducialNode.SetFiducialWorldCoordinates(seedPointCoords) + fiducialNode.SetName('Seed Point') + fiducialNode.Initialize(slicer.mrmlScene) + fiducialsList = getNode('Fiducials List') + params = {'inputVolume': inputVolume.GetID(), 'outputVolume': outVolume.GetID(), 'seed' : fiducialsList.GetID(), 'iterations' : 6} + self.report_status(self.event_process, self.started_segmentation) + cliNode = slicer.cli.run(slicer.modules.simpleregiongrowingsegmentation, None, params , wait_for_completion=True) + self.report_status(self.event_process, self.finished_segmentation) + + # Call Slicer's model make to create a model, the only parameter is name of output file + modelmaker = slicer.modules.modelmaker + mhn = slicer.vtkMRMLModelHierarchyNode() + slicer.mrmlScene.AddNode(mhn) + parameters = {'InputVolume': outVolume.GetID(), 'ModelSceneFile': mhn.GetID()} + self.report_status(self.event_process, self.started_modelmaker) + cliModelNode = slicer.cli.run(modelmaker, None, parameters, wait_for_completion=True) + 
self.report_status(self.event_process, self.finished_modelmaker) + + # Export the model to local disk + # TODO change to method without memory leaks + outputModelNode = mhn.GetNthChildNode(0).GetAssociatedNode() + modelStorage = outputModelNode.CreateDefaultStorageNode() + slicer.mrmlScene.AddNode(modelStorage) + modelStorage.SetFileName(outPath) + modelStorage.WriteData(outputModelNode) + self.outFile = outFile + self.report_status(self.event_process, self.wrote_model_output) + + def jobEndingNotification(self, args=None): + """Send job ending notification to Twisted Server""" + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['outputitemname'] = self.outputItemName + reqArgs['outputfolderid'] = self.outputFolderId + SlicerJob.jobEndingNotification(self, reqArgs) + + def execute(self): + """Wrapper function to execute the entire slice job""" + try: + self.process() + slicer.app.exit() + self.jobEndingNotification() + except StandardError as exception: + # TODO where to do exceptions status and conditions + #self.log.exception(exception) + import traceback + etype, value, tb = sys.exc_info() + emsg = repr(traceback.format_exception(etype, value, tb)) + print emsg + self.report_status(self.event_exception, emsg) + exit(1) + + +if __name__ == '__main__': + (script, jobId, tmpDirRoot, jsonArgs) = sys.argv + argMap = json.loads(jsonArgs) + pydasParams = (argMap['email'][0], argMap['apikey'][0], argMap['url'][0]) + + pipelineName = argMap['pipeline'][0] + dataDir = argMap['data_dir'][0] + outDir = argMap['out_dir'][0] + proxyurl = argMap['proxyurl'][0] + inputFile = argMap['inputfile'][0] + coords = argMap['coords'][0] + outputItemName = argMap['outputitemname'][0] + outputFolderId = argMap['outputfolderid'][0] + + sp = SlicerSeg(jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, + outDir, proxyurl, inputFile, coords, outputItemName, + outputFolderId) + sp.execute() diff --git a/library/slicerjob.py b/library/slicerjob.py new file mode 
100644 index 0000000..5cb1577 --- /dev/null +++ b/library/slicerjob.py @@ -0,0 +1,48 @@ +import os +import urllib2 +import urllib + +class SlicerJob(): + """This class implements the base class for python jobs excuted within Slicer's Python environment.""" + event_process = "Process" + event_exception = "Exception" + + def __init__(self, jobId, pipelineName, pydasParams, tmpDirRoot, dataDir, outDir, proxyurl): + self.jobId = jobId + self.pipelineName = pipelineName + self.pydasParams = pydasParams + self.tmpdirpath = tmpDirRoot + self.dataDir = os.path.join(tmpDirRoot, dataDir) + self.outDir = os.path.join(tmpDirRoot, outDir) + self.proxyurl = proxyurl + + def report_status(self, eventType, message): + """Send the pipeline status event information to the Twisted Server and + let it report the status event to the Midas server.""" + args = {} + args['pipeline'] = self.pipelineName + args['job_id'] = self.jobId + (args['email'], args['apikey'], args['url']) = self.pydasParams + args['event_type'] = eventType + args['message'] = message + # By default, Python requests module is not available in Slicer's python environment + data = urllib.urlencode(args) + request = urllib2.Request(self.proxyurl + "slicerjob/reportstatus?" + data) + response = urllib2.urlopen(request) + print response + + def jobEndingNotification(self, args=None): + """Send the pipeline ending information to the Twisted Server, and let + it report the status event and upload the output to the Midas server.""" + if args is not None: + reqArgs = args.copy() + else: + reqArgs = {} + reqArgs['pipeline'] = self.pipelineName + reqArgs['job_id'] = self.jobId + (reqArgs['email'], reqArgs['apikey'], reqArgs['url']) = self.pydasParams + data = urllib.urlencode(reqArgs) + request = urllib2.Request(self.proxyurl + "slicerjob/finish?" 
+ data) + response = urllib2.urlopen(request) + print response + diff --git a/library/slicerprocess.py b/library/slicerprocess.py index edc74bc..f3740d8 100755 --- a/library/slicerprocess.py +++ b/library/slicerprocess.py @@ -1,19 +1,20 @@ from twisted.internet import reactor from twisted.internet import protocol -import re -import logging import os -import sys -import pydas -import shutil import json +from pipeline import PipelineStatusEvent, PipelineFactory + class SlicerProcessJobManager(): - def __init__(self, tmpDirRoot, slicerPath): + """This class should implement Pyslicer's own process manager. Pyslicer still uses Slicer's process manager as of now.""" + # Keep the original code of this class as of now + def __init__(self, tmpDirRoot, slicerPath, proxyurl): self.jobs = {} #self.processCount = 0 self.tmpDirRoot = tmpDirRoot self.slicerPath = slicerPath + self.proxyurl = proxyurl + # def getNextJobId(self): # # could be problematic if multithreaded # jobId = self.processCount @@ -23,16 +24,11 @@ def __init__(self, tmpDirRoot, slicerPath): def addJob(self, jobId): self.jobs[str(jobId)] = {} - - def processEvent(self, event): pass #jobEvents = self.jobs[str(event.jobId)] #jobEvents[event.eventId] = event - - - def getStatus(self, jobId=None): print "getStatus", jobId print self.jobs @@ -46,88 +42,15 @@ def getStatus(self, jobId=None): status = "" return status - - - -class SlicerProcessStatusEvent(): - statuseventpattern = 'status&remoteprocessing_job_id=%s&event_id=%s×tamp=%s&event_type=%s' - statuseventmessagepattern = statuseventpattern + '&message=%s' - - def __init__(self, jobId, eventId, timestamp, eventType, message=None): - self.jobId = str(jobId) - self.eventId = str(eventId) - self.timestamp = str(timestamp) - self.eventType = eventType - self.message = message - self.jobstatus_id = None - - def __repr__(self): - if self.message is not None: - string = self.statuseventmessagepattern % (self.jobId, self.eventId, self.timestamp, self.eventType, 
self.message) - else: - string = self.statuseventpattern % (self.jobId, self.eventId, self.timestamp, self.eventType) - return string - - @staticmethod - def parseEvent(data): - anychargroup = '(.*)' - # first search for pattern with message as it is a superset of messageless pattern - pattern = SlicerProcessStatusEvent.statuseventmessagepattern - regex = pattern % tuple([anychargroup] * pattern.count('%s')) - match = False - m = re.search(regex, data) - message = None - if m is not None: - match = True - #print "Match:", m.groups() - (jobId, eventId, timestamp, eventType, message) = m.groups() - return SlicerProcessStatusEvent(jobId, eventId, timestamp, eventType, message) - else: - pattern = SlicerProcessStatusEvent.statuseventpattern - regex = pattern % tuple([anychargroup] * pattern.count('%s')) - m = re.search(regex, data) - if m is not None: - match = True - #print "Match:", m.groups() - (jobId, eventId, timestamp, eventType) = m.groups() - return SlicerProcessStatusEvent(jobId, eventId, timestamp, eventType) - return None - - -class SlicerPipelineStatusTracker(): - def __init__(self, pipelineName, jobId): - self.clis = {} - self.started = True - self.finished = False - self.jobId = jobId - self.pipelineName = pipelineName - - def reportStatus(self, event): - # could change this to calling url if need be - print(event) - - def start(self): - self.started = True - - def finish(self): - self.finished = True - from __main__ import slicer - slicer.app.exit() - - class SlicerProcess(protocol.ProcessProtocol): - pipeline_scripts = {'segmentation' : 'seg_pipeline.py', 'registration' : 'reg_pipeline.py'} - - - - def __init__(self, jobManager, jobId, pipeline, request_args): - #def __init__(self, jobManager, jobId, requestArgs, jsonargs): + """This class implements a twisted process which runs a python script within + Slicer's Python environment.""" + def __init__(self, jobManager, jobId, pipelineName, requestArgs): self.jobManager = jobManager self.jobId = jobId 
self.jobManager.addJob(jobId) - #self.requestArgs = requestArgs - self.pipeline = pipeline - self.request_args = request_args + self.pipelineName = pipelineName + self.requestArgs = requestArgs self.data = "" self.err = "" self.events = {} @@ -136,14 +59,12 @@ def connectionMade(self): print str(self.jobId) + "connectionMade!" self.transport.closeStdin() # tell them we're done - - def outReceived(self, data): #print str(self.jobId) + "outReceived! with %d bytes!" % len(data) # look for status events self.data = self.data + data print data - event = SlicerProcessStatusEvent.parseEvent(data) + event = PipelineStatusEvent.parseEvent(data) if event is not None: self.jobManager.processEvent(event) @@ -161,12 +82,10 @@ def outConnectionLost(self): lines = self.data.split('\n') print len(lines) for line in lines: - event = SlicerProcessStatusEvent.parseEvent(line) + event = PipelineStatusEvent.parseEvent(line) if event is not None: self.jobManager.processEvent(event) - - def errConnectionLost(self): print str(self.jobId) + "errConnectionLost! The child closed their stderr." 
print self.err @@ -178,247 +97,22 @@ def processEnded(self, reason): print str(self.jobId) + "processEnded, status %d" % (reason.value.exitCode,) def run(self): + """Trigger the slicer job by running a python script in Slicer's environment""" xvfbLogfile = os.path.join(self.jobManager.tmpDirRoot, 'xvfb.log') xvfbCmdParts = ['xvfb-run', '-a', '-e', xvfbLogfile] slicerArgs = ['--no-main-window', '--python-script'] - slicerPythonScript = [self.pipeline_scripts[self.pipeline]] - #licerPythonScript = ['seg_pipeline.py'] - import json - json_args = json.dumps(self.request_args) - print json_args - print [json_args] - print [str(self.jobId), self.jobManager.tmpDirRoot, json_args] - slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + slicerPythonScript + [str(self.jobId), self.jobManager.tmpDirRoot, json_args]#['slicerjob'] + # Get name of the python script for the pipeline + pipelinefactory = PipelineFactory() + slicerPythonScript = pipelinefactory.getSlicerScript(self.pipelineName) + # Input parameters from the HTTP request + jsonArgs = json.dumps(self.requestArgs) + print [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs] + # Slicer job in command line + slicerCmdParts = [self.jobManager.slicerPath] + slicerArgs + \ + [slicerPythonScript] + [str(self.jobId), self.jobManager.tmpDirRoot, jsonArgs] cmd = xvfbCmdParts + slicerCmdParts print str(self.jobId) + " run: " + str(cmd) print ">>>>>>>>>>>>>>>SlicerProcess running:",str(cmd) - reactor.spawnProcess(self, 'xvfb-run', cmd, {}, usePTY=True) - - - - - -class SlicerPipeline(): - - event_pipelinestart = "PipelineStart" - event_downloadinput = "DownloadInput" - event_process = "Process" - event_uploadoutput = "UploadOutput" - event_pipelineend = "PipelineEnd" - event_exception = "Exception" - - midasstatus_started = 1 - midasstatus_done = 2 - midasstatus_exception = 3 - - def __init__(self, pipelineName, jobId, pydasParams, tmpDirRoot): - self.pipelineName = pipelineName - self.jobId = jobId - self.pydasParams 
= pydasParams - self.tmpDirRoot = tmpDirRoot - self.tracker = SlicerPipelineStatusTracker(pipelineName, jobId) - self.eventIdCounter = 0 - #TODO something better with logging - logging.basicConfig(level=logging.WARNING) - self.log = logging.getLogger('example') - self.register_events() - - def create_event(self, event_type, message=None): - event_id = self.eventIdCounter - self.eventIdCounter = self.eventIdCounter + 1 - timestamp = 0 - event = SlicerProcessStatusEvent(self.jobId, event_id, timestamp, event_type, message) - return event - - def create_process_event(self, message): - return self.create_event(self.event_process, message) - - def define_events(self): - self.events_map = {} - events = [self.create_event(event_type) for event_type in [self.event_pipelinestart, self.event_downloadinput]] - events = events + self.define_process_events() - events = events + [self.create_event(event_type) for event_type in [self.event_uploadoutput, self.event_pipelineend]] - for event in events: - self.events_map[event.eventId] = event - # then when it is their time to nofify, call notify passing in jobstatu_id and timestamp - # need an imple method for subclasses to list their process events - # maybe a map of event types to event, then a submap for process events? 
- # somehow i need to keep up with all these events here - # and maybe there is no reason to print them in the tracker anymore - - def register_events(self): - # get all the events, register them with the midas server - self.define_events() - events = self.events_map.values() - method = 'midas.pyslicer.add.jobstatuses' - parameters = {} - json_events = json.dumps([str(event) for event in events]) - print json_events - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - parameters['token'] = pydas.session.token - parameters['events'] = json_events - event_id_to_jobstatus_id = pydas.session.communicator.request(method, parameters) - for (event_id, jobstatus_id) in event_id_to_jobstatus_id.items(): - event = self.events_map[event_id] - event.jobstatus_id = jobstatus_id - - - - def define_process_events(self): - # should be overwritten in the subclass - return [] - - def createTmpDir(self): - self.tmpdirpath = ('%s_%s_tmpdir') % (self.jobId, self.pipelineName) - # clear it out if it already exists - if(os.path.exists(self.tmpdirpath)): - self.removeTmpDir() - os.mkdir(self.tmpdirpath) - # create a data dir - self.datadir = os.path.join(self.tmpdirpath, 'data') - os.mkdir(self.datadir) - # create an out dir - self.outdir = os.path.join(self.tmpdirpath, 'out') - os.mkdir(self.outdir) - - def removeTmpDir(self): - shutil.rmtree(self.tmpdirpath) - - - def downloadInput(self): - self.reportStatus(self.event_downloadinput) - self.downloadInputImpl() - - def downloadInputImpl(self): - pass - - def downloadItem(self, itemId): - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - - pydas.api._download_item(itemId, self.datadir) - # unzip any zipped files - for filename in os.listdir(self.datadir): - if filename.endswith('.zip'): - filepath = os.path.join(self.datadir, filename) - zip = zipfile.ZipFile(filepath) - zip.extractall(self.datadir) - zip.close() - # return the path to the name of the 
item - item = pydas.session.communicator.item_get(pydas.session.token, itemId) - return os.path.join(self.datadir, item['name']) - - - def uploadItem(self, itemName, outputFolderId, out_file=None, item_description=None): - # read everything in the outdir and upload it as a single item - # create a new item - # need a folder id - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - if item_description is not None: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId, description=item_description) - else: - item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId) - item_id = item['item_id'] - if out_file is not None: - # only upload this one file - upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, out_file) - filepath=os.path.join(self.outdir, out_file) - pydas.session.communicator.perform_upload(upload_token, out_file, itemid=item_id, filepath=filepath) - else: - for filename in os.listdir(self.outdir): - upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, filename) - filepath=os.path.join(self.outdir, filename) - pydas.session.communicator.perform_upload(upload_token, filename, itemid=item_id, filepath=filepath) - # set the output item as an output for the job - method = 'midas.pyslicer.add.job.output.item' - parameters = {} - parameters['token'] = pydas.session.token - parameters['job_id'] = self.jobId - parameters['item_id'] = item_id - print parameters - pydas.session.communicator.request(method, parameters) - return item_id - - - def process(self): - self.processImpl() - - def processImpl(self): - pass - - def reportStatus(self, eventType, message=None): - # find the event - match = None - for event in self.events_map.values(): - if event.eventType == eventType and event.message == message: - match = event - if match is None: - print 'reportStatus', eventType, 
message - print "NO MATCH" - exit(1) - import time - timestamp = time.time() - match.timestamp = timestamp - method = 'midas.pyslicer.notify.jobstatus' - parameters = {} - parameters['token'] = pydas.session.token - parameters['jobstatus_id'] = match.jobstatus_id - parameters['notify_date'] = timestamp - pydas.session.communicator.request(method, parameters) - - - def reportProcessStatus(self, message=None): - self.reportStatus(self.event_process, message) - - def uploadOutput(self): - self.reportStatus(self.event_uploadoutput) - self.uploadOutputImpl() - - def uploadOutputImpl(self): - pass - - - def reportMidasStatus(self, status, condition=None): - # TODO add these methods to pydas - # TODO add condition to api call - (email, apiKey, url) = self.pydasParams - pydas.login(email=email, api_key=apiKey, url=url) - method = 'midas.pyslicer.update.job' - parameters = {} - parameters['token'] = pydas.session.token - parameters['job_id'] = self.jobId - parameters['status'] = status - if condition is not None: parameters['condition'] = condition - print parameters - pydas.session.communicator.request(method, parameters) - - - def execute(self): - try: - self.tracker.start() - # send pydas pipeline started - self.reportMidasStatus(self.midasstatus_started) - self.reportStatus(self.event_pipelinestart) - self.createTmpDir() - self.downloadInput() - self.process() - self.uploadOutput() - self.removeTmpDir() - self.reportStatus(self.event_pipelineend) - # send pydas pipeline finished - self.reportMidasStatus(self.midasstatus_done) - self.tracker.finish() - except Exception as exception: - # TODO where to do exceptions status and conditions - # TODO send this through status tracker - self.log.exception(exception) - self.tracker.reportStatus(self.event_exception) - import traceback - etype, value, tb = sys.exc_info() - emsg = repr(traceback.format_exception(etype, value, tb)) - self.reportMidasStatus(self.midasstatus_exception, emsg) - exit(1) - + # TODO: Ensure that the 
proper xvfb-run is invoked + reactor.spawnProcess(self, 'xvfb-run', cmd, env=os.environ, usePTY=True) diff --git a/library/twserver.cfg b/library/twserver.cfg index 727e368..0885ce3 100644 --- a/library/twserver.cfg +++ b/library/twserver.cfg @@ -1,3 +1,4 @@ slicer_path=/Applications/Slicer.app/Contents/MacOS/Slicer +proxy_url=http://localhost:8880/ diff --git a/library/twserver.py b/library/twserver.py index 2ee12e7..e346a57 100644 --- a/library/twserver.py +++ b/library/twserver.py @@ -1,69 +1,144 @@ -from twisted.internet.task import deferLater from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from twisted.internet import reactor +from twisted.internet import reactor, defer from twisted.web import server, resource - - - -#TODO clean imports +import os from slicerprocess import SlicerProcess, SlicerProcessJobManager +from pipeline import PipelineFactory class ServerRoot(Resource): + """This class implements a twisted Resource Object - server root""" def getChild(self, name, request): if name == '': return self return Resource.getChild(self, name, request) def render_GET(self, request): - return "Server Root, the following paths have service: slicerjob, status" + return "Server Root, the following paths have service: slicerjob" class Slicerjob(Resource): + """This class implements a twisted Resource Object - /slicerjob URL segment""" def getChild(self, name, request): if name == '': return self return Resource.getChild(self, name, request) def render_GET(self, request): - return "slicerjob root, the following paths have service: init, status" + return "slicerjob root, the following paths have service: init, finish, reportstatus" -class SlicerjobStatus(Resource): +class SlicerjobReportStatus(Resource): + """This class implements a twisted Resource Object - /reportstatus URL segment""" isLeaf = True - def __init__(self, jobManager): - self.jobManager = jobManager - - def render_GET(self, request): - if 'job_id' in request.args: - 
return str(self.jobManager.getStatus(jobId=request.args['job_id'][0])) + + def _report_status(self, request): + """Callback function to report pipeline status event to the Midas server""" + print request.args + print "SlicerjobReportStatus" + if 'pipeline' in request.args and 'job_id' in request.args: + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + tmpDir = os.getcwd() + pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) + pipelinefactory = PipelineFactory() + pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + pipeline.get_events() + if 'message'in request.args: + pipeline.reportStatus(request.args['event_type'][0], request.args['message'][0]) + request.write("\nreport job status:" + str(request.args['event_type'][0]) + str(request.args['message'][0])) + else: + pipeline.reportStatus(request.args['event_type'][0]) + request.write("\nreport job status:" + str(request.args['event_type'][0])) + request.finish() else: - return '' -# else: -# return str(self.jobManager.getStatus()) - + request.finish() + def render_GET(self, request): + """Handle report job status request asynchronously """ + reactor.callLater(0, self._report_status, request) + return NOT_DONE_YET class SlicerjobInit(Resource): + """This class implements a twisted Resource Object - /init URL segment""" isLeaf = True def __init__(self, jobManager): self.jobManager = jobManager + + def _download_process(self, request): + """Callback function to download input file(s) from the Midas server, + and then start the slicer job""" + print request.args + print "SlicerjobInit download" + request.write('init job') + if 'pipeline' in request.args and 'job_id' in request.args: + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER starting SlicerProcess" + tmpDir = os.getcwd() + pydasParams = (request.args['email'][0], 
request.args['apikey'][0], request.args['url'][0]) + print pydasParams + request.write("\nstarted job " + str(jobId)) + request.write("\nstarted downloading item(s)") + # Call pipeline's executeDownload function to do the real download + pipelinefactory = PipelineFactory() + pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + (self.dataDir, self.outDir, self.inputfiles) = pipeline.executeDownload() + request.write("\nfinished downloading item(s)") + request.args['proxyurl'] = [self.jobManager.proxyurl] + request.args['data_dir'] = [self.dataDir] + request.args['out_dir'] = [self.outDir] + # Add the input files into the parameters + for k, v in self.inputfiles.items(): + request.args[k] = [v] + request.write("\nstarted processing item(s) ") + # Create a new process for the Slicer job asynchronously + slicerJob = SlicerProcess(self.jobManager, jobId, pipelineName, request.args) + d = defer.Deferred() + reactor.callLater(0, d.callback, None) + d.addCallback(lambda ignored: slicerJob.run()) + request.finish() + else: + request.finish() def render_GET(self, request): + """Handle job init request asynchronously""" + reactor.callLater(0, self._download_process, request) + return NOT_DONE_YET + + +class SlicerjobFinish(Resource): + """This class implements a twisted Resource Object - /finish URL segment""" + isLeaf = True + + def _upload(self, request): + """Callback function to upload output file(s) to the Midas server, + and then start the slicer job""" print request.args - print "SlicerjobInit" - response = 'job:' + print "SlicerjobFinish" + request.write('Upload output') if 'pipeline' in request.args and 'job_id' in request.args: - print "YES" - job_id = request.args['job_id'][0] - pipeline = request.args['pipeline'][0] - print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER starting SlicerProcess" - slicerJob = SlicerProcess(jobManager, job_id, pipeline, request.args) - slicerJob.run() - response = "started job " + str(job_id) - return 
response - + jobId = request.args['job_id'][0] + pipelineName = request.args['pipeline'][0] + print ">>>>>>>>>>>>>>>>>>>>>>TWSERVER finishing SlicerProcess" + tmpDir = os.getcwd() + pydasParams = (request.args['email'][0], request.args['apikey'][0], request.args['url'][0]) + response = "\nstarted uploading output item" + # Call pipeline's executeUpload function to do the real upload + pipelinefactory = PipelineFactory() + pipeline = pipelinefactory.getPipeline(pipelineName, jobId, pydasParams, tmpDir, request.args) + pipeline.executeUpload() + request.write("\nfinished uploading output item") + request.write("\nfinished job " + str(jobId)) + request.finish() + else: + request.finish() + + def render_GET(self, request): + """Handle job ennding request asynchronously""" + reactor.callLater(0, self._upload, request) + return NOT_DONE_YET # check status like this: #http://localhost:8880/slicerjob/status/?jobid=122 @@ -73,26 +148,30 @@ def render_GET(self, request): if __name__ == '__main__': # read the config file for slicer_path - config_file = open('twserver.cfg') config = {} - for line in config_file: - line = line.strip() - if line is not None and line != '': - cols = line.split('=') - config[cols[0]] = cols[1] - config_file.close() - import os + with open('twserver.cfg') as config_file: + for line in config_file.readlines(): + line = line.strip() + if line is not None and line != '': + cols = line.split('=') + config[cols[0]] = cols[1] if 'slicer_path' not in config or not os.path.isfile(config['slicer_path']): print "You must specify the path to the Slicer exe as slicer_path in twserver.cfg" exit(1) - # set current dir as temp working dir - jobManager = SlicerProcessJobManager(os.getcwd(), config['slicer_path']) + if 'proxy_url' not in config: + print "You must specify the Slicer Proxy Server URL" + exit(1) + # Set current directory as temporary working directory + jobManager = SlicerProcessJobManager(os.getcwd(), config['slicer_path'], config['proxy_url']) root 
= ServerRoot() slicerjobRoot = Slicerjob() slicerjobInit = SlicerjobInit(jobManager) - slicerjobStatus = SlicerjobStatus(jobManager) + slicerjobFinish = SlicerjobFinish() + slicerjobReportStatus = SlicerjobReportStatus() root.putChild('slicerjob', slicerjobRoot) slicerjobRoot.putChild('init', slicerjobInit) - slicerjobRoot.putChild('status', slicerjobStatus) + slicerjobRoot.putChild('finish', slicerjobFinish) + slicerjobRoot.putChild('reportstatus', slicerjobReportStatus) + # Start Twisted server reactor.listenTCP(8880, server.Site(root)) reactor.run() diff --git a/models/base/UserModelBase.php b/models/base/UserModelBase.php new file mode 100644 index 0000000..71bbf9a --- /dev/null +++ b/models/base/UserModelBase.php @@ -0,0 +1,155 @@ +_name = 'pyslicer_user'; + $this->_daoName = 'UserDao'; + $this->_key = 'pyslicer_user_id'; + + $this->_mainData = array( + 'pyslicer_user_id' => array('type' => MIDAS_DATA), + 'user_id' => array('type' => MIDAS_ONE_TO_ONE, 'model' => 'User', 'parent_column' => 'user_id', 'child_column' => 'user_id'), + 'pipeline' => array('type' => MIDAS_DATA), + 'root_folder_id' => array('type' => MIDAS_DATA), + 'data_folder_id' => array('type' => MIDAS_DATA), + 'preset_folder_id' => array('type' => MIDAS_DATA), + 'output_folder_id' => array('type' => MIDAS_DATA) + ); + $this->initialize(); // required + } // end __construct() + + public abstract function deleteByUser($userDao, $pipeline); + public abstract function getByUser($userDao, $pipeline); + + /** + * Helper function to create a child folder or use the existing one if it has the same name. 
+ * @param type $userDao + * @param type $parentId parent folder Id + * @param type $name name of the child folder to be created + * @param type $description description of the child folder to be created + * @return id of newly created folder or the exsisting folder with the same name + * @throws Exception + */ + private function _createChildFolder($userDao, $parentId, $name, $description='') + { + if($userDao == false) + { + throw new Exception('Cannot create folder anonymously', MIDAS_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + $record = false; + $uuid = ''; + if($parentId == -1) //top level user folder being created + { + $new_folder = $folderModel->createFolder($name, $description, $userDao->getFolderId(), $uuid); + } + else //child of existing folder + { + $folder = $folderModel->load($parentId); + if(($existing = $folderModel->getFolderExists($name, $folder))) + { + $returnArray = $existing->toArray(); + return $returnArray['folder_id']; + } + $new_folder = $folderModel->createFolder($name, $description, $folder, $uuid); + if($new_folder === false) + { + throw new Exception('Create folder failed', MIDAS_INTERNAL_ERROR); + } + $policyGroup = $folder->getFolderpolicygroup(); + $policyUser = $folder->getFolderpolicyuser(); + $folderpolicygroupModel = MidasLoader::loadModel('Folderpolicygroup'); + $folderpolicyuserModel = MidasLoader::loadModel('Folderpolicyuser'); + foreach($policyGroup as $policy) + { + $folderpolicygroupModel->createPolicy($policy->getGroup(), $new_folder, $policy->getPolicy()); + } + foreach($policyUser as $policy) + { + $folderpolicyuserModel->createPolicy($policy->getUser(), $new_folder, $policy->getPolicy()); + } + if(!$folderModel->policyCheck($new_folder, $userDao, MIDAS_POLICY_ADMIN)) + { + $folderpolicyuserModel->createPolicy($userDao, $new_folder, MIDAS_POLICY_ADMIN); + } + } + // reload folder to get up to date privacy status + $new_folder = $folderModel->load($new_folder->getFolderId()); + $returnArray 
= $new_folder->toArray(); + return $returnArray['folder_id']; + } + + /** + * Create folders for a given pipeline or return existing folders if rootFolderId are same. + * @param userDao The core user + * @param rootFolderId The id of the root folder. + * @return The pyslicer user dao that was created + */ + public function createFolders($userDao, $rootFolderId, $pipeline=MIDAS_PYSLICER_PDF_SEGMENTATION_PIPELINE) + { + if(!$userDao) + { + throw new Exception('Anonymous users may not create folders.', MIDAS_PYSLICER_INVALID_POLICY); + } + $folderModel = MidasLoader::loadModel('Folder'); + // Check input root folder + $rootFolderDao = $folderModel->load($rootFolderId); + if($rootFolderDao === false) + { + throw new Zend_Exception('This folder does not exist.', MIDAS_PYSLICER_INVALID_PARAMETER); + } + if(!$folderModel->policyCheck($rootFolderDao, $userDao, MIDAS_POLICY_WRITE)) + { + throw new Zend_Exception('Write access on this folder required.', MIDAS_PYSLICER_INVALID_POLICY); + } + + $existingPyslicerUserDao = $this->getByUser($userDao, $pipeline); + if ($existingPyslicerUserDao) { + // return existing one if rootFolderId are same + if ($existingPyslicerUserDao->getRootFolderId() == $rootFolderId) + { + return $existingPyslicerUserDao; + } + else + { + // every user can only have at most one set of default folders + $this->deleteByUser($userDao, $pipeline); + } + } + $pyslicerUserDao = MidasLoader::newDao('UserDao', 'pyslicer'); + $pyslicerUserDao->setUserId($userDao->getKey()); + $pyslicerUserDao->setPipeline($pipeline); + $pyslicerUserDao->setRootFolderId($rootFolderId); + $dataFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'data', 'input data directory'); + $pyslicerUserDao->setDataFolderId($dataFolderId); + $presetFolderId = $this->_createChildFolder($userDao, $rootFolderId, 'presets', 'parameter preset directory'); + $pyslicerUserDao->setPresetFolderId($presetFolderId); + $outputFolderId = $this->_createChildFolder($userDao, $rootFolderId, 
'output', 'output results directory'); + $pyslicerUserDao->setOutputFolderId($outputFolderId); + $this->save($pyslicerUserDao); + return $pyslicerUserDao; + } + +} // end class Pyslicer_UserModelBase diff --git a/models/dao/UserDao.php b/models/dao/UserDao.php new file mode 100644 index 0000000..06334b9 --- /dev/null +++ b/models/dao/UserDao.php @@ -0,0 +1,28 @@ +database->getDB()->delete('pyslicer_user', array( + 'user_id = ?' => $userDao->getKey(), + 'pipeline = ?' => $pipeline + )); + } + + /** + * Returns the pyslicer_user corresponding to the core user, or false if the + * user is not an pyslicer_user. + * @param userDao The core user + * @param pipeline Pipeline name. + */ + public function getByUser($userDao, $pipeline) + { + $sql = $this->database->select() + ->where('user_id = ?', $userDao->getKey()) + ->where('pipeline = ?', $pipeline); + $row = $this->database->fetchRow($sql); + $dao = $this->initDao('User', $row, 'pyslicer'); + if($dao) + { + return $dao; + } + else + { + return false; + } + } +} diff --git a/public/css/user/user.index.css b/public/css/user/user.index.css new file mode 100644 index 0000000..bf3d452 --- /dev/null +++ b/public/css/user/user.index.css @@ -0,0 +1,50 @@ +div.sectionHeader { + padding-top: 3px; + margin-bottom: 5px; + font-size: 16px; + color: #666; + display: inline; + float: left; +} + +a.createPdfSegmenterFolders { + padding-top: 3px; + margin-left: 10px; + font-size: 14px; + float: left; +} + +div.sectionContainer { + margin-bottom: 30px; + padding-top: 6px; +} + +.pyslicerUserTable th { + background-color: #dde4e4; + padding: 3px 10px 3px 5px; + font-weight: normal; +} + +.pyslicerUserTable td { + padding: 3px; + background-color: #fbfbfb; +} + +.pyslicerUserTable { + width: 100%; +} + +.noFoldersMessage { + color: #888888; + font-style: italic; + padding-left: 5px; +} + +div.bottomButtons { + float: right; + margin-top: 25px; +} + +div.bottomButtons input { + margin-left: 10px; +} diff --git 
a/public/js/lib/pvw.pdfSegmenter.js b/public/js/lib/pvw.pdfSegmenter.js new file mode 100644 index 0000000..077d7b3 --- /dev/null +++ b/public/js/lib/pvw.pdfSegmenter.js @@ -0,0 +1,112 @@ +var midas = midas || {}; +midas.pvw = midas.pvw || {}; + +/** + * Activate paint mode as soon as we finished initialization + */ +midas.pvw.postInitCallback = function () { + midas.pvw.paintMode(); + midas.pvw.pdfSegmenterPresets(); +}; + +/** + * Get the list of pdf segmenter preset json files + */ +midas.pvw.getPdfSegmenterPresets = function (id) { + ajaxWebApi.ajax({ + method: 'midas.folder.children', + args: 'id=' + id, + success: function (results) { + $("#selectedPresetFile").empty(); + $("#selectedPresetFile").append(''); + for (var key in results.data.items) { + var item = results.data.items[key]; + if (/\.json$/.test(item.name)) { // only care about json files + var optionHtml = ''; + $("#selectedPresetFile").append(optionHtml); + } + } + }, + error: function (XMLHttpRequest, textStatus, errorThrown) { + midas.createNotice(XMLHttpRequest.message, '4000', 'error'); + } + }); +} + +/* + * Drop down list to select preset parameters (json file) for PDF segmenter + */ +midas.pvw.pdfSegmenterPresets = function () { + midas.pvw.pdfSegmenterSelectedPresetItemId = ''; + midas.pvw.pdfSegmenterPresetFolderId = typeof(json.pvw.pdfSegmenterPresetFolderId) === 'undefined' ? '' : json.pvw.pdfSegmenterPresetFolderId; + midas.pvw.pdfSegmenterOutputFolderId = typeof(json.pvw.pdfSegmenterOutputFolderId) === 'undefined' ? '' : json.pvw.pdfSegmenterOutputFolderId; + + var html = '
Root Folder | Data Folder | Preset Folder | Output Folder | |
---|---|---|---|---|
' . $rootFolderId . ''. + ' | ' . $dataFolderId . ''. + ' | ' . $presetFolderId . ''. + ' | ' . $outputFolderId . ''. + ' | Unset |