[2] | 1 | from __init__ import ToolAction |
---|
| 2 | from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper |
---|
| 3 | from galaxy.util.odict import odict |
---|
| 4 | |
---|
| 5 | import logging |
---|
| 6 | log = logging.getLogger( __name__ ) |
---|
| 7 | |
---|
class SetMetadataToolAction( ToolAction ):
    """Tool action used for setting external metadata on an existing dataset"""

    def execute( self, tool, trans, incoming = None, set_output_hid = False, overwrite = True ):
        """
        Create and queue a job that sets external metadata on an existing dataset.

        The dataset to operate on is the first HistoryDatasetAssociation value
        found among the ``incoming`` tool parameters.

        :param tool: the set-metadata tool being executed
        :param trans: current transaction; supplies app, model, history and sa_session
        :param incoming: dict of tool parameters; NOTE: this dict is mutated
            (bookkeeping entries are added before being stored as job parameters)
        :param set_output_hid: accepted for ToolAction interface compatibility;
            not used by this action
        :param overwrite: whether existing metadata may be overwritten
        :returns: tuple of ( job, odict() ) -- the queued Job and an empty
            output-dataset mapping
        :raises Exception: if no HistoryDatasetAssociation is found in ``incoming``
        """
        # BUGFIX: the default used to be a shared mutable dict ( incoming = {} ).
        # This method mutates incoming below, so the old default leaked state
        # between calls; use a None sentinel instead (an empty dict would raise
        # in the for/else just the same, so callers see no behavior change).
        if incoming is None:
            incoming = {}
        # Locate the dataset to set metadata on: the first HDA-valued parameter.
        for name, value in incoming.iteritems():
            if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
                dataset = value
                dataset_name = name
                break
        else:
            raise Exception( 'The dataset to set metadata on could not be determined.' )

        # Create the job object
        job = trans.app.model.Job()
        job.session_id = trans.get_galaxy_session().id
        job.history_id = trans.history.id
        job.tool_id = tool.id
        start_job_state = job.state #should be job.states.NEW
        try:
            # For backward compatibility, some tools may not have versions yet.
            job.tool_version = tool.version
        except AttributeError:
            # BUGFIX: was a bare except; only the missing-attribute case is the
            # documented backward-compatibility scenario.
            job.tool_version = "1.0.1"
        # We need to set job state to something other than NEW, or else when
        # tracking jobs in the db it will be picked up before we have added
        # input / output parameters.
        job.state = job.states.WAITING
        trans.sa_session.add( job )
        trans.sa_session.flush() #ensure job.id is available

        # Add parameters to the job_parameter table.
        # Store original dataset state, so we can restore it. A separate table
        # might be better (no chance of 'losing' the original state)?
        incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state
        external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
        cmd_line = external_metadata_wrapper.setup_external_metadata( dataset,
                                                                     trans.sa_session,
                                                                     exec_dir = None,
                                                                     tmp_dir = trans.app.config.new_file_path,
                                                                     dataset_files_path = trans.app.model.Dataset.file_path,
                                                                     output_fnames = None,
                                                                     config_root = None,
                                                                     datatypes_config = None,
                                                                     job_metadata = None,
                                                                     kwds = { 'overwrite' : overwrite } )
        incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
        for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
            job.add_parameter( name, value )
        # Add the dataset to the job_to_input_dataset table.
        job.add_input_dataset( dataset_name, dataset )
        # Need a special state here to show that metadata is being set and also allow the job to run
        # i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
        dataset._state = dataset.states.SETTING_METADATA
        job.state = start_job_state #job inputs have been configured, restore initial job state
        trans.sa_session.flush()

        # Queue the job for execution
        trans.app.job_queue.put( job.id, tool )
        trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )

        #clear e.g. converted files
        dataset.datatype.before_setting_metadata( dataset )

        return job, odict()
---|