| 1 | """ | 
|---|
| 2 | Upload class | 
|---|
| 3 | """ | 

from galaxy.web.base.controller import *
from galaxy.util.bunch import Bunch
from galaxy.tools import DefaultToolState
from galaxy.tools.parameters.basic import UnvalidatedValue
from galaxy.tools.actions import upload_common

import logging
log = logging.getLogger( __name__ )

class AddFrameData:
    """Flags the page templates use to decide whether to render the surrounding frame."""
    def __init__( self ):
        self.wiki_url = None
        self.debug = None
        self.from_noframe = None

class ToolRunner( BaseController ):

    # Hack to get biomart to work; ideally we could pass tool_id to biomart
    # and receive it back.
    @web.expose
    def biomart( self, trans, tool_id='biomart', **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

    # Test to get hapmap to work; ideally we could pass tool_id to hapmapmart
    # and receive it back.
    @web.expose
    def hapmapmart( self, trans, tool_id='hapmapmart', **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

    @web.expose
    def default( self, trans, tool_id=None, **kwd ):
        """Catches the tool id and redirects as needed"""
        return self.index( trans, tool_id=tool_id, **kwd )

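    # Illustrative note (assuming the default controller routing, where the
    # method name maps to the URL): a request like
    #     GET /tool_runner?tool_id=cat1
    # lands in index() below and renders the form for the tool with id 'cat1'.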
    @web.expose
    def index( self, trans, tool_id=None, from_noframe=None, **kwd ):
        # No tool id passed, redirect to main page
        if tool_id is None:
            return trans.response.send_redirect( url_for( "/static/welcome.html" ) )
        # Load the tool
        toolbox = self.get_toolbox()
        # Backwards compatibility for data source tools that have a default
        # tool_id configured but which now use only GALAXY_URL.
        if isinstance( tool_id, list ):
            tool_ids = tool_id
        else:
            tool_ids = [ tool_id ]
        for tool_id in tool_ids:
            tool = toolbox.tools_by_id.get( tool_id, None )
            if tool:
                break
        # No tool matching the tool id, display an error (shouldn't happen)
        if not tool:
            tool_id = ','.join( tool_ids )
            log.error( "index called with tool id '%s' but no such tool exists", tool_id )
            trans.log_event( "Tool id '%s' does not exist" % tool_id )
            return "Tool '%s' does not exist, kwd=%s" % ( tool_id, kwd )
        # Sanitization happens when values are substituted into the command
        # line via input wrappers, so don't sanitize here.
        params = util.Params( kwd, sanitize=False )
        # Do parameter translation here; used by data source tools.
        if tool.input_translator:
            tool.input_translator.translate( params )
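        # Illustrative sketch (field names are hypothetical): a remote data
        # source might POST fields such as URL or data_type; the translator
        # renames them onto the tool's declared inputs so handle_input()
        # below sees them as ordinary tool parameters.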
        # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
        # so make sure to create a new history if we've never had one before.
        history = trans.get_history( create=True )
        template, vars = tool.handle_input( trans, params.__dict__ )
        if len( params ) > 0:
            trans.log_event( "Tool params: %s" % str( params ), tool_id=tool_id )
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( template, history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )

    @web.expose
    def rerun( self, trans, id=None, from_noframe=None, **kwd ):
        """
        Given a HistoryDatasetAssociation id, find the job that created the
        dataset, extract the parameters, and display the appropriate tool form
        with the parameters already filled in.
        """
        if not id:
            error( "'id' parameter is required" )
        try:
            id = int( id )
        except:
            error( "Invalid value for 'id' parameter" )
        # Get the dataset object
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        # Get the associated job, if any. If this HDA was copied from another,
        # we need to find the job that created the original HDA.
        job_hda = data
        while job_hda.copied_from_history_dataset_association: # should this check library datasets as well?
            job_hda = job_hda.copied_from_history_dataset_association
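        # For example, if HDA A was copied into another history as B and again
        # into this one as C, rerunning C walks C -> B -> A; A's creating job
        # holds the original tool id and parameters.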
        if not job_hda.creating_job_associations:
            error( "Could not find the job for this dataset" )
        # Get the job object
        job = None
        for assoc in job_hda.creating_job_associations:
            job = assoc.job
            break
        if not job:
            raise Exception( "Failed to get job information for dataset hid %d" % data.hid )
        # Get the tool object
        tool_id = job.tool_id
        try:
            # Load the tool
            toolbox = self.get_toolbox()
            tool = toolbox.tools_by_id.get( tool_id, None )
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected when the tool is no longer installed, so don't
            # treat it as an exception.
            error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
        # Can't rerun upload, external data sources, et cetera; for now the
        # is_workflow_compatible flag serves as a proxy for rerunnability.
        if not tool.is_workflow_compatible:
            error( "The '%s' tool does not currently support rerunning." % tool.name )
        # Get the job's parameters
        try:
            params_objects = job.get_param_values( trans.app )
        except:
            raise Exception( "Failed to get parameters for dataset id %d" % data.id )
        # Unpack unvalidated values to strings; they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow).
        validated_params = {}
        for name, value in params_objects.items():
            if isinstance( value, UnvalidatedValue ):
                validated_params[ str( name ) ] = str( value )
            else:
                validated_params[ str( name ) ] = value
        params_objects = validated_params
        # Need to remap dataset parameters. Job parameters point to the
        # original dataset used; the parameter should be the analogous dataset
        # in the current history.
        history = trans.get_history()
        hda_source_dict = {} # Mapping from source HDAs to their copies in this history.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda: # should this check library datasets as well?
                hda_source_dict[ source_hda ] = hda
                source_hda = source_hda.copied_from_history_dataset_association
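        # e.g. if the job consumed HDA X from another history and this history
        # holds a copy Y of X, then hda_source_dict[ X ] is Y, so the form
        # below is pre-filled with Y rather than with X itself.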
        for name, value in validated_params.items():
            if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
                if value not in history.datasets:
                    validated_params[ name ] = hda_source_dict[ value ]
        # Create a fake tool_state for the tool, with the parameter values
        state = tool.new_state( trans )
        state.inputs = params_objects
        tool_state_string = util.object_to_string( state.encode( tool, trans.app ) )
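        # Note: tool_state_string is computed but never handed to the
        # template; the template below receives the live state object via
        # the tool_state key in vars.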
        # Set up the context for the template
        vars = dict( tool_state=state, errors={} )
        # Is the "add frame" stuff necessary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( "tool_form.mako", history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )

    @web.expose
    def redirect( self, trans, redirect_url=None, **kwd ):
        if not redirect_url:
            return trans.show_error_message( "Required URL for redirection missing" )
        trans.log_event( "Redirecting to: %s" % redirect_url )
        return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )

    @web.json
    def upload_async_create( self, trans, tool_id=None, **kwd ):
        """
        Precreate datasets for asynchronous uploading.
        """
        cntrller = kwd.get( 'cntrller', '' )
        roles = kwd.get( 'roles', False )
        if roles:
            # The user associated the DATASET_ACCESS permission on the uploaded
            # datasets with one or more roles. We need to ensure that the roles
            # are legitimately derived from the roles associated with the
            # LIBRARY_ACCESS permission if the library is not public ( this
            # should always be the case since any illegitimate roles were
            # filtered out of the roles displayed on the upload form ). In
            # addition, we need to ensure that the user did not associate roles
            # that would make the dataset inaccessible to everyone.
            library_id = trans.app.security.decode_id( kwd.get( 'library_id', '' ) )
            vars = dict( DATASET_ACCESS_in=roles )
            permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
            if error:
                return [ 'error', msg ]
        def create_dataset( name ):
            ud = Bunch( name=name, file_type=None, dbkey=None )
            if nonfile_params.get( 'folder_id', False ):
                replace_id = nonfile_params.get( 'replace_id', None )
                if replace_id not in [ None, 'None' ]:
                    replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
                else:
                    replace_dataset = None
                # FIXME: instead of passing params here ( which have been
                # processed by util.Params() ), the original kwd should be
                # passed so that complex objects that may have been included
                # in the initial request remain.
                library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
            else:
                library_bunch = None
            return upload_common.new_upload( trans, cntrller, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
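        # Note: create_dataset() closes over nonfile_params, which is only
        # bound below; that is safe because the function isn't called until
        # after the assignment.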
        tool = self.get_toolbox().tools_by_id.get( tool_id, None )
        if not tool:
            return False # bad tool_id
        nonfile_params = util.Params( kwd, sanitize=False )
        if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
            encoded_state = util.string_to_object( kwd["tool_state"] )
            tool_state = DefaultToolState()
            tool_state.decode( encoded_state, tool, trans.app )
        else:
            tool_state = tool.new_state( trans )
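        # If the client echoed back the opaque tool_state string rendered into
        # the upload form, the decode above restores that server-side state;
        # update_state() below then layers the submitted kwd values on top.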
        errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
        datasets = []
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append( input )
        assert dataset_upload_inputs, "No dataset upload groups were found."
        for dataset_upload_input in dataset_upload_inputs:
            d_type = dataset_upload_input.get_datatype( trans, kwd )

            if d_type.composite_type is not None:
                datasets.append( create_dataset( dataset_upload_input.get_composite_dataset_name( kwd ) ) )
            else:
                params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
                if params.file_data not in [ None, "" ]:
                    # Name the dataset after the uploaded file, stripping any
                    # client-side directory components.
                    name = params.file_data
                    if name.count( '/' ):
                        name = name.rsplit( '/', 1 )[1]
                    if name.count( '\\' ):
                        name = name.rsplit( '\\', 1 )[1]
                    datasets.append( create_dataset( name ) )
                if params.url_paste not in [ None, "" ]:
                    url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                    url = False
                    for line in url_paste:
                        line = line.rstrip( '\r\n' ).strip()
                        if not line:
                            continue
                        elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
                            url = True
                            datasets.append( create_dataset( line ) )
                        else:
                            if url:
                                continue # non-url when we've already processed some urls
                            else:
                                # pasted data
                                datasets.append( create_dataset( 'Pasted Entry' ) )
                                break
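        # Illustrative example: pasting
        #     http://example.org/a.bed
        #     http://example.org/b.bed
        # pre-creates one dataset per URL, while pasting plain text pre-creates
        # a single dataset named 'Pasted Entry'; once a URL has been seen,
        # subsequent non-URL lines are ignored.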
        return [ d.id for d in datasets ]

    @web.expose
    def upload_async_message( self, trans, **kwd ):
        # This might be more appropriate in a different controller.
        msg = """<p>Your upload has been queued.  History entries that are still uploading will be blue, and turn green upon completion.</p>
        <p><b>Please do not use your browser's "stop" or "reload" buttons until the upload is complete, or it may be interrupted.</b></p>
        <p>You may safely continue to use Galaxy while the upload is in progress.  Using "stop" and "reload" on pages other than Galaxy is also safe.</p>
        """
        return trans.show_message( msg, refresh_frames='history' )