1 | """ |
---|
2 | Constructs for grouping tool parameters |
---|
3 | """ |
---|
4 | |
---|
5 | from basic import ToolParameter |
---|
6 | from galaxy.util.expressions import ExpressionContext |
---|
7 | |
---|
8 | import logging |
---|
9 | log = logging.getLogger( __name__ ) |
---|
10 | |
---|
11 | import StringIO, os, urllib |
---|
12 | from galaxy.datatypes import sniff |
---|
13 | from galaxy.util.bunch import Bunch |
---|
14 | from galaxy.util.odict import odict |
---|
15 | from galaxy.util import json |
---|
16 | |
---|
17 | class Group( object ): |
---|
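    """
    Base class for a group of tool parameters. Subclasses are responsible
    for converting their state to and from the 'basic' (JSON-safe)
    representation and for producing an initial state.
    """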
    def __init__( self ):
        self.name = None
    def value_to_basic( self, value, app ):
        """
        Convert value to a (possibly nested) representation using only basic
        types (dict, list, tuple, str, unicode, int, long, float, bool, None)
        """
        return value
    def value_from_basic( self, value, app, ignore_errors=False ):
        """
        Convert a basic representation as produced by `value_to_basic` back
        into the preferred value form.
        """
        return value
    def get_initial_value( self, trans, context ):
        """
        Return the initial state/value for this group
        """
        raise NotImplementedError( "get_initial_value() must be implemented in a subclass" )

class Repeat( Group ):
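    """
    A group of inputs that can be repeated a variable number of times
    (bounded by min/max when set). In the basic representation, state is a
    list with one dict per repeat instance. For illustration only (names and
    values are hypothetical), a repeat named 'queries' holding a single text
    parameter 'input' might serialize as:
    [ { '__index__': 0, 'input': 'chr1' }, { '__index__': 1, 'input': 'chr2' } ]
    """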
39 | type = "repeat" |
---|
40 | def __init__( self ): |
---|
41 | Group.__init__( self ) |
---|
42 | self.title = None |
---|
43 | self.inputs = None |
---|
44 | self.default = 0 |
---|
45 | self.min = None |
---|
46 | self.max = None |
---|
47 | @property |
---|
48 | def title_plural( self ): |
---|
49 | if self.title.endswith( "s" ): |
---|
50 | return self.title |
---|
51 | else: |
---|
52 | return self.title + "s" |
---|
53 | def label( self ): |
---|
54 | return "Repeat (%s)" % self.title |
---|
55 | def value_to_basic( self, value, app ): |
---|
56 | rval = [] |
---|
57 | for d in value: |
---|
58 | rval_dict = {} |
---|
59 | # Propogate __index__ |
---|
60 | if '__index__' in d: |
---|
61 | rval_dict['__index__'] = d['__index__'] |
---|
62 | for input in self.inputs.itervalues(): |
---|
63 | rval_dict[ input.name ] = input.value_to_basic( d[input.name], app ) |
---|
64 | rval.append( rval_dict ) |
---|
65 | return rval |
---|
66 | def value_from_basic( self, value, app, ignore_errors=False ): |
---|
67 | rval = [] |
---|
68 | for i, d in enumerate( value ): |
---|
69 | rval_dict = {} |
---|
70 | # If the special __index__ key is not set, create it (for backward |
---|
71 | # compatibility) |
---|
72 | rval_dict['__index__'] = d.get( '__index__', i ) |
---|
73 | # Restore child inputs |
---|
74 | for input in self.inputs.itervalues(): |
---|
75 | if ignore_errors and input.name not in d: |
---|
76 | # If we do not have a value, and are ignoring errors, we simply |
---|
77 | # do nothing. There will be no value for the parameter in the |
---|
78 | # conditional's values dictionary. |
---|
79 | pass |
---|
80 | else: |
---|
81 | rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors ) |
---|
82 | rval.append( rval_dict ) |
---|
83 | return rval |
---|
    def visit_inputs( self, prefix, value, callback ):
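        # Each repeat instance extends the prefix with "<name>_<index>|"; for
        # illustration only, a repeat named 'queries' would visit its first
        # instance's child parameters with the prefix "queries_0|".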
        for i, d in enumerate( value ):
            for input in self.inputs.itervalues():
                new_prefix = prefix + "%s_%d|" % ( self.name, i )
                if isinstance( input, ToolParameter ):
                    callback( new_prefix, input, d[input.name], parent = d )
                else:
                    input.visit_inputs( new_prefix, d[input.name], callback )
    def get_initial_value( self, trans, context ):
        rval = []
        for i in range( self.default ):
            rval_dict = { '__index__': i }
            for input in self.inputs.itervalues():
                rval_dict[ input.name ] = input.get_initial_value( trans, context )
            rval.append( rval_dict )
        return rval

class UploadDataset( Group ):
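    """
    A group backing the upload tool's file inputs. For simple datatypes it
    yields one dataset per uploaded file or pasted URL; for composite
    datatypes it assembles a single dataset from several component files.
    """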
102 | type = "upload_dataset" |
---|
103 | def __init__( self ): |
---|
104 | Group.__init__( self ) |
---|
105 | self.title = None |
---|
106 | self.inputs = None |
---|
107 | self.file_type_name = 'file_type' |
---|
108 | self.default_file_type = 'txt' |
---|
109 | self.file_type_to_ext = { 'auto':self.default_file_type } |
---|
110 | self.metadata_ref = 'files_metadata' |
---|
111 | def get_composite_dataset_name( self, context ): |
---|
112 | #FIXME: HACK |
---|
113 | #Special case of using 'base_name' metadata for use as Dataset name needs to be done in a General Fashion, as defined within a particular Datatype. |
---|
114 | |
---|
115 | #We get two different types of contexts here, one straight from submitted parameters, the other after being parsed into tool inputs |
---|
116 | dataset_name = context.get('files_metadata|base_name', None ) |
---|
117 | if dataset_name is None: |
---|
118 | dataset_name = context.get('files_metadata', {} ).get( 'base_name', None ) |
---|
119 | if dataset_name is None: |
---|
120 | dataset_name = 'Uploaded Composite Dataset (%s)' % self.get_file_type( context ) |
---|
121 | return dataset_name |
---|
122 | def get_file_base_name( self, context ): |
---|
123 | fd = context.get('files_metadata|base_name','Galaxy_Composite_file') |
---|
124 | return fd |
---|
125 | def get_file_type( self, context ): |
---|
126 | return context.get( self.file_type_name, self.default_file_type ) |
---|
127 | def get_datatype_ext( self, trans, context ): |
---|
128 | ext = self.get_file_type( context ) |
---|
129 | if ext in self.file_type_to_ext: |
---|
130 | ext = self.file_type_to_ext[ext] #when using autodetect, we will use composite info from 'text', i.e. only the main file |
---|
131 | return ext |
---|
132 | def get_datatype( self, trans, context ): |
---|
133 | ext = self.get_datatype_ext( trans, context ) |
---|
134 | return trans.app.datatypes_registry.get_datatype_by_extension( ext ) |
---|
135 | @property |
---|
136 | def title_plural( self ): |
---|
137 | if self.title.endswith( "s" ): |
---|
138 | return self.title |
---|
139 | else: |
---|
140 | return self.title + "s" |
---|
141 | def group_title( self, context ): |
---|
142 | return "%s (%s)" % ( self.title, context.get( self.file_type_name, self.default_file_type ) ) |
---|
143 | def title_by_index( self, trans, index, context ): |
---|
144 | d_type = self.get_datatype( trans, context ) |
---|
145 | for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ): |
---|
146 | if i == index: |
---|
147 | rval = composite_name |
---|
148 | if composite_file.description: |
---|
149 | rval = "%s (%s)" % ( rval, composite_file.description ) |
---|
150 | if composite_file.optional: |
---|
151 | rval = "%s [optional]" % rval |
---|
152 | return rval |
---|
153 | return None |
---|
154 | def value_to_basic( self, value, app ): |
---|
155 | rval = [] |
---|
156 | for d in value: |
---|
157 | rval_dict = {} |
---|
            # Propagate __index__
            if '__index__' in d:
                rval_dict['__index__'] = d['__index__']
            for input in self.inputs.itervalues():
                rval_dict[ input.name ] = input.value_to_basic( d[input.name], app )
            rval.append( rval_dict )
        return rval
    def value_from_basic( self, value, app, ignore_errors=False ):
        rval = []
        for i, d in enumerate( value ):
            rval_dict = {}
            # If the special __index__ key is not set, create it (for backward
            # compatibility)
            rval_dict['__index__'] = d.get( '__index__', i )
            # Restore child inputs
            for input in self.inputs.itervalues():
                if ignore_errors and input.name not in d:
                    # NOTE: this fallback path has not been tested
                    rval_dict[ input.name ] = input.get_initial_value( None, d )
                else:
                    rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
            rval.append( rval_dict )
        return rval
    def visit_inputs( self, prefix, value, callback ):
        for i, d in enumerate( value ):
            for input in self.inputs.itervalues():
                new_prefix = prefix + "%s_%d|" % ( self.name, i )
                if isinstance( input, ToolParameter ):
                    callback( new_prefix, input, d[input.name], parent = d )
                else:
                    input.visit_inputs( new_prefix, d[input.name], callback )
    def get_initial_value( self, trans, context ):
        d_type = self.get_datatype( trans, context )
        rval = []
        for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
            rval_dict = {}
            # create the special __index__ key
            rval_dict['__index__'] = i
            for input in self.inputs.itervalues():
                rval_dict[ input.name ] = input.get_initial_value( trans, context )
            rval.append( rval_dict )
        return rval
    def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
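        # Returns a list of Bunch objects, one per dataset to create. Each
        # Bunch carries a 'type' ('file', 'url' or 'composite') and a 'name',
        # common fields such as 'file_type' and 'dbkey', and type-specific
        # fields such as 'path', 'space_to_tab' or 'composite_files'.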
        def get_data_file_filename( data_file, override_name = None, override_info = None ):
            dataset_name = override_name
            dataset_info = override_info
            def get_file_name( file_name ):
                file_name = file_name.split( '\\' )[-1]
                file_name = file_name.split( '/' )[-1]
                return file_name
            try:
                # Use the existing file
                if not dataset_name and 'filename' in data_file:
                    dataset_name = get_file_name( data_file['filename'] )
                if not dataset_info:
                    dataset_info = 'uploaded file'
                return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
            except:
                # The uploaded file should've been persisted by the upload tool action
                return Bunch( type=None, path=None, name=None )
        def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
            filenames = []
            url_paste_file = group_incoming.get( 'url_paste', None )
            if url_paste_file is not None:
                url_paste = open( url_paste_file, 'r' ).read( 1024 )
                if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
                    url_paste = url_paste.replace( '\r', '' ).split( '\n' )
                    for line in url_paste:
                        line = line.strip()
                        if line:
                            if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
                                continue # non-url line, ignore
                            precreated_name = line
                            dataset_name = override_name
                            if not dataset_name:
                                dataset_name = line
                            dataset_info = override_info
                            if not dataset_info:
                                dataset_info = 'uploaded url'
                            yield Bunch( type='url', path=line, name=precreated_name )
                else:
                    # we need to differentiate between various URL pastes here
                    dataset_name = dataset_info = precreated_name = 'Pasted Entry'
                    if override_name:
                        dataset_name = override_name
                    if override_info:
                        dataset_info = override_info
                    yield Bunch( type='file', path=url_paste_file, name=precreated_name )
        def get_one_filename( context ):
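            # Choose a single file for this group: prefer an uploaded file
            # and fall back to the first usable URL/pasted entry.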
            data_file = context['file_data']
            url_paste = context['url_paste']
            name = context.get( 'NAME', None )
            info = context.get( 'INFO', None )
            warnings = []
            space_to_tab = False
            if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
                space_to_tab = True
            file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
            if file_bunch.path and url_paste:
                if url_paste.strip():
                    warnings.append( "All file contents specified in the paste box were ignored." )
            else: # we need to use url_paste
                for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
                    if file_bunch.path:
                        break
            file_bunch.space_to_tab = space_to_tab
            return file_bunch, warnings
        def get_filenames( context ):
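            # Collect every usable file for this group: the uploaded file
            # (if any) plus all URL/pasted entries.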
            rval = []
            data_file = context['file_data']
            url_paste = context['url_paste']
            name = context.get( 'NAME', None )
            info = context.get( 'INFO', None )
            space_to_tab = False
            if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
                space_to_tab = True
            warnings = []
            file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
            if file_bunch.path:
                file_bunch.space_to_tab = space_to_tab
                rval.append( file_bunch )
            for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
                if file_bunch.path:
                    file_bunch.space_to_tab = space_to_tab
                    rval.append( file_bunch )
            return rval
        file_type = self.get_file_type( context )
        d_type = self.get_datatype( trans, context )
        dbkey = context.get( 'dbkey', None )
        writable_files = d_type.writable_files
        writable_files_offset = 0
        groups_incoming = [ None for filename in writable_files ]
        for group_incoming in context.get( self.name, [] ):
            i = int( group_incoming['__index__'] )
            groups_incoming[ i ] = group_incoming
        if d_type.composite_type is not None:
            # Handle uploading of composite datatypes; only one dataset can be
            # created here.
            dataset = Bunch()
            dataset.type = 'composite'
            dataset.file_type = file_type
            dataset.dbkey = dbkey
            dataset.datatype = d_type
            dataset.warnings = []
            dataset.metadata = {}
            dataset.composite_files = {}
            # load metadata
            files_metadata = context.get( self.metadata_ref, {} )
            for meta_name, meta_spec in d_type.metadata_spec.iteritems():
                if meta_spec.set_in_upload:
                    if meta_name in files_metadata:
                        dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
            dataset.precreated_name = dataset.name = self.get_composite_dataset_name( context )
            if dataset.datatype.composite_type == 'auto_primary_file':
                # TODO: replace sniff here with just creating an empty file
                temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
                dataset.primary_file = temp_name
                dataset.space_to_tab = False
            else:
                file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
                writable_files_offset = 1
                dataset.primary_file = file_bunch.path
                dataset.space_to_tab = file_bunch.space_to_tab
                dataset.warnings.extend( warnings )
            if dataset.primary_file is None:
                # TODO: remove this before finishing; this should create an
                # empty dataset instead
                raise Exception( 'No primary dataset file was available for composite upload' )
            keys = [ value.name for value in writable_files.values() ]
            for i, group_incoming in enumerate( groups_incoming[ writable_files_offset: ] ):
                key = keys[ i + writable_files_offset ]
                if group_incoming is None and not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                    dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
                    dataset.composite_files[ key ] = None
                else:
                    file_bunch, warnings = get_one_filename( group_incoming )
                    dataset.warnings.extend( warnings )
                    if file_bunch.path:
                        dataset.composite_files[ key ] = file_bunch.__dict__
                    else:
                        dataset.composite_files[ key ] = None
                        if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                            dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
            return [ dataset ]
        else:
            datasets = get_filenames( context[ self.name ][0] )
            rval = []
            for dataset in datasets:
                dataset.file_type = file_type
                dataset.datatype = d_type
                dataset.ext = self.get_datatype_ext( trans, context )
                dataset.dbkey = dbkey
                rval.append( dataset )
            return rval

class Conditional( Group ):
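    """
    A group whose active inputs depend on the value of a 'test' parameter.
    State is a dict keyed by input name. For illustration only (names and
    values are hypothetical), a conditional with test parameter 'source'
    whose current case contains a parameter 'index' might hold:
    { '__current_case__': 0, 'source': 'built-in', 'index': 'hg18' }
    """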
353 | type = "conditional" |
---|
354 | def __init__( self ): |
---|
355 | Group.__init__( self ) |
---|
356 | self.test_param = None |
---|
357 | self.cases = [] |
---|
358 | self.value_ref = None |
---|
359 | self.value_ref_in_group = True #When our test_param is not part of the conditional Group, this is False |
---|
360 | @property |
---|
361 | def label( self ): |
---|
362 | return "Conditional (%s)" % self.name |
---|
363 | def get_current_case( self, value, trans ): |
---|
364 | # Convert value to user representation |
---|
365 | str_value = self.test_param.filter_value( value, trans ) |
---|
366 | # Find the matching case |
---|
367 | for index, case in enumerate( self.cases ): |
---|
368 | if str_value == case.value: |
---|
369 | return index |
---|
370 | raise Exception( "No case matched value:", self.name, str_value ) |
---|
    def value_to_basic( self, value, app ):
        rval = dict()
        current_case = rval['__current_case__'] = value['__current_case__']
        rval[ self.test_param.name ] = self.test_param.value_to_basic( value[ self.test_param.name ], app )
        for input in self.cases[current_case].inputs.itervalues():
            rval[ input.name ] = input.value_to_basic( value[ input.name ], app )
        return rval
    def value_from_basic( self, value, app, ignore_errors=False ):
        rval = dict()
        current_case = rval['__current_case__'] = value['__current_case__']
        # Test param
        if ignore_errors and self.test_param.name not in value:
            # If ignoring errors, do nothing. However this is potentially very
            # problematic, since if we are missing the value of the test param
            # the entire conditional is wrong.
            pass
        else:
            rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
        # Inputs associated with current case
        for input in self.cases[current_case].inputs.itervalues():
            if ignore_errors and input.name not in value:
                # If we do not have a value and are ignoring errors, we simply
                # do nothing. There will be no value for the parameter in the
                # conditional's values dictionary.
                pass
            else:
                rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
        return rval
    def visit_inputs( self, prefix, value, callback ):
        current_case = value['__current_case__']
        new_prefix = prefix + "%s|" % ( self.name )
        for input in self.cases[current_case].inputs.itervalues():
            if isinstance( input, ToolParameter ):
                callback( new_prefix, input, value[input.name], parent = value )
            else:
                input.visit_inputs( new_prefix, value[input.name], callback )
    def get_initial_value( self, trans, context ):
        # State for a conditional is a plain dictionary.
        rval = {}
        # Get the default value for the 'test element' and use it
        # to determine the current case
        test_value = self.test_param.get_initial_value( trans, context )
        current_case = self.get_current_case( test_value, trans )
        # Store the current case in a special value
        rval['__current_case__'] = current_case
        # Store the value of the test element
        rval[ self.test_param.name ] = test_value
        # Fill in state for selected case
        child_context = ExpressionContext( rval, context )
        for child_input in self.cases[current_case].inputs.itervalues():
            rval[ child_input.name ] = child_input.get_initial_value( trans, child_context )
        return rval

class ConditionalWhen( object ):
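    """
    A single case of a Conditional: the test-param value that selects it and
    the inputs shown when it is selected.
    """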
    def __init__( self ):
        self.value = None
        self.inputs = None
---|