from galaxy.web.base.controller import *

import pkg_resources
pkg_resources.require( "simplejson" )
pkg_resources.require( "SVGFig" )
import simplejson
import base64, httplib, urllib2, sgmllib, svgfig

from galaxy.web.framework.helpers import time_ago, grids
from galaxy.tools.parameters import *
from galaxy.tools import DefaultToolState
from galaxy.tools.parameters.grouping import Repeat, Conditional
from galaxy.datatypes.data import Data
from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import topsort, topsort_levels, CycleError
from galaxy.workflow.modules import *
from galaxy import model
from galaxy.model.mapping import desc
from galaxy.model.orm import *
from galaxy.model.item_attrs import *
from galaxy.web.framework.helpers import to_unicode
from galaxy.jobs.actions.post import ActionBox

class StoredWorkflowListGrid( grids.Grid ):
    class StepsColumn( grids.GridColumn ):
        def get_value(self, trans, grid, workflow):
            return len( workflow.latest_workflow.steps )

    # Grid definition
    use_panels = True
    title = "Saved Workflows"
    model_class = model.StoredWorkflow
    default_filter = { "name" : "All", "tags": "All" }
    default_sort_key = "-update_time"
    columns = [
        grids.TextColumn( "Name", key="name", attach_popup=True, filterable="advanced" ),
        grids.IndividualTagsColumn( "Tags", "tags", model_tag_association_class=model.StoredWorkflowTagAssociation, filterable="advanced", grid_name="StoredWorkflowListGrid" ),
        StepsColumn( "Steps" ),
        grids.GridColumn( "Created", key="create_time", format=time_ago ),
        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
    ]
    columns.append(
        grids.MulticolFilterColumn(
            "Search",
            cols_to_filter=[ columns[0], columns[1] ],
            key="free-text-search", visible=False, filterable="standard" )
    )
    operations = [
        grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
        grids.GridOperation( "Run", condition=( lambda item: not item.deleted ), async_compatible=False ),
        grids.GridOperation( "Clone", condition=( lambda item: not item.deleted ), async_compatible=False ),
        grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), async_compatible=False ),
        grids.GridOperation( "Sharing", condition=( lambda item: not item.deleted ), async_compatible=False ),
        grids.GridOperation( "Delete", condition=( lambda item: item.deleted ), async_compatible=True ),
    ]
    def apply_query_filter( self, trans, query, **kwargs ):
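        """ Show only the current user's workflows that have not been deleted. """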
        return query.filter_by( user=trans.user, deleted=False )

class StoredWorkflowAllPublishedGrid( grids.Grid ):
    title = "Published Workflows"
    model_class = model.StoredWorkflow
    default_sort_key = "update_time"
    default_filter = dict( public_url="All", username="All", tags="All" )
    use_async = True
    columns = [
        grids.PublicURLColumn( "Name", key="name", filterable="advanced" ),
        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.StoredWorkflowAnnotationAssociation, filterable="advanced" ),
        grids.OwnerColumn( "Owner", key="owner", model_class=model.User, filterable="advanced" ),
        grids.CommunityRatingColumn( "Community Rating", key="rating" ),
        grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.StoredWorkflowTagAssociation, filterable="advanced", grid_name="PublicWorkflowListGrid" ),
        grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
    ]
    columns.append(
        grids.MulticolFilterColumn(
            "Search",
            cols_to_filter=[ columns[0], columns[1], columns[2] ],
            key="free-text-search", visible=False, filterable="standard" )
    )
    operations = []
    def build_initial_query( self, trans, **kwargs ):
        # Join so that searching stored_workflow.user makes sense.
        return trans.sa_session.query( self.model_class ).join( model.User.table )
    def apply_query_filter( self, trans, query, **kwargs ):
        # A public workflow is published, has a slug, and is not deleted.
        return query.filter( self.model_class.published==True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )

# Simple SGML parser to get all content in a single tag.
class SingleTagContentsParser( sgmllib.SGMLParser ):

    def __init__( self, target_tag ):
        sgmllib.SGMLParser.__init__( self )
        self.target_tag = target_tag
        self.cur_tag = None
        self.tag_content = ""

    def unknown_starttag( self, tag, attrs ):
        """ Called for each start tag. """
        self.cur_tag = tag

    def handle_data( self, text ):
        """ Called for each block of plain text. """
        if self.cur_tag == self.target_tag:
            self.tag_content += text

class WorkflowController( BaseController, Sharable, UsesStoredWorkflow, UsesAnnotations, UsesItemRatings ):
    stored_list_grid = StoredWorkflowListGrid()
    published_list_grid = StoredWorkflowAllPublishedGrid()

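    # myExperiment server used by import_from_myexp() and export_to_myexp() below; this points at the sandbox instance.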
    __myexp_url = "sandbox.myexperiment.org:80"

    @web.expose
    def index( self, trans ):
        return self.list( trans )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def list_grid( self, trans, **kwargs ):
        """ List user's stored workflows. """
        status = message = None
        if 'operation' in kwargs:
            operation = kwargs['operation'].lower()
            if operation == "rename":
                return self.rename( trans, **kwargs )
            history_ids = util.listify( kwargs.get( 'id', [] ) )
            if operation == "sharing":
                return self.sharing( trans, id=history_ids )
        return self.stored_list_grid( trans, **kwargs )

    @web.expose
    @web.require_login( "use Galaxy workflows", use_panels=True )
    def list( self, trans ):
        """
        Render workflow main page (management of existing workflows)
        """
        user = trans.get_user()
        workflows = trans.sa_session.query( model.StoredWorkflow ) \
            .filter_by( user=user, deleted=False ) \
            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
            .all()
        shared_by_others = trans.sa_session \
            .query( model.StoredWorkflowUserShareAssociation ) \
            .filter_by( user=user ) \
            .join( 'stored_workflow' ) \
            .filter( model.StoredWorkflow.deleted == False ) \
            .order_by( desc( model.StoredWorkflow.update_time ) ) \
            .all()

        # Legacy issue: all shared workflows must have slugs.
        slug_set = False
        for workflow_assoc in shared_by_others:
            slug_set = self.create_item_slug( trans.sa_session, workflow_assoc.stored_workflow )
        if slug_set:
            trans.sa_session.flush()

        return trans.fill_template( "workflow/list.mako",
                                    workflows = workflows,
                                    shared_by_others = shared_by_others )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def list_for_run( self, trans ):
        """
        Render workflow list for analysis view (just allows running workflow
        or switching to management view)
        """
        user = trans.get_user()
        workflows = trans.sa_session.query( model.StoredWorkflow ) \
            .filter_by( user=user, deleted=False ) \
            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
            .all()
        shared_by_others = trans.sa_session \
            .query( model.StoredWorkflowUserShareAssociation ) \
            .filter_by( user=user ) \
            .filter( model.StoredWorkflow.deleted == False ) \
            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
            .all()
        return trans.fill_template( "workflow/list_for_run.mako",
                                    workflows = workflows,
                                    shared_by_others = shared_by_others )

    @web.expose
    def list_published( self, trans, **kwargs ):
        grid = self.published_list_grid( trans, **kwargs )
        if 'async' in kwargs:
            return grid
        else:
            # Render grid wrapped in panels
            return trans.fill_template( "workflow/list_published.mako", grid=grid )

    @web.expose
    def display_by_username_and_slug( self, trans, username, slug ):
        """ Display workflow based on a username and slug. """

        # Get workflow.
        session = trans.sa_session
        user = session.query( model.User ).filter_by( username=username ).first()
        stored_workflow = trans.sa_session.query( model.StoredWorkflow ).filter_by( user=user, slug=slug, deleted=False ).first()
        if stored_workflow is None:
            raise web.httpexceptions.HTTPNotFound()
        # Security check raises error if user cannot access workflow.
        self.security_check( trans.get_user(), stored_workflow, False, True )

        # Get data for workflow's steps.
        self.get_stored_workflow_steps( trans, stored_workflow )
        # Get annotations.
        stored_workflow.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, stored_workflow )
        for step in stored_workflow.latest_workflow.steps:
            step.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, step )

        # Get rating data.
        user_item_rating = 0
        if trans.get_user():
            user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), stored_workflow )
            if user_item_rating:
                user_item_rating = user_item_rating.rating
            else:
                user_item_rating = 0
        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, stored_workflow )
        return trans.fill_template_mako( "workflow/display.mako", item=stored_workflow, item_data=stored_workflow.latest_workflow.steps,
                                         user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )

    @web.expose
    def get_item_content_async( self, trans, id ):
        """ Returns item content in HTML format. """

        stored = self.get_stored_workflow( trans, id, False, True )
        if stored is None:
            raise web.httpexceptions.HTTPNotFound()

        # Get data for workflow's steps.
        self.get_stored_workflow_steps( trans, stored )
        # Get annotations.
        stored.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, stored )
        for step in stored.latest_workflow.steps:
            step.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, step )
        return trans.stream_template_mako( "/workflow/item_content.mako", item = stored, item_data = stored.latest_workflow.steps )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def share( self, trans, id, email="", use_panels=False ):
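        """ Share the workflow identified by `id` with another Galaxy user, looked up by email address. """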
        msg = mtype = None
        # Load workflow from database
        stored = self.get_stored_workflow( trans, id )
        if email:
            other = trans.sa_session.query( model.User ) \
                .filter( and_( model.User.table.c.email==email,
                               model.User.table.c.deleted==False ) ) \
                .first()
            if not other:
                mtype = "error"
                msg = ( "User '%s' does not exist" % email )
            elif other == trans.get_user():
                mtype = "error"
                msg = ( "You cannot share a workflow with yourself" )
            elif trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
                    .filter_by( user=other, stored_workflow=stored ).count() > 0:
                mtype = "error"
                msg = ( "Workflow already shared with '%s'" % email )
            else:
                share = model.StoredWorkflowUserShareAssociation()
                share.stored_workflow = stored
                share.user = other
                session = trans.sa_session
                session.add( share )
                self.create_item_slug( session, stored )
                session.flush()
                trans.set_message( "Workflow '%s' shared with user '%s'" % ( stored.name, other.email ) )
                return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
        return trans.fill_template( "/ind_share_base.mako",
                                    message = msg,
                                    messagetype = mtype,
                                    item=stored,
                                    email=email,
                                    use_panels=use_panels )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def sharing( self, trans, id, **kwargs ):
        """ Handle workflow sharing. """

        # Get session and workflow.
        session = trans.sa_session
        stored = self.get_stored_workflow( trans, id )
        session.add( stored )

        # Do operation on workflow.
        if 'make_accessible_via_link' in kwargs:
            self._make_item_accessible( trans.sa_session, stored )
        elif 'make_accessible_and_publish' in kwargs:
            self._make_item_accessible( trans.sa_session, stored )
            stored.published = True
        elif 'publish' in kwargs:
            stored.published = True
        elif 'disable_link_access' in kwargs:
            stored.importable = False
        elif 'unpublish' in kwargs:
            stored.published = False
        elif 'disable_link_access_and_unpublish' in kwargs:
            stored.importable = stored.published = False
        elif 'unshare_user' in kwargs:
            user = session.query( model.User ).get( trans.security.decode_id( kwargs['unshare_user'] ) )
            if not user:
                error( "User not found for provided id" )
            association = session.query( model.StoredWorkflowUserShareAssociation ) \
                .filter_by( user=user, stored_workflow=stored ).one()
            session.delete( association )

        # Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
        if stored.importable and not stored.slug:
            self._make_item_accessible( trans.sa_session, stored )

        session.flush()

        return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored )

    @web.expose
    @web.require_login( "to import a workflow", use_panels=True )
    def imp( self, trans, id, **kwargs ):
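        """ Import an accessible workflow by copying it into the current user's workflows. """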
        # Set referer message.
        referer = trans.request.referer
        if referer:
            referer_message = "<a href='%s'>return to the previous page</a>" % referer
        else:
            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )

        # Do import.
        session = trans.sa_session
        stored = self.get_stored_workflow( trans, id, check_ownership=False )
        if stored.importable == False:
            return trans.show_error_message( "The owner of this workflow has disabled imports via this link.<br>You can %s" % referer_message, use_panels=True )
        elif stored.user == trans.user:
            return trans.show_error_message( "You can't import this workflow because you own it.<br>You can %s" % referer_message, use_panels=True )
        elif stored.deleted:
            return trans.show_error_message( "You can't import this workflow because it has been deleted.<br>You can %s" % referer_message, use_panels=True )
        else:
            # Copy workflow.
            imported_stored = model.StoredWorkflow()
            imported_stored.name = "imported: " + stored.name
            imported_stored.latest_workflow = stored.latest_workflow
            imported_stored.user = trans.user
            # Save new workflow.
            session = trans.sa_session
            session.add( imported_stored )
            session.flush()

            # Copy annotations.
            self.copy_item_annotation( session, stored.user, stored, imported_stored.user, imported_stored )
            for order_index, step in enumerate( stored.latest_workflow.steps ):
                self.copy_item_annotation( session, stored.user, step, \
                                           imported_stored.user, imported_stored.latest_workflow.steps[order_index] )
            session.flush()

            # Redirect to load galaxy frames.
            return trans.show_ok_message(
                message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
                % ( stored.name, web.url_for( controller='workflow' ), referer_message ), use_panels=True )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def edit_attributes( self, trans, id, **kwargs ):
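        """ Edit a stored workflow's name and annotation. """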
        # Get workflow and do error checking.
        stored = self.get_stored_workflow( trans, id )
        if not stored:
            error( "You do not own this workflow or workflow ID is invalid." )
        # Update workflow attributes if new values submitted.
        if 'name' in kwargs:
            # Rename workflow.
            stored.name = kwargs[ 'name' ]
        if 'annotation' in kwargs:
            # Set workflow annotation; sanitize annotation before adding it.
            annotation = sanitize_html( kwargs[ 'annotation' ], 'utf-8', 'text/html' )
            self.add_item_annotation( trans.sa_session, trans.get_user(), stored, annotation )
        trans.sa_session.flush()
        return trans.fill_template( 'workflow/edit_attributes.mako',
                                    stored=stored,
                                    annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored )
                                    )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def rename( self, trans, id, new_name=None, **kwargs ):
        stored = self.get_stored_workflow( trans, id )
        if new_name is not None:
            stored.name = new_name
            trans.sa_session.flush()
            # For current workflows grid:
            trans.set_message( "Workflow renamed to '%s'." % new_name )
            return self.list( trans )
            # For new workflows grid:
            #message = "Workflow renamed to '%s'." % new_name
            #return self.list_grid( trans, message=message, status='done' )
        else:
            return form( url_for( action='rename', id=trans.security.encode_id(stored.id) ), "Rename workflow", submit_text="Rename" ) \
                .add_text( "new_name", "Workflow Name", value=to_unicode( stored.name ) )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def rename_async( self, trans, id, new_name=None, **kwargs ):
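        """ Rename a stored workflow and return the new name. """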
        stored = self.get_stored_workflow( trans, id )
        if new_name:
            stored.name = new_name
            trans.sa_session.flush()
            return stored.name

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
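        """ Set a stored workflow's annotation (sanitized) and return the annotation text. """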
        stored = self.get_stored_workflow( trans, id )
        if new_annotation:
            # Sanitize annotation before adding it.
            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
            self.add_item_annotation( trans.sa_session, trans.get_user(), stored, new_annotation )
            trans.sa_session.flush()
            return new_annotation

    @web.expose
    @web.require_login( "rate items" )
    @web.json
    def rate_async( self, trans, id, rating ):
        """ Rate a workflow asynchronously and return updated community data. """

        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
        if not stored:
            return trans.show_error_message( "The specified workflow does not exist." )

        # Rate workflow.
        stored_rating = self.rate_item( trans.sa_session, trans.get_user(), stored, rating )

        return self.get_ave_item_rating_data( trans.sa_session, stored )

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def set_accessible_async( self, trans, id=None, accessible=False ):
        """ Set workflow's importable attribute and slug. """
        stored = self.get_stored_workflow( trans, id )

        # Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
        importable = accessible in ['True', 'true', 't', 'T']
        if stored and stored.importable != importable:
            if importable:
                self._make_item_accessible( trans.sa_session, stored )
            else:
                stored.importable = importable
            trans.sa_session.flush()
        return

    @web.expose
    @web.require_login( "modify Galaxy items" )
    def set_slug_async( self, trans, id, new_slug ):
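        """ Set a new slug (URL identifier) for a stored workflow and return it. """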
        stored = self.get_stored_workflow( trans, id )
        if stored:
            stored.slug = new_slug
            trans.sa_session.flush()
            return stored.slug

    @web.expose
    def get_embed_html_async( self, trans, id ):
        """ Returns HTML for embedding a workflow in a page. """

        # TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
        stored = self.get_stored_workflow( trans, id )
        if stored:
            return "Embedded Workflow '%s'" % stored.name

    @web.expose
    @web.json
    @web.require_login( "use Galaxy workflows" )
    def get_name_and_link_async( self, trans, id=None ):
        """ Returns workflow's name and link. """
        stored = self.get_stored_workflow( trans, id )

        if self.create_item_slug( trans.sa_session, stored ):
            trans.sa_session.flush()
        return_dict = { "name" : stored.name, "link" : url_for( action="display_by_username_and_slug", username=stored.user.username, slug=stored.slug ) }
        return return_dict

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def gen_image( self, trans, id ):
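        """ Render an SVG image of the stored workflow: one box per step listing its data inputs and outputs, with connection lines drawn between steps. """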
        stored = self.get_stored_workflow( trans, id, check_ownership=True )
        session = trans.sa_session

        workflow = stored.latest_workflow
        data = []

        canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
        text = svgfig.SVG("g")
        connectors = svgfig.SVG("g")
        boxes = svgfig.SVG("g")
        svgfig.Text.defaults["font-size"] = "10px"

        in_pos = {}
        out_pos = {}
        margin = 5
        line_px = 16 # how much spacing between input/outputs
        widths = {} # store px width for boxes of each step
        max_width, max_x, max_y = 0, 0, 0

        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step( trans, step )

            # Pack attributes into plain dictionary
            step_dict = {
                'id': step.order_index,
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'position': step.position
            }

            input_conn_dict = {}
            for conn in step.input_connections:
                input_conn_dict[ conn.input_name ] = \
                    dict( id=conn.output_step.order_index, output_name=conn.output_name )
            step_dict['input_connections'] = input_conn_dict

            data.append(step_dict)

            x, y = step.position['left'], step.position['top']
            count = 0

            max_len = len(module.get_name()) * 1.5
            text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )

            y += 45
            for di in module.get_data_inputs():
                cur_y = y+count*line_px
                if step.order_index not in in_pos:
                    in_pos[step.order_index] = {}
                in_pos[step.order_index][di['name']] = (x, cur_y)
                text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
                count += 1
                max_len = max(max_len, len(di['label']))

            if len(module.get_data_inputs()) > 0:
                y += 15

            for do in module.get_data_outputs():
                cur_y = y+count*line_px
                if step.order_index not in out_pos:
                    out_pos[step.order_index] = {}
                out_pos[step.order_index][do['name']] = (x, cur_y)
                text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
                count += 1
                max_len = max(max_len, len(do['name']))

            widths[step.order_index] = max_len*5.5
            max_x = max(max_x, step.position['left'])
            max_y = max(max_y, step.position['top'])
            max_width = max(max_width, widths[step.order_index])

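        # Second pass: now that every step's box width and label positions are known, draw the boxes and the connection lines between steps.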
        for step_dict in data:
            width = widths[step_dict['id']]
            x, y = step_dict['position']['left'], step_dict['position']['top']
            boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
            box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin

            # Draw separator line
            if len(step_dict['data_inputs']) > 0:
                box_height += 15
                sep_y = y + len(step_dict['data_inputs']) * line_px + 40
                text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() )

            # input/output box
            boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )

            for conn, output_dict in step_dict['input_connections'].iteritems():
                in_coords = in_pos[step_dict['id']][conn]
                out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
                adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
                text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
                connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )

        canvas.append(connectors)
        canvas.append(boxes)
        canvas.append(text)
        width, height = (max_x + max_width + 50), max_y + 300
        canvas['width'] = "%s px" % width
        canvas['height'] = "%s px" % height
        canvas['viewBox'] = "0 0 %s %s" % (width, height)
        trans.response.set_content_type("image/svg+xml")
        return canvas.standalone_xml()

    @web.expose
    @web.require_login( "use Galaxy workflows" )
    def clone( self, trans, id ):
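        """ Create a copy of a stored workflow that is owned by or shared with the current user. """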
        stored = self.get_stored_workflow( trans, id, check_ownership=False )
        user = trans.get_user()
        if stored.user == user:
            owner = True
        else:
            if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
                    .filter_by( user=user, stored_workflow=stored ).count() == 0:
                error( "Workflow is not owned by or shared with current user" )
            owner = False
        new_stored = model.StoredWorkflow()
        new_stored.name = "Clone of '%s'" % stored.name
        new_stored.latest_workflow = stored.latest_workflow
        if not owner:
            new_stored.name += " shared by '%s'" % stored.user.email
        new_stored.user = user
        # Persist
        session = trans.sa_session
        session.add( new_stored )
        session.flush()
        # Display the management page
        trans.set_message( 'Clone created with name "%s"' % new_stored.name )
        return self.list( trans )

    @web.expose
    @web.require_login( "create workflows" )
    def create( self, trans, workflow_name=None, workflow_annotation="" ):
        """
        Create a new stored workflow with name `workflow_name`.
        """
        user = trans.get_user()
        if workflow_name is not None:
            # Create the new stored workflow
            stored_workflow = model.StoredWorkflow()
            stored_workflow.name = workflow_name
            stored_workflow.user = user
            # And the first (empty) workflow revision
            workflow = model.Workflow()
            workflow.name = workflow_name
            workflow.stored_workflow = stored_workflow
            stored_workflow.latest_workflow = workflow
            # Add annotation.
            workflow_annotation = sanitize_html( workflow_annotation, 'utf-8', 'text/html' )
            self.add_item_annotation( trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation )
            # Persist
            session = trans.sa_session
            session.add( stored_workflow )
            session.flush()
            # Display the management page
            trans.set_message( "Workflow '%s' created" % stored_workflow.name )
            return self.list( trans )
        else:
            return form( url_for(), "Create New Workflow", submit_text="Create" ) \
                .add_text( "workflow_name", "Workflow Name", value="Unnamed workflow" ) \
                .add_text( "workflow_annotation", "Workflow Annotation", value="", help="A description of the workflow; annotation is shown alongside shared or published workflows." )

    @web.expose
    def delete( self, trans, id=None ):
        """
        Mark a workflow as deleted
        """
        # Load workflow from database
        stored = self.get_stored_workflow( trans, id )
        # Mark as deleted and save
        stored.deleted = True
        trans.sa_session.add( stored )
        trans.sa_session.flush()
        # Display the management page
        trans.set_message( "Workflow '%s' deleted" % stored.name )
        return self.list( trans )

    @web.expose
    @web.require_login( "edit workflows" )
    def editor( self, trans, id=None ):
        """
        Render the main workflow editor interface. The canvas is embedded as
        an iframe (necessary for scrolling to work properly), which is
        rendered by `editor_canvas`.
        """
        if not id:
            error( "Invalid workflow id" )
        stored = self.get_stored_workflow( trans, id )
        return trans.fill_template( "workflow/editor.mako", stored=stored, annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored ) )

    @web.json
    def editor_form_post( self, trans, type='tool', tool_id=None, annotation=None, **incoming ):
        """
        Accepts a tool state and incoming values, and generates a new tool
        form and some additional information, packed into a json dictionary.
        This is used for the form shown in the right pane when a node
        is selected.
        """

        trans.workflow_building_mode = True
        module = module_factory.from_dict( trans, {
            'type': type,
            'tool_id': tool_id,
            'tool_state': incoming.pop("tool_state")
        } )
        module.update_state( incoming )

        if type=='tool':
            return {
                'tool_state': module.get_state(),
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'tool_errors': module.get_errors(),
                'form_html': module.get_config_form(),
                'annotation': annotation,
                'post_job_actions': module.get_post_job_actions()
            }
        else:
            return {
                'tool_state': module.get_state(),
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'tool_errors': module.get_errors(),
                'form_html': module.get_config_form(),
                'annotation': annotation
            }

    @web.json
    def get_new_module_info( self, trans, type, **kwargs ):
        """
        Get the info for a new instance of a module initialized with default
        parameters (any keyword arguments will be passed along to the module).
        Result includes data inputs and outputs, html representation
        of the initial form, and the initial tool state (with default values).
        This is called asynchronously whenever a new node is added.
        """
        trans.workflow_building_mode = True
        module = module_factory.new( trans, type, **kwargs )
        return {
            'type': module.type,
            'name': module.get_name(),
            'tool_id': module.get_tool_id(),
            'tool_state': module.get_state(),
            'tooltip': module.get_tooltip(),
            'data_inputs': module.get_data_inputs(),
            'data_outputs': module.get_data_outputs(),
            'form_html': module.get_config_form(),
            'annotation': ""
        }

    @web.json
    def load_workflow( self, trans, id ):
        """
        Get the latest Workflow for the StoredWorkflow identified by `id` and
        encode it as a json string that can be read by the workflow editor
        web interface.
        """
        user = trans.get_user()
        id = trans.security.decode_id( id )
        trans.workflow_building_mode = True
        # Load encoded workflow from database
        stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
        assert stored.user == user
        workflow = stored.latest_workflow
        # Pack workflow data into a dictionary and return
        data = {}
        data['name'] = workflow.name
        data['steps'] = {}
        data['upgrade_messages'] = {}
        # For each step, rebuild the form and encode the state
        for step in workflow.steps:
            # Load from database representation
            module = module_factory.from_workflow_step( trans, step )
            # Fix any missing parameters
            upgrade_message = module.check_and_update_state()
            if upgrade_message:
                # FIXME: Frontend should be able to handle workflow messages
                #        as a dictionary not just the values
                data['upgrade_messages'][step.order_index] = upgrade_message.values()
            # Get user annotation.
            step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
            annotation_str = ""
            if step_annotation:
                annotation_str = step_annotation.annotation
            # Pack attributes into plain dictionary
            step_dict = {
                'id': step.order_index,
                'type': module.type,
                'tool_id': module.get_tool_id(),
                'name': module.get_name(),
                'tool_state': module.get_state(),
                'tooltip': module.get_tooltip(),
                'tool_errors': module.get_errors(),
                'data_inputs': module.get_data_inputs(),
                'data_outputs': module.get_data_outputs(),
                'form_html': module.get_config_form(),
                'annotation' : annotation_str,
                'post_job_actions' : {},
                'workflow_outputs' : []
            }
            # Connections
            input_connections = step.input_connections
            if step.type is None or step.type == 'tool':
                # Determine full (prefixed) names of valid input datasets
                data_input_names = {}
                def callback( input, value, prefixed_name, prefixed_label ):
                    if isinstance( input, DataToolParameter ):
                        data_input_names[ prefixed_name ] = True
                visit_input_values( module.tool.inputs, module.state.inputs, callback )
                # Filter
                # FIXME: this removes connection without displaying a message currently!
                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
                # post_job_actions
                pja_dict = {}
                for pja in step.post_job_actions:
                    pja_dict[pja.action_type+pja.output_name] = dict(action_type = pja.action_type,
                                                                     output_name = pja.output_name,
                                                                     action_arguments = pja.action_arguments)
                step_dict['post_job_actions'] = pja_dict
                # workflow outputs
                outputs = []
                for output in step.workflow_outputs:
                    outputs.append(output.output_name)
                step_dict['workflow_outputs'] = outputs
            # Encode input connections as dictionary
            input_conn_dict = {}
            for conn in input_connections:
                input_conn_dict[ conn.input_name ] = \
                    dict( id=conn.output_step.order_index, output_name=conn.output_name )
            step_dict['input_connections'] = input_conn_dict
            # Position
            step_dict['position'] = step.position
            # Add to return value
            data['steps'][step.order_index] = step_dict
        return data

    @web.json
    def save_workflow( self, trans, id, workflow_data ):
        """
        Save the workflow described by `workflow_data` with id `id`.
        """
        # Get the stored workflow
        stored = self.get_stored_workflow( trans, id )
        # Put parameters in workflow mode
        trans.workflow_building_mode = True
        # Convert incoming workflow data from json
        data = simplejson.loads( workflow_data )
        # Create new workflow from incoming data
        workflow = model.Workflow()
        # Just keep the last name (user can rename later)
        workflow.name = stored.name
        # Assume no errors until we find a step that has some
        workflow.has_errors = False
        # Create each step
        steps = []
        # The editor will provide ids for each step that we don't need to save,
        # but do need to use to make connections
        steps_by_external_id = {}
        # First pass to build step objects and populate basic values
        for key, step_dict in data['steps'].iteritems():
            # Create the model class for the step
            step = model.WorkflowStep()
            steps.append( step )
            steps_by_external_id[ step_dict['id'] ] = step
            # FIXME: Position should be handled inside module
            step.position = step_dict['position']
            module = module_factory.from_dict( trans, step_dict )
            module.save_to_step( step )
            if step_dict.has_key('workflow_outputs'):
                for output_name in step_dict['workflow_outputs']:
                    m = model.WorkflowOutput(workflow_step = step, output_name = output_name)
                    trans.sa_session.add(m)
            if step.tool_errors:
                workflow.has_errors = True
            # Stick this in the step temporarily
            step.temp_input_connections = step_dict['input_connections']
            # Save step annotation.
            annotation = step_dict[ 'annotation' ]
            if annotation:
                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
                self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
        # Second pass to deal with connections between steps
        for step in steps:
            # Input connections
            for input_name, conn_dict in step.temp_input_connections.iteritems():
                if conn_dict:
                    conn = model.WorkflowStepConnection()
                    conn.input_step = step
                    conn.input_name = input_name
                    conn.output_name = conn_dict['output_name']
                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
            del step.temp_input_connections
        # Order the steps if possible
        attach_ordered_steps( workflow, steps )
        # Connect up
        workflow.stored_workflow = stored
        stored.latest_workflow = workflow
        # Persist
        trans.sa_session.flush()
        # Return something informative
        errors = []
        if workflow.has_errors:
            errors.append( "Some steps in this workflow have validation errors" )
        if workflow.has_cycles:
            errors.append( "This workflow contains cycles" )
        if errors:
            rval = dict( message="Workflow saved, but will not be runnable due to the following errors",
                         errors=errors )
        else:
            rval = dict( message="Workflow saved" )
        rval['name'] = workflow.name
        return rval

    @web.expose
    @web.require_login( "use workflows" )
    def export( self, trans, id=None, **kwd ):
        """
        Handles download/export workflow command.
        """
        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
        return trans.fill_template( "/workflow/export.mako", item=stored, use_panels=True )

    @web.expose
    @web.require_login( "use workflows" )
    def import_from_myexp( self, trans, myexp_id, myexp_username=None, myexp_password=None ):
        """
        Imports a workflow from the myExperiment website.
        """

        #
        # Get workflow XML.
        #

        # Get workflow content.
        conn = httplib.HTTPConnection( self.__myexp_url )
        # NOTE: blocks web thread.
        headers = {}
        if myexp_username and myexp_password:
            auth_header = base64.b64encode( '%s:%s' % ( myexp_username, myexp_password ))[:-1]
            headers = { "Authorization" : "Basic %s" % auth_header }
        conn.request( "GET", "/workflow.xml?id=%s&elements=content" % myexp_id, headers=headers )
        response = conn.getresponse()
        workflow_xml = response.read()
        conn.close()
        parser = SingleTagContentsParser( "content" )
        parser.feed( workflow_xml )
        workflow_content = base64.b64decode( parser.tag_content )

        #
        # Process workflow XML and create workflow.
        #
        parser = SingleTagContentsParser( "galaxy_json" )
        parser.feed( workflow_content )
        workflow_dict = from_json_string( parser.tag_content )

        # Create workflow.
        workflow = self._workflow_from_dict( trans, workflow_dict, source="myExperiment" ).latest_workflow

        # Provide user feedback.
        if workflow.has_errors:
            return trans.show_warn_message( "Imported, but some steps in this workflow have validation errors" )
        if workflow.has_cycles:
            return trans.show_warn_message( "Imported, but this workflow contains cycles" )
        else:
            return trans.show_message( "Workflow '%s' imported" % workflow.name )

    @web.expose
    @web.require_login( "use workflows" )
    def export_to_myexp( self, trans, id, myexp_username, myexp_password ):
        """
        Exports a workflow to myExperiment website.
        """

        # Load encoded workflow from database
        user = trans.get_user()
        id = trans.security.decode_id( id )
        trans.workflow_building_mode = True
        stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
        self.security_check( trans.get_user(), stored, False, True )

        # Convert workflow to dict.
        workflow_dict = self._workflow_to_dict( trans, stored )

        #
        # Create and submit workflow myExperiment request.
        #

        # Create workflow content XML.
        workflow_dict_packed = simplejson.dumps( workflow_dict, indent=4, sort_keys=True )
        workflow_content = trans.fill_template( "workflow/myexp_export_content.mako", \
                                                workflow_dict_packed=workflow_dict_packed, \
                                                workflow_steps=workflow_dict['steps'] )

        # Create myExperiment request.
        request_raw = trans.fill_template( "workflow/myexp_export.mako", \
                                           workflow_name=workflow_dict['name'], \
                                           workflow_description=workflow_dict['annotation'], \
                                           workflow_content=workflow_content
                                           )
        # strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
        request = unicode( request_raw.strip(), 'utf-8' )

        # Do request and get result.
        auth_header = base64.b64encode( '%s:%s' % ( myexp_username, myexp_password ))[:-1]
        headers = { "Content-type": "text/xml", "Accept": "text/plain", "Authorization" : "Basic %s" % auth_header }
        conn = httplib.HTTPConnection( self.__myexp_url )
        # NOTE: blocks web thread.
        conn.request("POST", "/workflow.xml", request, headers)
        response = conn.getresponse()
        response_data = response.read()
        conn.close()

        # Do simple parse of response to see if export successful and provide user feedback.
        parser = SingleTagContentsParser( 'id' )
        parser.feed( response_data )
        myexp_workflow_id = parser.tag_content
        workflow_list_str = " <br>Return to <a href='%s'>workflow list</a>." % url_for( action='list' )
        if myexp_workflow_id:
            return trans.show_message( \
                "Workflow '%s' successfully exported to myExperiment. %s" % \
                ( stored.name, workflow_list_str ),
                use_panels=True )
        else:
            return trans.show_error_message( \
                "Workflow '%s' could not be exported to myExperiment. Error: %s. %s" % \
                ( stored.name, response_data, workflow_list_str ), use_panels=True )

    @web.json_pretty
    def export_workflow( self, trans, id ):
        """
        Get the latest Workflow for the StoredWorkflow identified by `id` and
        encode it as a json string that can be imported back into Galaxy

        This has slightly different information than the above. In particular,
        it does not attempt to decode forms and build UIs, it just stores
        the raw state.

        TODO: this is a legacy method; it should be removed once we have UI
        support for exporting/importing a workflow.
        """
        user = trans.get_user()
        id = trans.security.decode_id( id )
        trans.workflow_building_mode = True
        # Load encoded workflow from database
        stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
        self.security_check( trans.get_user(), stored, False, True )
        return self._workflow_to_dict( trans, stored )

    @web.json_pretty
    @web.require_login( "use workflows" )
    def export_to_file( self, trans, id ):
        """
        Get the latest Workflow for the StoredWorkflow identified by `id` and
        encode it as a json string that can be imported back into Galaxy

        This has slightly different information than the above. In particular,
        it does not attempt to decode forms and build UIs, it just stores
        the raw state.
        """

        # Get workflow.
        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )

        # Stream workflow to file.
        stored_dict = self._workflow_to_dict( trans, stored )
        valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        sname = stored.name
        sname = ''.join(c in valid_chars and c or '_' for c in sname)[0:150]
        trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy-Workflow-%s.ga" % ( sname )
        trans.response.set_content_type( 'application/galaxy-archive' )
        return stored_dict

    @web.expose
    def import_workflow( self, trans, workflow_text=None, url=None ):
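        """ Import a workflow from a URL or from pasted workflow JSON text; with neither supplied, display the import form. """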
        if workflow_text is None and url is None:
            return form( url_for(), "Import Workflow", submit_text="Import" ) \
                .add_text( "url", "URL to load workflow from", "" ) \
                .add_input( "textarea", "Encoded workflow (as generated by export workflow)", "workflow_text", "" )
        if url:
            # Load workflow from external URL
            # NOTE: blocks the web thread.
            try:
                workflow_data = urllib2.urlopen( url ).read()
            except Exception, e:
                return trans.show_error_message( "Failed to open URL %s<br><br>Message: %s" % ( url, str( e ) ) )
        else:
            workflow_data = workflow_text
        # Convert incoming workflow data from json
        try:
            data = simplejson.loads( workflow_data )
        except Exception, e:
            return trans.show_error_message( "Data at '%s' does not appear to be a Galaxy workflow<br><br>Message: %s" % ( url, str( e ) ) )

        # Create workflow.
        workflow = self._workflow_from_dict( trans, data, source="uploaded file" ).latest_workflow

        # Provide user feedback and show workflow list.
        if workflow.has_errors:
            trans.set_message( "Imported, but some steps in this workflow have validation errors",
                               type="warning" )
        if workflow.has_cycles:
            trans.set_message( "Imported, but this workflow contains cycles",
                               type="warning" )
        else:
            trans.set_message( "Workflow '%s' imported" % workflow.name )
        return self.list( trans )

    @web.json
    def get_datatypes( self, trans ):
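        """ Return a dict mapping each datatype extension to its class name, plus, for every datatype class, the set of Data classes in its inheritance chain. """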
        ext_to_class_name = dict()
        classes = []
        for k, v in trans.app.datatypes_registry.datatypes_by_extension.iteritems():
            c = v.__class__
            ext_to_class_name[k] = c.__module__ + "." + c.__name__
            classes.append( c )
        class_to_classes = dict()
        def visit_bases( types, cls ):
            for base in cls.__bases__:
                if issubclass( base, Data ):
                    types.add( base.__module__ + "." + base.__name__ )
                visit_bases( types, base )
        for c in classes:
            n = c.__module__ + "." + c.__name__
            types = set( [ n ] )
            visit_bases( types, c )
            class_to_classes[ n ] = dict( ( t, True ) for t in types )
        return dict( ext_to_class_name=ext_to_class_name, class_to_classes=class_to_classes )

    @web.expose
    def build_from_current_history( self, trans, job_ids=None, dataset_ids=None, workflow_name=None ):
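        """ Build a new stored workflow from selected jobs and input datasets of the current history; with no selection, render the selection form. """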
---|
1114 | user = trans.get_user() |
---|
1115 | history = trans.get_history() |
---|
1116 | if not user: |
---|
1117 | return trans.show_error_message( "Must be logged in to create workflows" ) |
---|
1118 | if ( job_ids is None and dataset_ids is None ) or workflow_name is None: |
---|
1119 | jobs, warnings = get_job_dict( trans ) |
---|
1120 | # Render |
---|
1121 | return trans.fill_template( |
---|
1122 | "workflow/build_from_current_history.mako", |
---|
1123 | jobs=jobs, |
---|
1124 | warnings=warnings, |
---|
1125 | history=history ) |
---|
1126 | else: |
---|
1127 | # Ensure job_ids and dataset_ids are lists (possibly empty) |
---|
1128 | if job_ids is None: |
---|
1129 | job_ids = [] |
---|
1130 | elif type( job_ids ) is not list: |
---|
1131 | job_ids = [ job_ids ] |
---|
1132 | if dataset_ids is None: |
---|
1133 | dataset_ids = [] |
---|
1134 | elif type( dataset_ids ) is not list: |
---|
1135 | dataset_ids = [ dataset_ids ] |
---|
1136 | # Convert both sets of ids to integers |
---|
1137 | job_ids = [ int( id ) for id in job_ids ] |
---|
1138 | dataset_ids = [ int( id ) for id in dataset_ids ] |
---|
1139 | # Find each job, for security we (implicately) check that they are |
---|
1140 | # associated witha job in the current history. |
---|
1141 | jobs, warnings = get_job_dict( trans ) |
---|
1142 | jobs_by_id = dict( ( job.id, job ) for job in jobs.keys() ) |
---|
1143 | steps = [] |
---|
1144 | steps_by_job_id = {} |
---|
1145 | hid_to_output_pair = {} |
---|
1146 | # Input dataset steps |
---|
1147 | for hid in dataset_ids: |
---|
1148 | step = model.WorkflowStep() |
---|
1149 | step.type = 'data_input' |
---|
1150 | hid_to_output_pair[ hid ] = ( step, 'output' ) |
---|
1151 | steps.append( step ) |
---|
1152 | # Tool steps |
---|
1153 | for job_id in job_ids: |
---|
1154 | assert job_id in jobs_by_id, "Attempt to create workflow with job not connected to current history" |
---|
1155 | job = jobs_by_id[ job_id ] |
---|
1156 | tool = trans.app.toolbox.tools_by_id[ job.tool_id ] |
---|
1157 | param_values = job.get_param_values( trans.app ) |
---|
1158 | associations = cleanup_param_values( tool.inputs, param_values ) |
---|
1159 | step = model.WorkflowStep() |
---|
1160 | step.type = 'tool' |
---|
1161 | step.tool_id = job.tool_id |
---|
1162 | step.tool_inputs = tool.params_to_strings( param_values, trans.app ) |
---|
1163 | # NOTE: We shouldn't need to do two passes here since only |
---|
1164 | # an earlier job can be used as an input to a later |
---|
1165 | # job. |
---|
1166 | for other_hid, input_name in associations: |
---|
1167 | if other_hid in hid_to_output_pair: |
---|
1168 | other_step, other_name = hid_to_output_pair[ other_hid ] |
---|
1169 | conn = model.WorkflowStepConnection() |
---|
1170 | conn.input_step = step |
---|
1171 | conn.input_name = input_name |
---|
1172 | # Should always be connected to an earlier step |
---|
1173 | conn.output_step = other_step |
---|
1174 | conn.output_name = other_name |
---|
1175 | steps.append( step ) |
---|
1176 | steps_by_job_id[ job_id ] = step |
---|
1177 | # Store created dataset hids |
---|
1178 | for assoc in job.output_datasets: |
---|
1179 | hid_to_output_pair[ assoc.dataset.hid ] = ( step, assoc.name ) |
---|
1180 | # Workflow to populate |
---|
1181 | workflow = model.Workflow() |
---|
1182 | workflow.name = workflow_name |
---|
1183 | # Order the steps if possible |
---|
1184 | attach_ordered_steps( workflow, steps ) |
---|
1185 | # And let's try to set up some reasonable locations on the canvas |
---|
1186 | # (these are pretty arbitrary values) |
---|
1187 | levorder = order_workflow_steps_with_levels( steps ) |
---|
1188 | base_pos = 10 |
---|
1189 | for i, steps_at_level in enumerate( levorder ): |
---|
1190 | for j, index in enumerate( steps_at_level ): |
---|
1191 | step = steps[ index ] |
---|
1192 | step.position = dict( top = ( base_pos + 120 * j ), |
---|
1193 | left = ( base_pos + 220 * i ) ) |
---|
1194 | # Store it |
---|
1195 | stored = model.StoredWorkflow() |
---|
1196 | stored.user = user |
---|
1197 | stored.name = workflow_name |
---|
1198 | workflow.stored_workflow = stored |
---|
1199 | stored.latest_workflow = workflow |
---|
1200 | trans.sa_session.add( stored ) |
---|
1201 | trans.sa_session.flush() |
---|
1202 | # Index page with message |
---|
1203 | return trans.show_message( "Workflow '%s' created from current history." % workflow_name ) |
---|
1204 | ## return trans.show_ok_message( "<p>Workflow '%s' created.</p><p><a target='_top' href='%s'>Click to load in workflow editor</a></p>" |
---|
1205 | ## % ( workflow_name, web.url_for( action='editor', id=trans.security.encode_id(stored.id) ) ) ) |
---|
1206 | |
---|
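# Minimal sketch of the canvas layout used above: one column per topological
# level returned by order_workflow_steps_with_levels(), columns 220px apart,
# rows 120px apart, starting from a 10px offset. This is a standalone
# illustration, not a helper the controller actually calls.
def sketch_step_positions( levels, base_pos=10, x_gap=220, y_gap=120 ):
    positions = {}
    for i, step_indexes in enumerate( levels or [] ):
        for j, step_index in enumerate( step_indexes ):
            positions[ step_index ] = dict( top=base_pos + y_gap * j,
                                            left=base_pos + x_gap * i )
    return positions
# sketch_step_positions( [ [0], [1, 2] ] )
# -> { 0: {'top': 10, 'left': 10},
#      1: {'top': 10, 'left': 230},
#      2: {'top': 130, 'left': 230} }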
1207 | @web.expose |
---|
1208 | def run( self, trans, id, check_user=True, **kwargs ): |
---|
1209 | stored = self.get_stored_workflow( trans, id, check_ownership=False ) |
---|
1210 | if check_user: |
---|
1211 | user = trans.get_user() |
---|
1212 | if stored.user != user: |
---|
1213 | if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \ |
---|
1214 | .filter_by( user=user, stored_workflow=stored ).count() == 0: |
---|
1215 | error( "Workflow is not owned by or shared with current user" ) |
---|
1216 | # Get the latest revision |
---|
1217 | workflow = stored.latest_workflow |
---|
1218 | # It is possible for a workflow to have 0 steps |
---|
1219 | if len( workflow.steps ) == 0: |
---|
1220 | error( "Workflow cannot be run because it does not have any steps" ) |
---|
1221 | #workflow = Workflow.from_simple( simplejson.loads( stored.encoded_value ), trans.app ) |
---|
1222 | if workflow.has_cycles: |
---|
1223 | error( "Workflow cannot be run because it contains cycles" ) |
---|
1224 | if workflow.has_errors: |
---|
1225 | error( "Workflow cannot be run because of validation errors in some steps" ) |
---|
1226 | # Build the state for each step |
---|
1227 | errors = {} |
---|
1228 | has_upgrade_messages = False |
---|
1229 | has_errors = False |
---|
1230 | if kwargs: |
---|
1231 | # If kwargs were provided, the states for each step should have |
---|
1232 | # been POSTed |
---|
1233 | for step in workflow.steps: |
---|
1234 | step.upgrade_messages = {} |
---|
1235 | # Connections by input name |
---|
1236 | step.input_connections_by_name = \ |
---|
1237 | dict( ( conn.input_name, conn ) for conn in step.input_connections ) |
---|
1238 | # Extract just the arguments for this step by prefix |
---|
1239 | p = "%s|" % step.id |
---|
1240 | l = len(p) |
---|
1241 | step_args = dict( ( k[l:], v ) for ( k, v ) in kwargs.iteritems() if k.startswith( p ) ) |
---|
1242 | step_errors = None |
---|
1243 | if step.type == 'tool' or step.type is None: |
---|
1244 | module = module_factory.from_workflow_step( trans, step ) |
---|
1245 | # Fix any missing parameters |
---|
1246 | step.upgrade_messages = module.check_and_update_state() |
---|
1247 | if step.upgrade_messages: |
---|
1248 | has_upgrade_messages = True |
---|
1249 | # Any connected input needs a DummyDataset value (these are |
---|
1250 | # not persisted, so they must be recreated every time) |
---|
1251 | module.add_dummy_datasets( connections=step.input_connections ) |
---|
1252 | # Get the tool |
---|
1253 | tool = module.tool |
---|
1254 | # Get the state |
---|
1255 | step.state = state = module.state |
---|
1256 | # Get old errors |
---|
1257 | old_errors = state.inputs.pop( "__errors__", {} ) |
---|
1258 | # Update the state |
---|
1259 | step_errors = tool.update_state( trans, tool.inputs, step.state.inputs, step_args, |
---|
1260 | update_only=True, old_errors=old_errors ) |
---|
1261 | else: |
---|
1262 | module = step.module = module_factory.from_workflow_step( trans, step ) |
---|
1263 | state = step.state = module.decode_runtime_state( trans, step_args.pop( "tool_state" ) ) |
---|
1264 | step_errors = module.update_runtime_state( trans, state, step_args ) |
---|
1265 | if step_errors: |
---|
1266 | errors[step.id] = state.inputs["__errors__"] = step_errors |
---|
1267 | if 'run_workflow' in kwargs and not errors: |
---|
1268 | # Run each step, connecting outputs to inputs |
---|
1269 | workflow_invocation = model.WorkflowInvocation() |
---|
1270 | workflow_invocation.workflow = workflow |
---|
1271 | outputs = odict() |
---|
1272 | # Find out if there are any workflow outputs defined, as that influences our actions. |
---|
1273 | use_workflow_outputs = False |
---|
1274 | for step in workflow.steps: |
---|
1275 | if step.type == 'tool' or step.type is None: |
---|
1276 | if step.workflow_outputs: |
---|
1277 | use_workflow_outputs = True |
---|
1278 | break |
---|
1279 | for i, step in enumerate( workflow.steps ): |
---|
1280 | # Execute module |
---|
1281 | job = None |
---|
1282 | if step.type == 'tool' or step.type is None: |
---|
1283 | tool = trans.app.toolbox.tools_by_id[ step.tool_id ] |
---|
1284 | input_values = step.state.inputs |
---|
1285 | # Connect up |
---|
1286 | def callback( input, value, prefixed_name, prefixed_label ): |
---|
1287 | if isinstance( input, DataToolParameter ): |
---|
1288 | if prefixed_name in step.input_connections_by_name: |
---|
1289 | conn = step.input_connections_by_name[ prefixed_name ] |
---|
1290 | return outputs[ conn.output_step.id ][ conn.output_name ] |
---|
1291 | visit_input_values( tool.inputs, step.state.inputs, callback ) |
---|
1292 | # Execute it |
---|
1293 | job, out_data = tool.execute( trans, step.state.inputs ) |
---|
1294 | outputs[ step.id ] = out_data |
---|
1295 | # Create new PJA associations with the created job, to be run on completion. |
---|
1296 | if use_workflow_outputs: |
---|
1297 | # We're using outputs. Check the step for outputs to be displayed. Create PJAs to hide the rest upon completion. |
---|
1298 | step_outputs = [s.output_name for s in step.workflow_outputs] |
---|
1299 | for output in tool.outputs.keys(): |
---|
1300 | if output not in step_outputs: |
---|
1301 | # Create a PJA for hiding this output. |
---|
1302 | n_pja = PostJobAction('HideDatasetAction', step, output, {}) |
---|
1303 | else: |
---|
1304 | # Remove any HideDatasetActions, step is flagged for output. |
---|
1305 | for pja in step.post_job_actions: |
---|
1306 | if pja.action_type == "HideDatasetAction" and pja.output_name == output: |
---|
1307 | step.post_job_actions.remove(pja) |
---|
1308 | trans.sa_session.delete(pja) |
---|
1309 | for pja in step.post_job_actions: |
---|
1310 | if pja.action_type in ActionBox.immediate_actions: |
---|
1311 | ActionBox.execute(trans.app, trans.sa_session, pja, job) |
---|
1312 | else: |
---|
1313 | job.add_post_job_action(pja) |
---|
1314 | else: |
---|
1315 | job, out_data = step.module.execute( trans, step.state ) |
---|
1316 | outputs[ step.id ] = out_data |
---|
1317 | # Record invocation |
---|
1318 | workflow_invocation_step = model.WorkflowInvocationStep() |
---|
1319 | workflow_invocation_step.workflow_invocation = workflow_invocation |
---|
1320 | workflow_invocation_step.workflow_step = step |
---|
1321 | workflow_invocation_step.job = job |
---|
1322 | # All jobs ran successfully, so we can save now |
---|
1323 | trans.sa_session.add( workflow_invocation ) |
---|
1324 | trans.sa_session.flush() |
---|
1325 | return trans.fill_template( "workflow/run_complete.mako", |
---|
1326 | workflow=stored, |
---|
1327 | outputs=outputs ) |
---|
1328 | else: |
---|
1329 | # Prepare each step |
---|
1330 | for step in workflow.steps: |
---|
1331 | step.upgrade_messages = {} |
---|
1332 | # Construct modules |
---|
1333 | if step.type == 'tool' or step.type is None: |
---|
1334 | # Restore the tool state for the step |
---|
1335 | step.module = module_factory.from_workflow_step( trans, step ) |
---|
1336 | # Fix any missing parameters |
---|
1337 | step.upgrade_messages = step.module.check_and_update_state() |
---|
1338 | if step.upgrade_messages: |
---|
1339 | has_upgrade_messages = True |
---|
1340 | # Any connected input needs a DummyDataset value (these are |
---|
1341 | # not persisted, so they must be recreated every time) |
---|
1342 | step.module.add_dummy_datasets( connections=step.input_connections ) |
---|
1343 | # Store state with the step |
---|
1344 | step.state = step.module.state |
---|
1345 | # Error dict |
---|
1346 | if step.tool_errors: |
---|
1347 | has_errors = True |
---|
1348 | errors[step.id] = step.tool_errors |
---|
1349 | else: |
---|
1350 | ## Non-tool step: create the module and get its runtime state |
---|
1351 | step.module = module_factory.from_workflow_step( trans, step ) |
---|
1352 | step.state = step.module.get_runtime_state() |
---|
1353 | # Connections by input name |
---|
1354 | step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections ) |
---|
1355 | # Render the form |
---|
1356 | return trans.fill_template( |
---|
1357 | "workflow/run.mako", |
---|
1358 | steps=workflow.steps, |
---|
1359 | workflow=stored, |
---|
1360 | has_upgrade_messages=has_upgrade_messages, |
---|
1361 | errors=errors, |
---|
1362 | incoming=kwargs ) |
---|
1363 | |
---|
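# Sketch of the form-field naming convention run() depends on: every field is
# prefixed with "<step_id>|", so one flat kwargs dict can be split back into
# per-step dicts. The field names in the usage note are made up for
# illustration only.
def split_step_args( kwargs ):
    per_step = {}
    for key, value in kwargs.items():
        if '|' not in key:
            continue  # e.g. 'run_workflow' and other non-step fields
        step_id, param_name = key.split( '|', 1 )
        per_step.setdefault( step_id, {} )[ param_name ] = value
    return per_step
# split_step_args( { '12|input': '3', '12|threshold': '0.5', '13|tool_state': '...' } )
# -> { '12': { 'input': '3', 'threshold': '0.5' }, '13': { 'tool_state': '...' } }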
1364 | @web.expose |
---|
1365 | def tag_outputs( self, trans, id, check_user=True, **kwargs ): |
---|
1366 | stored = self.get_stored_workflow( trans, id, check_ownership=False ) |
---|
1367 | if check_user: |
---|
1368 | user = trans.get_user() |
---|
1369 | if stored.user != user: |
---|
1370 | if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \ |
---|
1371 | .filter_by( user=user, stored_workflow=stored ).count() == 0: |
---|
1372 | error( "Workflow is not owned by or shared with current user" ) |
---|
1373 | # Get the latest revision |
---|
1374 | workflow = stored.latest_workflow |
---|
1375 | # It is possible for a workflow to have 0 steps |
---|
1376 | if len( workflow.steps ) == 0: |
---|
1377 | error( "Workflow cannot be tagged for outputs because it does not have any steps" ) |
---|
1378 | if workflow.has_cycles: |
---|
1379 | error( "Workflow cannot be tagged for outputs because it contains cycles" ) |
---|
1380 | if workflow.has_errors: |
---|
1381 | error( "Workflow cannot be tagged for outputs because of validation errors in some steps" ) |
---|
1382 | # Build the state for each step |
---|
1383 | errors = {} |
---|
1384 | has_upgrade_messages = False |
---|
1385 | has_errors = False |
---|
1386 | if kwargs: |
---|
1387 | # If kwargs were provided, the states for each step should have |
---|
1388 | # been POSTed |
---|
1389 | for step in workflow.steps: |
---|
1390 | if step.type == 'tool': |
---|
1391 | # Extract just the output flags for this step. |
---|
1392 | p = "%s|otag|" % step.id |
---|
1393 | l = len(p) |
---|
1394 | outputs = [k[l:] for ( k, v ) in kwargs.iteritems() if k.startswith( p )] |
---|
1395 | if step.workflow_outputs: |
---|
1396 | for existing_output in step.workflow_outputs: |
---|
1397 | if existing_output.output_name not in outputs: |
---|
1398 | trans.sa_session.delete(existing_output) |
---|
1399 | else: |
---|
1400 | outputs.remove(existing_output.output_name) |
---|
1401 | for outputname in outputs: |
---|
1402 | m = model.WorkflowOutput(workflow_step_id = int(step.id), output_name = outputname) |
---|
1403 | trans.sa_session.add(m) |
---|
1404 | # Prepare each step |
---|
1405 | trans.sa_session.flush() |
---|
1406 | for step in workflow.steps: |
---|
1407 | step.upgrade_messages = {} |
---|
1408 | # Construct modules |
---|
1409 | if step.type == 'tool' or step.type is None: |
---|
1410 | # Restore the tool state for the step |
---|
1411 | step.module = module_factory.from_workflow_step( trans, step ) |
---|
1412 | # Fix any missing parameters |
---|
1413 | step.upgrade_messages = step.module.check_and_update_state() |
---|
1414 | if step.upgrade_messages: |
---|
1415 | has_upgrade_messages = True |
---|
1416 | # Any connected input needs a DummyDataset value (these are |
---|
1417 | # not persisted, so they must be recreated every time) |
---|
1418 | step.module.add_dummy_datasets( connections=step.input_connections ) |
---|
1419 | # Store state with the step |
---|
1420 | step.state = step.module.state |
---|
1421 | # Error dict |
---|
1422 | if step.tool_errors: |
---|
1423 | has_errors = True |
---|
1424 | errors[step.id] = step.tool_errors |
---|
1425 | else: |
---|
1426 | ## Non-tool step: create the module and get its runtime state |
---|
1427 | step.module = module_factory.from_workflow_step( trans, step ) |
---|
1428 | step.state = step.module.get_runtime_state() |
---|
1429 | # Connections by input name |
---|
1430 | step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections ) |
---|
1431 | # Render the form |
---|
1432 | return trans.fill_template( |
---|
1433 | "workflow/tag_outputs.mako", |
---|
1434 | steps=workflow.steps, |
---|
1435 | workflow=stored, |
---|
1436 | has_upgrade_messages=has_upgrade_messages, |
---|
1437 | errors=errors, |
---|
1438 | incoming=kwargs ) |
---|
1439 | |
---|
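# Pure-data sketch of the reconciliation tag_outputs() performs: given the
# output names checked in the submitted form and the names already stored as
# WorkflowOutput rows, decide which rows to delete and which to create. This
# only illustrates the set logic, detached from the ORM.
def reconcile_workflow_outputs( flagged_names, existing_names ):
    flagged = set( flagged_names )
    existing = set( existing_names )
    to_delete = existing - flagged   # previously tagged outputs the user un-checked
    to_create = flagged - existing   # newly checked outputs
    return to_delete, to_create
# reconcile_workflow_outputs( [ 'out1', 'out3' ], [ 'out1', 'out2' ] )
# -> ( set( [ 'out2' ] ), set( [ 'out3' ] ) )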
1440 | @web.expose |
---|
1441 | def configure_menu( self, trans, workflow_ids=None ): |
---|
1442 | user = trans.get_user() |
---|
1443 | if trans.request.method == "POST": |
---|
1444 | if workflow_ids is None: |
---|
1445 | workflow_ids = [] |
---|
1446 | elif type( workflow_ids ) != list: |
---|
1447 | workflow_ids = [ workflow_ids ] |
---|
1448 | sess = trans.sa_session |
---|
1449 | # This explicit removal seems like a hack; we still need to figure out |
---|
1450 | # how to make the association handle it automatically. |
---|
1451 | for m in user.stored_workflow_menu_entries: |
---|
1452 | sess.delete( m ) |
---|
1453 | user.stored_workflow_menu_entries = [] |
---|
1454 | q = sess.query( model.StoredWorkflow ) |
---|
1455 | # To ensure id list is unique |
---|
1456 | seen_workflow_ids = set() |
---|
1457 | for id in workflow_ids: |
---|
1458 | if id in seen_workflow_ids: |
---|
1459 | continue |
---|
1460 | else: |
---|
1461 | seen_workflow_ids.add( id ) |
---|
1462 | m = model.StoredWorkflowMenuEntry() |
---|
1463 | m.stored_workflow = q.get( id ) |
---|
1464 | user.stored_workflow_menu_entries.append( m ) |
---|
1465 | sess.flush() |
---|
1466 | return trans.show_message( "Menu updated", refresh_frames=['tools'] ) |
---|
1467 | else: |
---|
1468 | user = trans.get_user() |
---|
1469 | ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] ) |
---|
1470 | workflows = trans.sa_session.query( model.StoredWorkflow ) \ |
---|
1471 | .filter_by( user=user, deleted=False ) \ |
---|
1472 | .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \ |
---|
1473 | .all() |
---|
1474 | shared_by_others = trans.sa_session \ |
---|
1475 | .query( model.StoredWorkflowUserShareAssociation ) \ |
---|
1476 | .filter_by( user=user ) \ |
---|
1477 | .filter( model.StoredWorkflow.deleted == False ) \ |
---|
1478 | .all() |
---|
1479 | return trans.fill_template( "workflow/configure_menu.mako", |
---|
1480 | workflows=workflows, |
---|
1481 | shared_by_others=shared_by_others, |
---|
1482 | ids_in_menu=ids_in_menu ) |
---|
1483 | |
---|
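# Small sketch of the de-duplication loop in configure_menu(): keep only the
# first occurrence of each submitted workflow id so the rebuilt menu follows
# the submitted order without repeats. Standalone illustration only.
def unique_ids_in_order( workflow_ids ):
    seen = set()
    ordered = []
    for id in workflow_ids:
        if id not in seen:
            seen.add( id )
            ordered.append( id )
    return ordered
# unique_ids_in_order( [ '3', '5', '3', '7' ] ) -> [ '3', '5', '7' ]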
1484 | def _workflow_to_dict( self, trans, stored ): |
---|
1485 | """ |
---|
1486 | Converts a workflow to a dict of attributes suitable for exporting. |
---|
1487 | """ |
---|
1488 | workflow = stored.latest_workflow |
---|
1489 | workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored ) |
---|
1490 | annotation_str = "" |
---|
1491 | if workflow_annotation: |
---|
1492 | annotation_str = workflow_annotation.annotation |
---|
1493 | # Pack workflow data into a dictionary and return |
---|
1494 | data = {} |
---|
1495 | data['a_galaxy_workflow'] = 'true' # Placeholder for identifying galaxy workflow |
---|
1496 | data['format-version'] = "0.1" |
---|
1497 | data['name'] = workflow.name |
---|
1498 | data['annotation'] = annotation_str |
---|
1499 | data['steps'] = {} |
---|
1500 | # For each step, rebuild the form and encode the state |
---|
1501 | for step in workflow.steps: |
---|
1502 | # Load from database representation |
---|
1503 | module = module_factory.from_workflow_step( trans, step ) |
---|
1504 | # Get user annotation. |
---|
1505 | step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step ) |
---|
1506 | annotation_str = "" |
---|
1507 | if step_annotation: |
---|
1508 | annotation_str = step_annotation.annotation |
---|
1509 | |
---|
1510 | # Step info |
---|
1511 | step_dict = { |
---|
1512 | 'id': step.order_index, |
---|
1513 | 'type': module.type, |
---|
1514 | 'tool_id': module.get_tool_id(), |
---|
1515 | 'tool_version' : step.tool_version, |
---|
1516 | 'name': module.get_name(), |
---|
1517 | 'tool_state': module.get_state( secure=False ), |
---|
1518 | 'tool_errors': module.get_errors(), |
---|
1519 | ## 'data_inputs': module.get_data_inputs(), |
---|
1520 | ## 'data_outputs': module.get_data_outputs(), |
---|
1521 | 'annotation' : annotation_str |
---|
1522 | } |
---|
1523 | |
---|
1524 | # Data inputs |
---|
1525 | step_dict['inputs'] = [] |
---|
1526 | if module.type == "data_input": |
---|
1527 | # Get input dataset name; default to 'Input Dataset' |
---|
1528 | name = module.state.get( 'name', 'Input Dataset') |
---|
1529 | step_dict['inputs'].append( { "name" : name, "description" : annotation_str } ) |
---|
1530 | else: |
---|
1531 | # Step is a tool and may have runtime inputs. |
---|
1532 | for name, val in module.state.inputs.items(): |
---|
1533 | input_type = type( val ) |
---|
1534 | if input_type == RuntimeValue: |
---|
1535 | step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) |
---|
1536 | elif input_type == dict: |
---|
1537 | # Input type is described by a dict, e.g. indexed parameters. |
---|
1538 | for partname, partval in val.items(): |
---|
1539 | if type( partval ) == RuntimeValue: |
---|
1540 | step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } ) |
---|
1541 | |
---|
1542 | # User outputs |
---|
1543 | step_dict['user_outputs'] = [] |
---|
1544 | """ |
---|
1545 | module_outputs = module.get_data_outputs() |
---|
1546 | step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step ) |
---|
1547 | for output in step_outputs: |
---|
1548 | name = output.output_name |
---|
1549 | annotation = "" |
---|
1550 | for module_output in module_outputs: |
---|
1551 | if module_output.get( 'name', None ) == name: |
---|
1552 | output_type = module_output.get( 'extension', '' ) |
---|
1553 | break |
---|
1554 | data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type } |
---|
1555 | """ |
---|
1556 | |
---|
1557 | # All step outputs |
---|
1558 | step_dict['outputs'] = [] |
---|
1559 | if type( module ) is ToolModule: |
---|
1560 | for output in module.get_data_outputs(): |
---|
1561 | step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } ) |
---|
1562 | |
---|
1563 | # Connections |
---|
1564 | input_connections = step.input_connections |
---|
1565 | if step.type is None or step.type == 'tool': |
---|
1566 | # Determine full (prefixed) names of valid input datasets |
---|
1567 | data_input_names = {} |
---|
1568 | def callback( input, value, prefixed_name, prefixed_label ): |
---|
1569 | if isinstance( input, DataToolParameter ): |
---|
1570 | data_input_names[ prefixed_name ] = True |
---|
1571 | visit_input_values( module.tool.inputs, module.state.inputs, callback ) |
---|
1572 | # Filter |
---|
1573 | # FIXME: this currently removes connections without displaying a message! |
---|
1574 | input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ] |
---|
1575 | # Encode input connections as dictionary |
---|
1576 | input_conn_dict = {} |
---|
1577 | for conn in input_connections: |
---|
1578 | input_conn_dict[ conn.input_name ] = \ |
---|
1579 | dict( id=conn.output_step.order_index, output_name=conn.output_name ) |
---|
1580 | step_dict['input_connections'] = input_conn_dict |
---|
1581 | # Position |
---|
1582 | step_dict['position'] = step.position |
---|
1583 | # Add to return value |
---|
1584 | data['steps'][step.order_index] = step_dict |
---|
1585 | return data |
---|
1586 | |
---|
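# For orientation, a minimal example of the dictionary shape produced by
# _workflow_to_dict() above and consumed by _workflow_from_dict() below. Step
# keys are order indexes; input_connections refer back to the producing step's
# id and output name. The tool ids, names, states and positions here are
# placeholders, not real registry entries.
EXAMPLE_WORKFLOW_DICT = {
    'a_galaxy_workflow': 'true',
    'format-version': '0.1',
    'name': 'Example workflow',
    'annotation': '',
    'steps': {
        0: { 'id': 0, 'type': 'data_input', 'tool_id': None, 'tool_version': None,
             'name': 'Input dataset', 'tool_state': '{"name": "Input Dataset"}',
             'tool_errors': None, 'annotation': '',
             'inputs': [ { 'name': 'Input Dataset', 'description': '' } ],
             'user_outputs': [], 'outputs': [],
             'input_connections': {}, 'position': { 'top': 10, 'left': 10 } },
        1: { 'id': 1, 'type': 'tool', 'tool_id': 'example_tool', 'tool_version': '1.0.0',
             'name': 'Example tool', 'tool_state': '{...}',
             'tool_errors': None, 'annotation': '',
             'inputs': [], 'user_outputs': [],
             'outputs': [ { 'name': 'out_file1', 'type': 'tabular' } ],
             'input_connections': { 'input': { 'id': 0, 'output_name': 'output' } },
             'position': { 'top': 10, 'left': 230 } },
    },
}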
1587 | def _workflow_from_dict( self, trans, data, source=None ): |
---|
1588 | """ |
---|
1589 | Creates a workflow from a dict. Created workflow is stored in the database and returned. |
---|
1590 | """ |
---|
1591 | # Put parameters in workflow mode |
---|
1592 | trans.workflow_building_mode = True |
---|
1593 | # Create new workflow from incoming dict |
---|
1594 | workflow = model.Workflow() |
---|
1595 | # If there's a source, put it in the workflow name. |
---|
1596 | if source: |
---|
1597 | name = "%s (imported from %s)" % ( data['name'], source ) |
---|
1598 | else: |
---|
1599 | name = data['name'] |
---|
1600 | workflow.name = name |
---|
1601 | # Assume no errors until we find a step that has some |
---|
1602 | workflow.has_errors = False |
---|
1603 | # Create each step |
---|
1604 | steps = [] |
---|
1605 | # The editor will provide ids for each step that we don't need to save, |
---|
1606 | # but do need to use to make connections |
---|
1607 | steps_by_external_id = {} |
---|
1608 | # First pass to build step objects and populate basic values |
---|
1609 | for key, step_dict in data['steps'].iteritems(): |
---|
1610 | # Create the model class for the step |
---|
1611 | step = model.WorkflowStep() |
---|
1612 | steps.append( step ) |
---|
1613 | steps_by_external_id[ step_dict['id'] ] = step |
---|
1614 | # FIXME: Position should be handled inside module |
---|
1615 | step.position = step_dict['position'] |
---|
1616 | module = module_factory.from_dict( trans, step_dict, secure=False ) |
---|
1617 | module.save_to_step( step ) |
---|
1618 | if step.tool_errors: |
---|
1619 | workflow.has_errors = True |
---|
1620 | # Stick this in the step temporarily |
---|
1621 | step.temp_input_connections = step_dict['input_connections'] |
---|
1622 | # Save step annotation. |
---|
1623 | annotation = step_dict[ 'annotation' ] |
---|
1624 | if annotation: |
---|
1625 | annotation = sanitize_html( annotation, 'utf-8', 'text/html' ) |
---|
1626 | self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation ) |
---|
1627 | # Second pass to deal with connections between steps |
---|
1628 | for step in steps: |
---|
1629 | # Input connections |
---|
1630 | for input_name, conn_dict in step.temp_input_connections.iteritems(): |
---|
1631 | if conn_dict: |
---|
1632 | conn = model.WorkflowStepConnection() |
---|
1633 | conn.input_step = step |
---|
1634 | conn.input_name = input_name |
---|
1635 | conn.output_name = conn_dict['output_name'] |
---|
1636 | conn.output_step = steps_by_external_id[ conn_dict['id'] ] |
---|
1637 | del step.temp_input_connections |
---|
1638 | # Order the steps if possible |
---|
1639 | attach_ordered_steps( workflow, steps ) |
---|
1640 | # Connect up |
---|
1641 | stored = model.StoredWorkflow() |
---|
1642 | stored.name = workflow.name |
---|
1643 | workflow.stored_workflow = stored |
---|
1644 | stored.latest_workflow = workflow |
---|
1645 | stored.user = trans.user |
---|
1646 | # Persist |
---|
1647 | trans.sa_session.add( stored ) |
---|
1648 | trans.sa_session.flush() |
---|
1649 | return stored |
---|
1650 | |
---|
1651 | ## ---- Utility methods ------------------------------------------------------- |
---|
1652 | |
---|
1653 | def attach_ordered_steps( workflow, steps ): |
---|
1654 | ordered_steps = order_workflow_steps( steps ) |
---|
1655 | if ordered_steps: |
---|
1656 | workflow.has_cycles = False |
---|
1657 | for i, step in enumerate( ordered_steps ): |
---|
1658 | step.order_index = i |
---|
1659 | workflow.steps.append( step ) |
---|
1660 | else: |
---|
1661 | workflow.has_cycles = True |
---|
1662 | workflow.steps = steps |
---|
1663 | |
---|
1664 | def edgelist_for_workflow_steps( steps ): |
---|
1665 | """ |
---|
1666 | Create a list of tuples representing edges between `WorkflowSteps` based |
---|
1667 | on associated `WorkflowStepConnection`s |
---|
1668 | """ |
---|
1669 | edges = [] |
---|
1670 | steps_to_index = dict( ( step, i ) for i, step in enumerate( steps ) ) |
---|
1671 | for step in steps: |
---|
1672 | edges.append( ( steps_to_index[step], steps_to_index[step] ) ) |
---|
1673 | for conn in step.input_connections: |
---|
1674 | edges.append( ( steps_to_index[conn.output_step], steps_to_index[conn.input_step] ) ) |
---|
1675 | return edges |
---|
1676 | |
---|
1677 | def order_workflow_steps( steps ): |
---|
1678 | """ |
---|
1679 | Perform topological sort of the steps, return ordered or None |
---|
1680 | """ |
---|
1681 | try: |
---|
1682 | edges = edgelist_for_workflow_steps( steps ) |
---|
1683 | node_order = topsort( edges ) |
---|
1684 | return [ steps[i] for i in node_order ] |
---|
1685 | except CycleError: |
---|
1686 | return None |
---|
1687 | |
---|
1688 | def order_workflow_steps_with_levels( steps ): |
---|
1689 | try: |
---|
1690 | return topsort_levels( edgelist_for_workflow_steps( steps ) ) |
---|
1691 | except CycleError: |
---|
1692 | return None |
---|
1693 | |
---|
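# A quick, self-contained illustration of the edge-list and topological-sort
# helpers above, using integer indexes in place of WorkflowStep objects. The
# (i, i) self-pairs mirror edgelist_for_workflow_steps(), which emits them so
# steps without connections still appear in the sort. Expected results are
# shown as comments and assume the standard behavior of galaxy.util.topsort.
def _demo_step_ordering():
    edges = [ ( 0, 0 ), ( 1, 1 ), ( 0, 1 ) ]    # step 0 feeds step 1
    print topsort( edges )                                  # -> [0, 1]
    print [ level for level in topsort_levels( edges ) ]    # -> [[0], [1]]
    try:
        topsort( edges + [ ( 1, 0 ) ] )         # adding 1 -> 0 creates a cycle
    except CycleError:
        print "cycle detected"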
1694 | class FakeJob( object ): |
---|
1695 | """ |
---|
1696 | Fake job object for datasets that have no creating_job_associations; |
---|
1697 | they will be treated as "input" datasets. |
---|
1698 | """ |
---|
1699 | def __init__( self, dataset ): |
---|
1700 | self.is_fake = True |
---|
1701 | self.id = "fake_%s" % dataset.id |
---|
1702 | |
---|
1703 | def get_job_dict( trans ): |
---|
1704 | """ |
---|
1705 | Return ( jobs, warnings ): an odict mapping each creating Job (or FakeJob) |
---|
1706 | to [ ( output_name, Dataset ) ] for the finished, active Datasets in the current history. |
---|
1707 | """ |
---|
1708 | history = trans.get_history() |
---|
1709 | # Get the jobs that created the datasets |
---|
1710 | warnings = set() |
---|
1711 | jobs = odict() |
---|
1712 | for dataset in history.active_datasets: |
---|
1713 | # FIXME: Create "Dataset.is_finished" |
---|
1714 | if dataset.state in ( 'new', 'running', 'queued' ): |
---|
1715 | warnings.add( "Some datasets still queued or running were ignored" ) |
---|
1716 | continue |
---|
1717 | |
---|
1718 | # If this HDA was copied from another, find the job that created the original HDA |
---|
1719 | job_hda = dataset |
---|
1720 | while job_hda.copied_from_history_dataset_association: |
---|
1721 | job_hda = job_hda.copied_from_history_dataset_association |
---|
1722 | |
---|
1723 | if not job_hda.creating_job_associations: |
---|
1724 | jobs[ FakeJob( dataset ) ] = [ ( None, dataset ) ] |
---|
1725 | |
---|
1726 | for assoc in job_hda.creating_job_associations: |
---|
1727 | job = assoc.job |
---|
1728 | if job in jobs: |
---|
1729 | jobs[ job ].append( ( assoc.name, dataset ) ) |
---|
1730 | else: |
---|
1731 | jobs[ job ] = [ ( assoc.name, dataset ) ] |
---|
1732 | return jobs, warnings |
---|
1733 | |
---|
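# Sketch of the copied-dataset handling in get_job_dict() above: when an HDA
# was copied from another history (possibly through several copies), the job
# worth recording is the one that created the original. _StubHDA is a
# hypothetical stand-in so the walk can be shown outside the model layer.
class _StubHDA( object ):
    def __init__( self, name, copied_from=None ):
        self.name = name
        self.copied_from_history_dataset_association = copied_from

def walk_to_original_hda( hda ):
    while hda.copied_from_history_dataset_association:
        hda = hda.copied_from_history_dataset_association
    return hda

# original = _StubHDA( 'original' )
# copy = _StubHDA( 'copy', copied_from=_StubHDA( 'intermediate', copied_from=original ) )
# walk_to_original_hda( copy ) is original  -> True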
1734 | def cleanup_param_values( inputs, values ): |
---|
1735 | """ |
---|
1736 | Remove 'Data' values from `values`, along with metadata cruft, |
---|
1737 | but track the ( hid, parameter name ) associations. |
---|
1738 | """ |
---|
1739 | associations = [] |
---|
1740 | names_to_clean = [] |
---|
1741 | # dbkey is pushed in by the framework |
---|
1742 | if 'dbkey' in values: |
---|
1743 | del values['dbkey'] |
---|
1744 | root_values = values |
---|
1745 | # Recursively clean data inputs and dynamic selects |
---|
1746 | def cleanup( prefix, inputs, values ): |
---|
1747 | for key, input in inputs.items(): |
---|
1748 | if isinstance( input, ( SelectToolParameter, DrillDownSelectToolParameter ) ): |
---|
1749 | if input.is_dynamic and not isinstance( values[key], UnvalidatedValue ): |
---|
1750 | values[key] = UnvalidatedValue( values[key] ) |
---|
1751 | if isinstance( input, DataToolParameter ): |
---|
1752 | tmp = values[key] |
---|
1753 | values[key] = None |
---|
1754 | # HACK: Nested associations are not yet working, but we |
---|
1755 | # still need to clean them up so we can serialize |
---|
1756 | # if not( prefix ): |
---|
1757 | if tmp: # this is False for an unset optional dataset |
---|
1758 | associations.append( ( tmp.hid, prefix + key ) ) |
---|
1759 | # Cleanup the other deprecated crap associated with datasets |
---|
1760 | # as well. Worse, for nested datasets all the metadata is |
---|
1761 | # being pushed into the root. FIXME: MUST REMOVE SOON |
---|
1762 | key = prefix + key + "_" |
---|
1763 | for k in root_values.keys(): |
---|
1764 | if k.startswith( key ): |
---|
1765 | del root_values[k] |
---|
1766 | elif isinstance( input, Repeat ): |
---|
1767 | group_values = values[key] |
---|
1768 | for i, rep_values in enumerate( group_values ): |
---|
1769 | rep_index = rep_values['__index__'] |
---|
1770 | prefix = "%s_%d|" % ( key, rep_index ) |
---|
1771 | cleanup( prefix, input.inputs, group_values[i] ) |
---|
1772 | elif isinstance( input, Conditional ): |
---|
1773 | group_values = values[input.name] |
---|
1774 | current_case = group_values['__current_case__'] |
---|
1775 | prefix = "%s|" % ( key ) |
---|
1776 | cleanup( prefix, input.cases[current_case].inputs, group_values ) |
---|
1777 | cleanup( "", inputs, values ) |
---|
1778 | return associations |
---|
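# To make the bookkeeping in cleanup_param_values() concrete, the prefixes it
# builds for nested parameters look like this (the tool, repeat and parameter
# names below are invented purely for illustration):
#
#   top-level data param 'input'                          -> 'input'
#   repeat 'queries', instance with __index__ 1,
#       data param 'input2'                               -> 'queries_1|input2'
#   conditional 'advanced', data param 'ref'              -> 'advanced|ref'
#
# The returned associations are ( dataset hid, prefixed name ) pairs, e.g.
# [ ( 3, 'input' ), ( 7, 'queries_1|input2' ) ], which
# build_from_current_history() matches against hid_to_output_pair to create
# the WorkflowStepConnections.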
1779 | |
---|