import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob, sys

from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy import util, datatypes, jobs, web, model
from cgi import escape, FieldStorage
from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
from galaxy.util.sanitize_html import sanitize_html
from galaxy.model.item_attrs import *

from email.MIMEText import MIMEText
import pkg_resources
pkg_resources.require( "Paste" )
import paste.httpexceptions

log = logging.getLogger( __name__ )

if sys.version_info[:2] < ( 2, 6 ):
    zipfile.BadZipFile = zipfile.error
if sys.version_info[:2] < ( 2, 5 ):
    zipfile.LargeZipFile = zipfile.error

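# Probe zip support once at import time: try to build a ZIP64-capable archive in a throwaway
# temporary directory so 'zip' can be offered as a compression type for dataset downloads.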
tmpd = tempfile.mkdtemp()
comptypes = []
ziptype = '32'
tmpf = os.path.join( tmpd, 'compression_test.zip' )
try:
    archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
    archive.close()
    comptypes.append( 'zip' )
    ziptype = '64'
except RuntimeError:
    log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." )
except ( TypeError, zipfile.LargeZipFile ): # ZIP64 is only in Python 2.5+. Remove TypeError when 2.4 support is dropped.
    log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )
    comptypes.append( 'zip' )
try:
    os.unlink( tmpf )
except OSError:
    pass
os.rmdir( tmpd )

error_report_template = """
GALAXY TOOL ERROR REPORT
------------------------

This error report was sent from the Galaxy instance hosted on the server
"${host}"
-----------------------------------------------------------------------------
This is in reference to dataset id ${dataset_id} from history id ${history_id}
-----------------------------------------------------------------------------
You should be able to view the history containing the related history item

${hid}: ${history_item_name}

by logging in as a Galaxy admin user to the Galaxy instance referenced above
and pointing your browser to the following link.

${history_view_link}
-----------------------------------------------------------------------------
The user '${email}' provided the following information:

${message}
-----------------------------------------------------------------------------
job id: ${job_id}
tool id: ${job_tool_id}
-----------------------------------------------------------------------------
job command line:
${job_command_line}
-----------------------------------------------------------------------------
job stderr:
${job_stderr}
-----------------------------------------------------------------------------
job stdout:
${job_stdout}
-----------------------------------------------------------------------------
job info:
${job_info}
-----------------------------------------------------------------------------
job traceback:
${job_traceback}
-----------------------------------------------------------------------------
(This is an automated message).
"""

class HistoryDatasetAssociationListGrid( grids.Grid ):
    # Custom columns for grid.
    class HistoryColumn( grids.GridColumn ):
        def get_value( self, trans, grid, hda ):
            return hda.history.name

    class StatusColumn( grids.GridColumn ):
        def get_value( self, trans, grid, hda ):
            if hda.deleted:
                return "deleted"
            return ""
        def get_accepted_filters( self ):
            """ Returns a list of accepted filters for this column. """
            accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
            accepted_filters = []
            for label, val in accepted_filter_labels_and_vals.items():
                args = { self.key: val }
                accepted_filters.append( grids.GridColumnFilter( label, args ) )
            return accepted_filters

    # Grid definition
    title = "Saved Datasets"
    model_class = model.HistoryDatasetAssociation
    template = '/dataset/grid.mako'
    default_sort_key = "-update_time"
    columns = [
        grids.TextColumn( "Name", key="name",
            # Link name to dataset's history.
            link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ), filterable="advanced", attach_popup=True ),
        HistoryColumn( "History", key="history",
            link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced", grid_name="HistoryDatasetAssocationListGrid" ),
        StatusColumn( "Status", key="deleted", attach_popup=False ),
        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
    ]
    columns.append(
        grids.MulticolFilterColumn(
            "Search",
            cols_to_filter=[ columns[0], columns[2] ],
            key="free-text-search", visible=False, filterable="standard" )
    )
    operations = [
        grids.GridOperation( "Copy to current history", condition=( lambda item: not item.deleted ), async_compatible=False ),
    ]
    standard_filters = []
    default_filter = dict( name="All", deleted="False", tags="All" )
    preserve_state = False
    use_paging = True
    num_rows_per_page = 50
    def build_initial_query( self, trans, **kwargs ):
        # Show user's datasets that are not deleted, not in deleted histories, and not hidden.
        # To filter HDAs by user, need to join model class/HDA and History table so that it is
        # possible to filter by user. However, for dictionary-based filtering to work, need a
        # primary table for the query.
        return trans.sa_session.query( self.model_class ).select_from( self.model_class.table.join( model.History.table ) ) \
                .filter( model.History.user == trans.user ) \
                .filter( self.model_class.deleted == False ) \
                .filter( model.History.deleted == False ) \
                .filter( self.model_class.visible == True )

class DatasetInterface( BaseController, UsesAnnotations, UsesHistoryDatasetAssociation, UsesItemRatings ):

    stored_list_grid = HistoryDatasetAssociationListGrid()

    @web.expose
    def errors( self, trans, id ):
        hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        return trans.fill_template( "dataset/errors.mako", hda=hda )

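    # Return the captured standard error of the job that created this dataset as plain text.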
    @web.expose
    def stderr( self, trans, id ):
        dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        job = dataset.creating_job_associations[0].job
        trans.response.set_content_type( 'text/plain' )
        return job.stderr

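    # Build a tool error report from error_report_template and mail it to the configured
    # error_email_to address, including the reporting user's address when one is supplied.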
    @web.expose
    def report_error( self, trans, id, email='', message="" ):
        smtp_server = trans.app.config.smtp_server
        if smtp_server is None:
            return trans.show_error_message( "Mail is not configured for this Galaxy instance" )
        to_address = trans.app.config.error_email_to
        if to_address is None:
            return trans.show_error_message( "Error reporting has been disabled for this Galaxy instance" )
        # Get the dataset and associated job
        hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
        job = hda.creating_job_associations[0].job
        # Get the name of the server hosting the Galaxy instance from which this report originated
        host = trans.request.host
        history_view_link = "%s/history/view?id=%s" % ( str( host ), trans.security.encode_id( hda.history_id ) )
        # Build the email message
        msg = MIMEText( string.Template( error_report_template )
            .safe_substitute( host=host,
                              dataset_id=hda.dataset_id,
                              history_id=hda.history_id,
                              hid=hda.hid,
                              history_item_name=hda.get_display_name(),
                              history_view_link=history_view_link,
                              job_id=job.id,
                              job_tool_id=job.tool_id,
                              job_command_line=job.command_line,
                              job_stderr=job.stderr,
                              job_stdout=job.stdout,
                              job_info=job.info,
                              job_traceback=job.traceback,
                              email=email,
                              message=message ) )
        frm = to_address
        # Check email a bit
        email = email.strip()
        parts = email.split()
        if len( parts ) == 1 and len( email ) > 0:
            to = to_address + ", " + email
        else:
            to = to_address
        msg[ 'To' ] = to
        msg[ 'From' ] = frm
        msg[ 'Subject' ] = "Galaxy tool error report from " + email
        # Send it
        try:
            s = smtplib.SMTP()
            s.connect( smtp_server )
            s.sendmail( frm, [ to ], msg.as_string() )
            s.close()
            return trans.show_ok_message( "Your error report has been sent" )
        except Exception, e:
            return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )

    @web.expose
    def default( self, trans, dataset_id=None, **kwd ):
        return 'This link may not be followed from within Galaxy.'

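    # Archives are built as 'zip' (default), 'tgz' or 'tbz'; zip files are assembled in a
    # temporary directory, while tar archives are streamed with util.streamball.StreamBall.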
    @web.expose
    def archive_composite_dataset( self, trans, data=None, **kwd ):
        # Save a composite object into a compressed archive for downloading.
        params = util.Params( kwd )
        valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        outfname = data.name[0:150]
        outfname = ''.join( c in valid_chars and c or '_' for c in outfname )
        if params.do_action is None:
            params.do_action = 'zip' # default
        msg = util.restore_text( params.get( 'msg', '' ) )
        messagetype = params.get( 'messagetype', 'done' )
        if not data:
            msg = "You must select at least one dataset"
            messagetype = 'error'
        else:
            error = False
            try:
                if params.do_action == 'zip':
                    # Can't use mkstemp - the file must not exist first
                    tmpd = tempfile.mkdtemp()
                    tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
                    if ziptype == '64':
                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
                    else:
                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
                    archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
                elif params.do_action == 'tgz':
                    archive = util.streamball.StreamBall( 'w|gz' )
                elif params.do_action == 'tbz':
                    archive = util.streamball.StreamBall( 'w|bz2' )
            except ( OSError, zipfile.BadZipFile ):
                error = True
                log.exception( "Unable to create archive for download" )
                msg = "Unable to create archive for %s for download, please report this error" % outfname
                messagetype = 'error'
            if not error:
                current_user_roles = trans.get_current_user_roles()
                ext = data.extension
                path = data.file_name
                fname = os.path.split( path )[-1]
                efp = data.extra_files_path
                htmlname = os.path.splitext( outfname )[0]
                if not htmlname.endswith( ext ):
                    htmlname = '%s_%s' % ( htmlname, ext )
                archname = '%s.html' % htmlname # fake the real nature of the html file
                try:
                    archive.add( data.file_name, archname )
                except IOError:
                    error = True
                    log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name )
                    msg = "Unable to create archive for download, please report this error"
                    messagetype = 'error'
                flist = glob.glob( os.path.join( efp, '*.*' ) ) # glob returns full paths
                for fpath in flist:
                    efp, fname = os.path.split( fpath )
                    try:
                        archive.add( fpath, fname )
                    except IOError:
                        error = True
                        log.exception( "Unable to add %s to temporary library download archive" % fname )
                        msg = "Unable to create archive for download, please report this error"
                        messagetype = 'error'
                        continue
                if not error:
                    if params.do_action == 'zip':
                        archive.close()
                        tmpfh = open( tmpf, 'rb' )
                        # clean up now
                        try:
                            os.unlink( tmpf )
                            os.rmdir( tmpd )
                        except OSError:
                            error = True
                            msg = "Unable to remove temporary library download archive and directory"
                            log.exception( msg )
                            messagetype = 'error'
                        if not error:
                            trans.response.set_content_type( "application/x-zip-compressed" )
                            trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.zip" % outfname
                            return tmpfh
                    else:
                        trans.response.set_content_type( "application/x-tar" )
                        outext = 'tgz'
                        if params.do_action == 'tbz':
                            outext = 'tbz'
                        trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % ( outfname, outext )
                        archive.wsgi_status = trans.response.wsgi_status()
                        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
                        return archive.stream
        return trans.show_error_message( msg )

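    # display() also serves raw downloads: when to_ext is set or the datatype is binary the
    # file is returned as an attachment, and composite datatypes are handed off to
    # archive_composite_dataset() above.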
    @web.expose
    def display( self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd ):
        """Catches the dataset id and displays file contents as directed"""
        composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
        composite_extensions.append('html') # for archiving composite datatypes
        # DEPRECATION: We still support unencoded ids for backward compatibility
        try:
            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
            if data is None:
                raise ValueError( 'Invalid reference dataset id: %s.' % dataset_id )
        except:
            try:
                data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( dataset_id ) )
            except:
                data = None
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        current_user_roles = trans.get_current_user_roles()
        if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
            return trans.show_error_message( "You are not allowed to access this dataset" )

        if data.state == trans.model.Dataset.states.UPLOAD:
            return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )

        if filename and filename != "index":
            # For files in extra_files_path
            file_path = os.path.join( data.extra_files_path, filename )
            if os.path.exists( file_path ):
                mime, encoding = mimetypes.guess_type( file_path )
                if not mime:
                    try:
                        mime = trans.app.datatypes_registry.get_mimetype_by_extension( file_path.split( "." )[-1] )
                    except:
                        mime = "text/plain"
                trans.response.set_content_type( mime )
                return open( file_path )
            else:
                return "Could not find '%s' on the extra files path %s." % ( filename, file_path )

        mime = trans.app.datatypes_registry.get_mimetype_by_extension( data.extension.lower() )
        trans.response.set_content_type( mime )
        trans.log_event( "Display dataset id: %s" % str( dataset_id ) )

        if to_ext or isinstance( data.datatype, datatypes.binary.Binary ): # Saving the file, or binary file
            if data.extension in composite_extensions:
                return self.archive_composite_dataset( trans, data, **kwd )
            else:
                trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
                if not to_ext:
                    to_ext = data.extension
                valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
                fname = data.name
                fname = ''.join( c in valid_chars and c or '_' for c in fname )[0:150]
                trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s].%s" % ( data.hid, fname, to_ext )
                return open( data.file_name )
        if not os.path.exists( data.file_name ):
            raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % data.file_name )

        max_peek_size = 1000000 # 1 MB
        if not preview or isinstance( data.datatype, datatypes.images.Image ) or os.stat( data.file_name ).st_size < max_peek_size:
            return open( data.file_name )
        else:
            trans.response.set_content_type( "text/html" )
            return trans.stream_template_mako( "/dataset/large_file.mako",
                                               truncated_data = open( data.file_name ).read( max_peek_size ),
                                               data = data )

    @web.expose
    @web.require_login( "see all available datasets" )
    def list( self, trans, **kwargs ):
        """List all available datasets"""
        status = message = None

        if 'operation' in kwargs:
            operation = kwargs['operation'].lower()
            hda_ids = util.listify( kwargs.get( 'id', [] ) )

            # Display no message by default
            status, message = None, None

            # Load the hdas and ensure they all belong to the current user
            hdas = []
            for encoded_hda_id in hda_ids:
                hda_id = trans.security.decode_id( encoded_hda_id )
                hda = trans.sa_session.query( model.HistoryDatasetAssociation ).filter_by( id=hda_id ).first()
                if hda:
                    # Ensure history is owned by current user
                    if hda.history.user_id is not None and trans.user:
                        assert trans.user.id == hda.history.user_id, "HistoryDatasetAssociation does not belong to current user"
                    hdas.append( hda )
                else:
                    log.warn( "Invalid history_dataset_association id '%r' passed to list", hda_id )

            if hdas:
                if operation == "switch" or operation == "switch_history":
                    # Switch to a history that the HDA resides in.

                    # Convert hdas to histories.
                    histories = []
                    for hda in hdas:
                        histories.append( hda.history )

                    # Use history controller to switch the history. TODO: is this reasonable?
                    status, message = trans.webapp.controllers['history']._list_switch( trans, histories )

                    # Current history changed, refresh history frame; if switching to a dataset, set hda seek.
                    trans.template_context['refresh_frames'] = ['history']
                    if operation == "switch":
                        hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
                        trans.template_context[ 'seek_hda_ids' ] = hda_ids
                elif operation == "copy to current history":
                    # Copy a dataset to the current history.
                    target_histories = [ trans.get_history() ]
                    status, message = self._copy_datasets( trans, hda_ids, target_histories )

                    # Current history changed, refresh history frame.
                    trans.template_context['refresh_frames'] = ['history']

        # Render the list view
        return self.stored_list_grid( trans, status=status, message=message, **kwargs )

    @web.expose
    def imp( self, trans, dataset_id=None, **kwd ):
        """ Import another user's dataset via a shared URL; dataset is added to user's current history. """
        msg = ""

        # Set referer message.
        referer = trans.request.referer
        if referer:
            referer_message = "<a href='%s'>return to the previous page</a>" % referer
        else:
            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )

        # Error checking.
        if not dataset_id:
            return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )

        # Do import.
        cur_history = trans.get_history( create=True )
        status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ], imported=True )
        message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
        return trans.show_message( message, type=status, use_panels=True )

    @web.expose
    @web.json
    @web.require_login( "use Galaxy datasets" )
    def get_name_and_link_async( self, trans, id=None ):
        """ Returns dataset's name and link. """
        dataset = self.get_dataset( trans, id, False, True )
        return_dict = { "name" : dataset.name, "link" : url_for( action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
        return return_dict

    @web.expose
    def get_embed_html_async( self, trans, id ):
        """ Returns HTML for embedding a dataset in a page. """
        dataset = self.get_dataset( trans, id, False, True )
        if dataset:
            return "Embedded Dataset '%s'" % dataset.name

    @web.expose
    @web.require_login( "use Galaxy datasets" )
    def set_accessible_async( self, trans, id=None, accessible=False ):
        """ Does nothing because datasets do not have an importable/accessible attribute. This method could potentially set another attribute. """
        return

    @web.expose
    @web.require_login( "rate items" )
    @web.json
    def rate_async( self, trans, id, rating ):
        """ Rate a dataset asynchronously and return updated community data. """

        dataset = self.get_dataset( trans, id, check_ownership=False, check_accessible=True )
        if not dataset:
            return trans.show_error_message( "The specified dataset does not exist." )

        # Rate dataset.
        dataset_rating = self.rate_item( trans.sa_session, trans.get_user(), dataset, rating )

        return self.get_ave_item_rating_data( trans.sa_session, dataset )

    @web.expose
    def display_by_username_and_slug( self, trans, username, slug, preview=True ):
        """ Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
        dataset = self.get_dataset( trans, slug, False, True )
        if dataset:
            truncated, dataset_data = self.get_data( dataset, preview )
            dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )

            # If data is binary or an image, stream without template; otherwise, use display template.
            # TODO: figure out a way to display images in display template.
            if isinstance( dataset.datatype, datatypes.binary.Binary ) or isinstance( dataset.datatype, datatypes.images.Image ):
                mime = trans.app.datatypes_registry.get_mimetype_by_extension( dataset.extension.lower() )
                trans.response.set_content_type( mime )
                return open( dataset.file_name )
            else:
                # Get rating data.
                user_item_rating = 0
                if trans.get_user():
                    user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), dataset )
                    if user_item_rating:
                        user_item_rating = user_item_rating.rating
                    else:
                        user_item_rating = 0
                ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, dataset )

                return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data, truncated=truncated,
                                                 user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
        else:
            raise web.httpexceptions.HTTPNotFound()

    @web.expose
    def get_item_content_async( self, trans, id ):
        """ Returns item content in HTML format. """

        dataset = self.get_dataset( trans, id, False, True )
        if dataset is None:
            raise web.httpexceptions.HTTPNotFound()
        truncated, dataset_data = self.get_data( dataset, preview=True )
        # Get annotation.
        dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
        return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )

    @web.expose
    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
        dataset = self.get_dataset( trans, id, False, True )
        if not dataset:
            raise web.httpexceptions.HTTPNotFound()
        if dataset and new_annotation:
            # Sanitize annotation before adding it.
            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
            self.add_item_annotation( trans.sa_session, trans.get_user(), dataset, new_annotation )
            trans.sa_session.flush()
            return new_annotation

    @web.expose
    def get_annotation_async( self, trans, id ):
        dataset = self.get_dataset( trans, id, False, True )
        if not dataset:
            raise web.httpexceptions.HTTPNotFound()
        return self.get_item_annotation_str( trans.sa_session, trans.user, dataset )

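    # Grant an external display site access to this dataset (via the host security agent),
    # unless it is already public, and then redirect the browser to the site's display URL.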
    @web.expose
    def display_at( self, trans, dataset_id, filename=None, **kwd ):
        """Sets up dataset permissions so it is viewable at an external site"""
        site = filename
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if 'display_url' not in kwd or 'redirect_url' not in kwd:
            return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
        try:
            redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
        except:
            redirect_url = kwd['redirect_url'] # not all will need custom text
        current_user_roles = trans.get_current_user_roles()
        if trans.app.security_agent.dataset_is_public( data.dataset ):
            return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
        if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
            trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
            return trans.response.send_redirect( redirect_url )
        else:
            return trans.show_error_message( "You are not allowed to view this dataset at external sites. Please contact your Galaxy administrator to acquire management permissions for this dataset." )

    @web.expose
    def display_application( self, trans, dataset_id=None, user_id=None, app_name=None, link_name=None, app_action=None, action_param=None, **kwds ):
        """Access to external display applications"""
        if kwds:
            log.debug( "Unexpected Keywords passed to display_application: %s" % kwds ) #route memory?
        # Decode ids.
        data, user = decode_dataset_user( trans, dataset_id, user_id )
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        if user is None:
            user = trans.user
        if user:
            user_roles = user.all_roles()
        else:
            user_roles = []
        if None in [ app_name, link_name ]:
            return trans.show_error_message( "A display application name and link name must be provided." )

        if trans.app.security_agent.can_access_dataset( user_roles, data.dataset ):
            msg = []
            refresh = False
            display_app = trans.app.datatypes_registry.display_applications.get( app_name )
            assert display_app, "Unknown display application has been requested: %s" % app_name
            dataset_hash, user_hash = encode_dataset_user( trans, data, user )
            display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans )
            assert display_link, "Unknown display link has been requested: %s" % link_name
            if data.state == data.states.ERROR:
                msg.append( ( 'This dataset is in an error state, you cannot view it at an external display application.', 'error' ) )
            elif data.deleted:
                msg.append( ( 'This dataset has been deleted, you cannot view it at an external display application.', 'error' ) )
            elif data.state != data.states.OK:
                msg.append( ( 'You must wait for this dataset to be created before you can view it at an external display application.', 'info' ) )
                refresh = True
            else:
                # We have permissions, dataset is not deleted and is in OK state, allow access.
                if display_link.display_ready():
                    if app_action in [ 'data', 'param' ]:
                        assert action_param, "An action param must be provided for a data or param action"
                        # 'data' is used for things with filenames that could be passed off to a proxy,
                        # in case some display app wants all files to be in the same 'directory';
                        # 'data' can be forced to 'param', but not the other way (no filename for the other direction).
                        # Get the param name from the url param name.
                        action_param = display_link.get_param_name_by_url( action_param )
                        value = display_link.get_param_value( action_param )
                        assert value, "An invalid parameter name was provided: %s" % action_param
                        assert value.parameter.viewable, "This parameter is not viewable."
                        if value.parameter.type == 'data':
                            content_length = os.path.getsize( value.file_name )
                            rval = open( value.file_name )
                        else:
                            rval = str( value )
                            content_length = len( rval )
                        trans.response.set_content_type( value.mime_type() )
                        trans.response.headers[ 'Content-Length' ] = content_length
                        return rval
                    elif app_action is None:
                        # Redirect user to the URL generated by the display link.
                        return trans.response.send_redirect( display_link.display_url() )
                    else:
                        msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
                else:
                    if app_action is None:
                        if trans.history != data.history:
                            msg.append( ( 'You must import this dataset into your current history before you can view it at the desired display application.', 'error' ) )
                        else:
                            refresh = True
                            msg.append( ( 'This display application is being prepared.', 'info' ) )
                            if not display_link.preparing_display():
                                display_link.prepare_display()
                    else:
                        raise Exception( 'Attempted a view action (%s) on a non-ready display application' % app_action )
            return trans.fill_template_mako( "dataset/display_application/display.mako", msg=msg, display_app=display_app, display_link=display_link, refresh=refresh )
        return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )

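    # Helpers for the undelete/unhide actions below; both walk up any parent datasets to make
    # sure the item belongs to the current history before changing its flags.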
    def _undelete( self, trans, id ):
        try:
            id = int( id )
        except ValueError, e:
            return False
        history = trans.get_history()
        data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
        if data and data.undeletable:
            # Walk up parent datasets to find the containing history
            topmost_parent = data
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # Mark undeleted
            data.mark_undeleted()
            trans.sa_session.flush()
            trans.log_event( "Dataset id %s has been undeleted" % str( id ) )
            return True
        return False

    def _unhide( self, trans, id ):
        try:
            id = int( id )
        except ValueError, e:
            return False
        history = trans.get_history()
        data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
        if data:
            # Walk up parent datasets to find the containing history
            topmost_parent = data
            while topmost_parent.parent:
                topmost_parent = topmost_parent.parent
            assert topmost_parent in history.datasets, "Data does not belong to current history"
            # Mark unhidden
            data.mark_unhidden()
            trans.sa_session.flush()
            trans.log_event( "Dataset id %s has been unhidden" % str( id ) )
            return True
        return False

    @web.expose
    def undelete( self, trans, id ):
        if self._undelete( trans, id ):
            return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=True ) )
        raise Exception( "Error undeleting" )

    @web.expose
    def unhide( self, trans, id ):
        if self._unhide( trans, id ):
            return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden=True ) )
        raise Exception( "Error unhiding" )

    @web.expose
    def undelete_async( self, trans, id ):
        if self._undelete( trans, id ):
            return "OK"
        raise Exception( "Error undeleting" )

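    # Form-backed copying: renders /dataset/copy_view.mako and, when do_copy is set, copies the
    # selected datasets from the current history into the chosen (or newly created) histories.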
    @web.expose
    def copy_datasets( self, trans, source_dataset_ids="", target_history_ids="", new_history_name="", do_copy=False, **kwd ):
        params = util.Params( kwd )
        user = trans.get_user()
        history = trans.get_history()
        create_new_history = False
        refresh_frames = []
        if source_dataset_ids:
            if not isinstance( source_dataset_ids, list ):
                source_dataset_ids = source_dataset_ids.split( "," )
            source_dataset_ids = map( int, source_dataset_ids )
        else:
            source_dataset_ids = []
        if target_history_ids:
            if not isinstance( target_history_ids, list ):
                target_history_ids = target_history_ids.split( "," )
            if "create_new_history" in target_history_ids:
                create_new_history = True
                target_history_ids.remove( "create_new_history" )
            target_history_ids = map( int, target_history_ids )
        else:
            target_history_ids = []
        done_msg = error_msg = ""
        if do_copy:
            invalid_datasets = 0
            if not source_dataset_ids or not ( target_history_ids or create_new_history ):
                error_msg = "You must provide both source datasets and target histories."
                if create_new_history:
                    target_history_ids.append( "create_new_history" )
            else:
                if create_new_history:
                    new_history = trans.app.model.History()
                    if new_history_name:
                        new_history.name = new_history_name
                    new_history.user = user
                    trans.sa_session.add( new_history )
                    trans.sa_session.flush()
                    target_history_ids.append( new_history.id )
                if user:
                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user ) ]
                else:
                    target_histories = [ history ]
                if len( target_histories ) != len( target_history_ids ):
                    error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
                for data in map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, source_dataset_ids ):
                    if data is None:
                        error_msg = error_msg + "You tried to copy a dataset that does not exist. "
                        invalid_datasets += 1
                    elif data.history != history:
                        error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
                        invalid_datasets += 1
                    else:
                        for hist in target_histories:
                            hist.add_dataset( data.copy( copy_children = True ) )
                if history in target_histories:
                    refresh_frames = ['history']
                trans.sa_session.flush()
                done_msg = "%i datasets copied to %i histories." % ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) )
                trans.sa_session.refresh( history )
        elif create_new_history:
            target_history_ids.append( "create_new_history" )
        source_datasets = history.active_datasets
        target_histories = [ history ]
        if user:
            target_histories = user.active_histories
        return trans.fill_template( "/dataset/copy_view.mako",
                                    source_dataset_ids = source_dataset_ids,
                                    target_history_ids = target_history_ids,
                                    source_datasets = source_datasets,
                                    target_histories = target_histories,
                                    new_history_name = new_history_name,
                                    done_msg = done_msg,
                                    error_msg = error_msg,
                                    refresh_frames = refresh_frames )

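    # Shared by list() ("Copy to current history" operation) and imp() above.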
    def _copy_datasets( self, trans, dataset_ids, target_histories, imported=False ):
        """ Helper method for copying datasets. """
        user = trans.get_user()
        done_msg = error_msg = ""

        invalid_datasets = 0
        if not dataset_ids or not target_histories:
            error_msg = "You must provide both source datasets and target histories."
        else:
            # User must own target histories to copy datasets to them.
            for history in target_histories:
                if user != history.user:
                    error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_histories ) )
            for dataset_id in dataset_ids:
                data = self.get_dataset( trans, dataset_id, False, True )
                if data is None:
                    error_msg = error_msg + "You tried to copy a dataset that does not exist or that you do not have access to. "
                    invalid_datasets += 1
                else:
                    for hist in target_histories:
                        dataset_copy = data.copy( copy_children = True )
                        if imported:
                            dataset_copy.name = "imported: " + dataset_copy.name
                        hist.add_dataset( dataset_copy )
            trans.sa_session.flush()
            num_datasets_copied = len( dataset_ids ) - invalid_datasets
            done_msg = "%i dataset%s copied to %i histor%s." % \
                ( num_datasets_copied, iff( num_datasets_copied == 1, "", "s" ), len( target_histories ), iff( len( target_histories ) == 1, "y", "ies" ) )
            trans.sa_session.refresh( history )

        if error_msg != "":
            status = ERROR
            message = error_msg
        else:
            status = SUCCESS
            message = done_msg
        return status, message
---|