1 | """ |
---|
2 | Galaxy data model classes |
---|
3 | |
---|
4 | Naming: try to use class names that have a distinct plural form so that |
---|
5 | the relationship cardinalities are obvious (e.g. prefer Dataset to Data) |
---|
6 | """ |
---|
7 | |
---|
8 | import galaxy.datatypes |
---|
9 | from galaxy.util.bunch import Bunch |
---|
10 | from galaxy import util |
---|
11 | import galaxy.datatypes.registry |
---|
12 | from galaxy.datatypes.metadata import MetadataCollection |
---|
13 | from galaxy.security import RBACAgent, get_permitted_actions |
---|
14 | from galaxy.util.hash_util import * |
---|
15 | from galaxy.web.form_builder import * |
---|
16 | from galaxy.model.item_attrs import UsesAnnotations |
---|
17 | from sqlalchemy.orm import object_session |
---|
18 | import os.path, os, errno, codecs, operator, smtplib, socket, pexpect, logging |
---|
19 | |
---|
log = logging.getLogger( __name__ )

# Module-level registry placeholder; the running app replaces it via
# set_datatypes_registry() during startup.
datatypes_registry = galaxy.datatypes.registry.Registry() #Default Value Required for unit tests
---|
23 | |
---|
def set_datatypes_registry( d_registry ):
    """Replace the module-level datatypes registry with ``d_registry``."""
    global datatypes_registry
    datatypes_registry = d_registry
---|
30 | |
---|
class User( object ):
    """A registered Galaxy user account plus its role/group relationships."""
    def __init__( self, email=None, password=None ):
        # Identity / credentials
        self.email = email
        self.password = password
        # Status flags
        self.external = False
        self.deleted = False
        self.purged = False
        self.username = None
        # Relationships (normally populated by the ORM)
        self.histories = []
        self.credentials = []
    def set_password_cleartext( self, cleartext ):
        """Set 'self.password' to the digest of 'cleartext'."""
        self.password = new_secure_hash( text_type=cleartext )
    def check_password( self, cleartext ):
        """Check if 'cleartext' matches 'self.password' when hashed."""
        return new_secure_hash( text_type=cleartext ) == self.password
    def all_roles( self ):
        """Roles granted directly plus those inherited through group
        membership, deduplicated while preserving encounter order."""
        roles = [ ura.role for ura in self.roles ]
        for uga in self.groups:
            for gra in uga.group.roles:
                if gra.role not in roles:
                    roles.append( gra.role )
        return roles
    def accessible_libraries( self, trans, actions ):
        """Map each viewable, undeleted Library to a comma-separated string of
        folder ids that should NOT be displayed.

        The hidden-folder list may be incomplete; it is passed back to the
        caller so the same folders are not re-checked when library / folder
        select lists are rendered.
        """
        query = trans.sa_session.query( trans.app.model.Library ) \
                                .filter( trans.app.model.Library.table.c.deleted == False ) \
                                .order_by( trans.app.model.Library.name )
        roles = self.all_roles()
        accessible = {}
        for library in query:
            can_show, hidden_folder_ids = trans.app.security_agent.show_library_item( self, roles, library, actions )
            if can_show:
                accessible[ library ] = hidden_folder_ids
        return accessible
    def accessible_request_types( self, trans ):
        """List the undeleted RequestTypes whose permissions include at least
        one of this user's roles."""
        query = trans.sa_session.query( trans.app.model.RequestType ) \
                                .filter( trans.app.model.RequestType.table.c.deleted == False ) \
                                .order_by( trans.app.model.RequestType.name )
        role_ids = set( role.id for role in self.all_roles() )
        accessible = set()
        for request_type in query:
            if any( permission.role.id in role_ids for permission in request_type.actions ):
                accessible.add( request_type )
        return list( accessible )
---|
87 | |
---|
class Job( object ):
    """
    A job represents a request to run a tool given input datasets, tool
    parameters, and output datasets.
    """
    # Job lifecycle states.
    states = Bunch( NEW = 'new',
                    UPLOAD = 'upload',
                    WAITING = 'waiting',
                    QUEUED = 'queued',
                    RUNNING = 'running',
                    OK = 'ok',
                    ERROR = 'error',
                    DELETED = 'deleted' )
    def __init__( self ):
        self.session_id = None
        self.user_id = None
        self.tool_id = None
        self.tool_version = None
        self.command_line = None
        self.param_filename = None
        # Association collections built up via the add_* helpers below.
        self.parameters = []
        self.input_datasets = []
        self.output_datasets = []
        self.output_library_datasets = []
        self.state = Job.states.NEW
        self.info = None
        self.job_runner_name = None
        self.job_runner_external_id = None
        self.post_job_actions = []
        self.imported = False

    def add_parameter( self, name, value ):
        """Record a (string-encoded) tool parameter for this job."""
        self.parameters.append( JobParameter( name, value ) )
    def add_input_dataset( self, name, dataset ):
        """Associate an input dataset under the given parameter name."""
        self.input_datasets.append( JobToInputDatasetAssociation( name, dataset ) )
    def add_output_dataset( self, name, dataset ):
        """Associate an output dataset under the given output name."""
        self.output_datasets.append( JobToOutputDatasetAssociation( name, dataset ) )
    def add_output_library_dataset( self, name, dataset ):
        """Associate a library output dataset under the given output name."""
        self.output_library_datasets.append( JobToOutputLibraryDatasetAssociation( name, dataset ) )
    def add_post_job_action(self, pja):
        """Attach a post-job action to run when this job finishes."""
        self.post_job_actions.append( PostJobActionAssociation( pja, self ) )
    def set_state( self, state ):
        """Set the job state and mirror it onto every output dataset."""
        self.state = state
        # For historical reasons state propogates down to datasets
        for da in self.output_datasets:
            da.dataset.state = state
    def get_param_values( self, app ):
        """
        Read encoded parameter values from the database and turn back into a
        dict of tool parameter values.
        """
        param_dict = dict( ( p.name, p.value ) for p in self.parameters )
        tool = app.toolbox.tools_by_id[self.tool_id]
        return tool.params_from_strings( param_dict, app )
    def check_if_output_datasets_deleted( self ):
        """
        Return true if all of the output datasets associated with this job are
        in the deleted state
        """
        for dataset_assoc in self.output_datasets:
            dataset = dataset_assoc.dataset
            # only the originator of the job can delete a dataset to cause
            # cancellation of the job, no need to loop through history_associations
            if not dataset.deleted:
                return False
        return True
    def mark_deleted( self ):
        """
        Mark this job as deleted, and mark any output datasets as discarded.
        """
        self.state = Job.states.DELETED
        self.info = "Job output deleted by user before job completed."
        for dataset_assoc in self.output_datasets:
            dataset = dataset_assoc.dataset
            dataset.deleted = True
            dataset.state = dataset.states.DISCARDED
            # BUGFIX: the original loop reused the name 'dataset' for the
            # shared HDAs, rebinding the outer variable mid-iteration; use a
            # distinct name so the outer object is never shadowed.
            for shared_hda in dataset.dataset.history_associations:
                # propagate info across shared datasets
                shared_hda.deleted = True
                shared_hda.blurb = 'deleted'
                shared_hda.peek = 'Job deleted'
                shared_hda.info = 'Job output deleted by user before job completed'
---|
171 | |
---|
class JobParameter( object ):
    """A single name/value pair recorded for a job invocation."""
    def __init__( self, name, value ):
        self.name, self.value = name, value
---|
176 | |
---|
class JobToInputDatasetAssociation( object ):
    """Links a job to one of its input datasets under a parameter name."""
    def __init__( self, name, dataset ):
        self.name, self.dataset = name, dataset
---|
181 | |
---|
class JobToOutputDatasetAssociation( object ):
    """Links a job to one of its output datasets under an output name."""
    def __init__( self, name, dataset ):
        self.name, self.dataset = name, dataset
---|
186 | |
---|
class JobToOutputLibraryDatasetAssociation( object ):
    """Links a job to a library dataset it produced, under an output name."""
    def __init__( self, name, dataset ):
        self.name, self.dataset = name, dataset
---|
191 | |
---|
class PostJobAction( object ):
    """An action to apply to a workflow step's output once its job completes."""
    def __init__( self, action_type, workflow_step, output_name = None, action_arguments = None):
        self.workflow_step = workflow_step
        self.action_type = action_type
        # Optional: which output the action targets and any extra arguments.
        self.output_name = output_name
        self.action_arguments = action_arguments
---|
198 | |
---|
class PostJobActionAssociation( object ):
    """Links a PostJobAction to the job that will trigger it."""
    def __init__(self, pja, job):
        self.post_job_action = pja
        self.job = job
---|
203 | |
---|
class JobExternalOutputMetadata( object ):
    """Associates a job with the dataset (HDA or LDDA) whose metadata it is
    setting externally; exactly one of the two association slots is filled."""
    def __init__( self, job = None, dataset = None ):
        self.job = job
        if isinstance( dataset, galaxy.model.HistoryDatasetAssociation ):
            self.history_dataset_association = dataset
        elif isinstance( dataset, galaxy.model.LibraryDatasetDatasetAssociation ):
            self.library_dataset_dataset_association = dataset
    @property
    def dataset( self ):
        """Return whichever dataset association is populated, else None."""
        if self.history_dataset_association:
            return self.history_dataset_association
        if self.library_dataset_dataset_association:
            return self.library_dataset_dataset_association
        return None
---|
218 | |
---|
class JobExportHistoryArchive( object ):
    """Tracks a job that exports a history into an archive dataset."""
    def __init__( self, job=None, history=None, dataset=None, compressed=False, \
                  history_attrs_filename=None, datasets_attrs_filename=None,
                  jobs_attrs_filename=None ):
        self.job, self.history, self.dataset = job, history, dataset
        # Whether the resulting archive is compressed.
        self.compressed = compressed
        # Temp files holding serialized attributes used during export.
        self.history_attrs_filename = history_attrs_filename
        self.datasets_attrs_filename = datasets_attrs_filename
        self.jobs_attrs_filename = jobs_attrs_filename
---|
230 | |
---|
class Group( object ):
    """A named collection of users, used to grant roles collectively."""
    def __init__( self, name = None ):
        self.name, self.deleted = name, False
---|
235 | |
---|
class UserGroupAssociation( object ):
    """Membership link between a user and a group."""
    def __init__( self, user, group ):
        self.user, self.group = user, group
---|
240 | |
---|
class History( object, UsesAnnotations ):
    """A user's ordered collection of datasets (HDAs), plus session,
    sharing, and annotation state."""
    def __init__( self, id=None, name=None, user=None ):
        self.id = id
        self.name = name or "Unnamed history"
        self.deleted = False
        self.purged = False
        self.genome_build = None
        self.published = False
        # Relationships
        self.user = user
        self.datasets = []
        self.galaxy_sessions = []
    def _next_hid( self ):
        # TODO: override this with something in the database that ensures
        # better integrity
        # Linear scan for the current max hid; racy if two sessions add
        # datasets concurrently.
        if len( self.datasets ) == 0:
            return 1
        else:
            last_hid = 0
            for dataset in self.datasets:
                if dataset.hid > last_hid:
                    last_hid = dataset.hid
            return last_hid + 1
    def add_galaxy_session( self, galaxy_session, association=None ):
        # Associate a session with this history, creating the link object
        # when one is not supplied.
        if association is None:
            self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
        else:
            self.galaxy_sessions.append( association )
    def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid = True ):
        """Attach a Dataset (wrapped in a fresh HDA) or an existing HDA to
        this history; raises TypeError for anything else.

        When ``parent_id`` matches an existing dataset's id, the new dataset
        reuses that dataset's hid; otherwise a new hid is assigned if
        ``set_hid`` is true.
        """
        if isinstance( dataset, Dataset ):
            dataset = HistoryDatasetAssociation( dataset = dataset, copied_from = dataset )
            object_session( self ).add( dataset )
            object_session( self ).flush()
        elif not isinstance( dataset, HistoryDatasetAssociation ):
            raise TypeError, "You can only add Dataset and HistoryDatasetAssociation instances to a history ( you tried to add %s )." % str( dataset )
        if parent_id:
            for data in self.datasets:
                if data.id == parent_id:
                    dataset.hid = data.hid
                    break
            else:
                # for/else: no dataset matched parent_id; fall back to a new hid.
                if set_hid:
                    dataset.hid = self._next_hid()
        else:
            if set_hid:
                dataset.hid = self._next_hid()
        dataset.history = self
        if genome_build not in [None, '?']:
            self.genome_build = genome_build
        self.datasets.append( dataset )
    def copy( self, name=None, target_user=None, activatable=False ):
        """Return a new History copied from this one (HDAs, annotations and
        hid counter).  ``activatable`` selects datasets whose underlying
        Dataset is not deleted; otherwise only active datasets are copied.
        Flush ordering matters: the new history must exist before HDAs are
        attached.
        """
        # Create new history.
        if not name:
            name = self.name
        if not target_user:
            target_user = self.user
        new_history = History( name=name, user=target_user )
        db_session = object_session( self )
        db_session.add( new_history )
        db_session.flush()

        # Copy annotation.
        self.copy_item_annotation( db_session, self.user, self, target_user, new_history )

        # Copy HDAs.
        if activatable:
            hdas = self.activatable_datasets
        else:
            hdas = self.active_datasets
        for hda in hdas:
            # Copy HDA.
            new_hda = hda.copy( copy_children=True, target_history=new_history )
            new_history.add_dataset( new_hda, set_hid = False )
            db_session.add( new_hda )
            db_session.flush()
            # Copy annotation.
            self.copy_item_annotation( db_session, self.user, hda, target_user, new_hda )
        # NOTE(review): hid_counter is presumably an ORM-mapped column
        # (not set in __init__) -- confirm against mapping.py.
        new_history.hid_counter = self.hid_counter
        db_session.add( new_history )
        db_session.flush()
        return new_history
    @property
    def activatable_datasets( self ):
        # This needs to be a list
        # HDAs whose underlying Dataset has not been deleted.
        return [ hda for hda in self.datasets if not hda.dataset.deleted ]
    def get_display_name( self ):
        """ History name can be either a string or a unicode object. If string, convert to unicode object assuming 'utf-8' format. """
        history_name = self.name
        if isinstance(history_name, str):
            history_name = unicode(history_name, 'utf-8')
        return history_name
---|
332 | |
---|
class HistoryUserShareAssociation( object ):
    """Marks a history as shared with a specific user."""
    def __init__( self ):
        self.history = self.user = None
---|
337 | |
---|
class UserRoleAssociation( object ):
    """Grants a role directly to a user."""
    def __init__( self, user, role ):
        self.user, self.role = user, role
---|
342 | |
---|
class GroupRoleAssociation( object ):
    """Grants a role to every member of a group."""
    def __init__( self, group, role ):
        self.group, self.role = group, role
---|
347 | |
---|
class Role( object ):
    """A named permission grant; ``type`` distinguishes private, system,
    user, admin and sharing roles."""
    private_id = None
    types = Bunch(
        PRIVATE = 'private',
        SYSTEM = 'system',
        USER = 'user',
        ADMIN = 'admin',
        SHARING = 'sharing'
    )
    def __init__( self, name="", description="", type="system", deleted=False ):
        # NOTE: the 'type' parameter mirrors the DB column name and so
        # intentionally shadows the builtin.
        self.name, self.description = name, description
        self.type, self.deleted = type, deleted
---|
362 | |
---|
class DatasetPermissions( object ):
    """Grants ``role`` permission to perform ``action`` on ``dataset``."""
    def __init__( self, action, dataset, role ):
        self.action, self.dataset, self.role = action, dataset, role
---|
368 | |
---|
class LibraryPermissions( object ):
    """Grants ``role`` permission for ``action`` on a Library.

    Raises Exception when ``library_item`` is not a Library.  (The original
    raised a bare string, which has been a TypeError at runtime since
    Python 2.6 -- string exceptions were removed.)
    """
    def __init__( self, action, library_item, role ):
        self.action = action
        if isinstance( library_item, Library ):
            self.library = library_item
        else:
            raise Exception( "Invalid Library specified: %s" % library_item.__class__.__name__ )
        self.role = role
---|
377 | |
---|
class LibraryFolderPermissions( object ):
    """Grants ``role`` permission for ``action`` on a LibraryFolder.

    Raises Exception when ``library_item`` is not a LibraryFolder.  (The
    original raised a bare string -- invalid since Python 2.6.)
    """
    def __init__( self, action, library_item, role ):
        self.action = action
        if isinstance( library_item, LibraryFolder ):
            self.folder = library_item
        else:
            raise Exception( "Invalid LibraryFolder specified: %s" % library_item.__class__.__name__ )
        self.role = role
---|
386 | |
---|
class LibraryDatasetPermissions( object ):
    """Grants ``role`` permission for ``action`` on a LibraryDataset.

    Raises Exception when ``library_item`` is not a LibraryDataset.  (The
    original raised a bare string -- invalid since Python 2.6.)
    """
    def __init__( self, action, library_item, role ):
        self.action = action
        if isinstance( library_item, LibraryDataset ):
            self.library_dataset = library_item
        else:
            raise Exception( "Invalid LibraryDataset specified: %s" % library_item.__class__.__name__ )
        self.role = role
---|
395 | |
---|
class LibraryDatasetDatasetAssociationPermissions( object ):
    """Grants ``role`` permission for ``action`` on an LDDA.

    Raises Exception when ``library_item`` is not a
    LibraryDatasetDatasetAssociation.  (The original raised a bare string --
    invalid since Python 2.6.)
    """
    def __init__( self, action, library_item, role ):
        self.action = action
        if isinstance( library_item, LibraryDatasetDatasetAssociation ):
            self.library_dataset_dataset_association = library_item
        else:
            raise Exception( "Invalid LibraryDatasetDatasetAssociation specified: %s" % library_item.__class__.__name__ )
        self.role = role
---|
404 | |
---|
class DefaultUserPermissions( object ):
    """A user's default permission: new datasets get ``action`` for ``role``."""
    def __init__( self, user, action, role ):
        self.user, self.action, self.role = user, action, role
---|
410 | |
---|
class DefaultHistoryPermissions( object ):
    """A history's default permission applied to datasets created in it."""
    def __init__( self, history, action, role ):
        self.history, self.action, self.role = history, action, role
---|
416 | |
---|
class Dataset( object ):
    """The physical dataset file on disk, shared by the HDAs/LDDAs that
    reference it."""
    # Lifecycle states for the dataset's file/creating job.
    states = Bunch( NEW = 'new',
                    UPLOAD = 'upload',
                    QUEUED = 'queued',
                    RUNNING = 'running',
                    OK = 'ok',
                    EMPTY = 'empty',
                    ERROR = 'error',
                    DISCARDED = 'discarded',
                    SETTING_METADATA = 'setting_metadata',
                    FAILED_METADATA = 'failed_metadata' )
    permitted_actions = get_permitted_actions( filter='DATASET' )
    # Class-level default; normally overridden by application configuration.
    file_path = "/tmp/"
    engine = None
    def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ):
        self.id = id
        self.state = state
        self.deleted = False
        self.purged = False
        self.purgable = purgable
        # When set, the file lives outside Galaxy's managed file_path.
        self.external_filename = external_filename
        self._extra_files_path = extra_files_path
        self.file_size = file_size
    def get_file_name( self ):
        """Return the absolute path of this dataset's file.

        Managed (non-external) datasets live either directly under
        ``file_path`` (legacy layout) or in a hashed directory tree, which
        is created on demand.
        """
        if not self.external_filename:
            assert self.id is not None, "ID must be set before filename used (commit the object)"
            # First try filename directly under file_path
            filename = os.path.join( self.file_path, "dataset_%d.dat" % self.id )
            # Only use that filename if it already exists (backward compatibility),
            # otherwise construct hashed path
            if not os.path.exists( filename ):
                dir = os.path.join( self.file_path, *directory_hash_id( self.id ) )
                # Create directory if it does not exist
                if not os.path.exists( dir ):
                    os.makedirs( dir )
                # Return filename inside hashed directory
                return os.path.abspath( os.path.join( dir, "dataset_%d.dat" % self.id ) )
        else:
            filename = self.external_filename
        # Make filename absolute
        return os.path.abspath( filename )
    def set_file_name ( self, filename ):
        # An empty/None filename clears the external path so the managed
        # layout is used again.
        if not filename:
            self.external_filename = None
        else:
            self.external_filename = filename
    file_name = property( get_file_name, set_file_name )
    @property
    def extra_files_path( self ):
        """Absolute path of the directory holding this dataset's extra files
        (legacy flat layout first, then the hashed layout)."""
        if self._extra_files_path:
            path = self._extra_files_path
        else:
            path = os.path.join( self.file_path, "dataset_%d_files" % self.id )
            #only use path directly under self.file_path if it exists
            if not os.path.exists( path ):
                path = os.path.join( os.path.join( self.file_path, *directory_hash_id( self.id ) ), "dataset_%d_files" % self.id )
        # Make path absolute
        return os.path.abspath( path )
    def get_size( self, nice_size=False ):
        """Returns the size of the data on disk, preferring the cached
        ``file_size`` when present."""
        if self.file_size:
            if nice_size:
                return galaxy.datatypes.data.nice_size( self.file_size )
            else:
                return self.file_size
        else:
            try:
                if nice_size:
                    return galaxy.datatypes.data.nice_size( os.path.getsize( self.file_name ) )
                else:
                    return os.path.getsize( self.file_name )
            except OSError:
                # Missing/unreadable file is reported as empty.
                return 0
    def set_size( self ):
        """Cache the on-disk size in ``file_size`` if not already set."""
        try:
            if not self.file_size:
                self.file_size = os.path.getsize( self.file_name )
        except OSError:
            self.file_size = 0
    def has_data( self ):
        """Detects whether there is any data"""
        return self.get_size() > 0
    def mark_deleted( self, include_children=True ):
        self.deleted = True
    def is_multi_byte( self ):
        # Sample the first 100 characters to guess at multi-byte content;
        # undecodable bytes are treated as "not multi-byte".
        if not self.has_data():
            return False
        try:
            return util.is_multi_byte( codecs.open( self.file_name, 'r', 'utf-8' ).read( 100 ) )
        except UnicodeDecodeError, e:
            return False
    # FIXME: sqlalchemy will replace this
    def _delete(self):
        """Remove the file that corresponds to this data"""
        try:
            # NOTE(review): 'self.data' is not set anywhere in this class;
            # this looks like it should be 'self.file_name' -- confirm
            # before relying on this method.
            os.remove(self.data.file_name)
        except OSError, e:
            log.critical('%s delete error %s' % (self.__class__.__name__, e))
    def get_access_roles( self, trans ):
        """Return the roles holding the DATASET_ACCESS permission on this
        dataset."""
        roles = []
        for dp in self.actions:
            if dp.action == trans.app.security_agent.permitted_actions.DATASET_ACCESS.action:
                roles.append( dp.role )
        return roles
---|
522 | |
---|
class DatasetInstance( object ):
    """A base class for all 'dataset instances', HDAs, LDAs, etc"""
    states = Dataset.states
    permitted_actions = Dataset.permitted_actions
    def __init__( self, id=None, hid=None, name=None, info=None, blurb=None, peek=None, extension=None,
                  dbkey=None, metadata=None, history=None, dataset=None, deleted=False, designation=None,
                  parent_id=None, validation_errors=None, visible=True, create_dataset=False, sa_session=None ):
        self.name = name or "Unnamed dataset"
        self.id = id
        self.info = info
        self.blurb = blurb
        self.peek = peek
        self.extension = extension
        self.designation = designation
        self.metadata = metadata or dict()
        if dbkey: #dbkey is stored in metadata, only set if non-zero, or else we could clobber one supplied by input 'metadata'
            self.dbkey = dbkey
        self.deleted = deleted
        self.visible = visible
        # Relationships
        if not dataset and create_dataset:
            # Had to pass the sqlalchemy session in order to create a new dataset
            dataset = Dataset( state=Dataset.states.NEW )
            sa_session.add( dataset )
            sa_session.flush()
        self.dataset = dataset
        self.parent_id = parent_id
        self.validation_errors = validation_errors
    @property
    def ext( self ):
        # Alias for the file extension / datatype key.
        return self.extension
    def get_dataset_state( self ):
        #self._state is currently only used when setting metadata externally
        #leave setting the state as-is, we'll currently handle this specially in the external metadata code
        if self._state:
            return self._state
        return self.dataset.state
    def set_dataset_state ( self, state ):
        self.dataset.state = state
        object_session( self ).add( self.dataset )
        object_session( self ).flush() #flush here, because hda.flush() won't flush the Dataset object
    state = property( get_dataset_state, set_dataset_state )
    def get_file_name( self ):
        # File access delegates to the underlying Dataset.
        return self.dataset.get_file_name()
    def set_file_name (self, filename):
        return self.dataset.set_file_name( filename )
    file_name = property( get_file_name, set_file_name )
    @property
    def extra_files_path( self ):
        return self.dataset.extra_files_path
    @property
    def datatype( self ):
        # Resolve the datatype object via the module-level registry.
        return datatypes_registry.get_datatype_by_extension( self.extension )
    def get_metadata( self ):
        # Lazily (re)build the MetadataCollection; its parent is stored via
        # weakref, so it may need recreating after a session clear.
        if not hasattr( self, '_metadata_collection' ) or self._metadata_collection.parent != self: #using weakref to store parent (to prevent circ ref), does a Session.clear() cause parent to be invalidated, while still copying over this non-database attribute?
            self._metadata_collection = MetadataCollection( self )
        return self._metadata_collection
    def set_metadata( self, bunch ):
        # Needs to accept a MetadataCollection, a bunch, or a dict
        self._metadata = self.metadata.make_dict_copy( bunch )
    metadata = property( get_metadata, set_metadata )
    # This provide backwards compatibility with using the old dbkey
    # field in the database. That field now maps to "old_dbkey" (see mapping.py).
    def get_dbkey( self ):
        dbkey = self.metadata.dbkey
        if not isinstance(dbkey, list): dbkey = [dbkey]
        if dbkey in [[None], []]: return "?"
        return dbkey[0]
    def set_dbkey( self, value ):
        # Only meaningful when the datatype declares a dbkey metadata element.
        if "dbkey" in self.datatype.metadata_spec:
            if not isinstance(value, list):
                self.metadata.dbkey = [value]
            else:
                self.metadata.dbkey = value
    dbkey = property( get_dbkey, set_dbkey )
    def change_datatype( self, new_ext ):
        # Converted/associated files become stale when the datatype changes.
        self.clear_associated_files()
        datatypes_registry.change_datatype( self, new_ext )
    def get_size( self, nice_size=False ):
        """Returns the size of the data on disk"""
        if nice_size:
            return galaxy.datatypes.data.nice_size( self.dataset.get_size() )
        return self.dataset.get_size()
    def set_size( self ):
        """Cache the size of the data on disk (delegates to the Dataset)."""
        return self.dataset.set_size()
    def has_data( self ):
        """Detects whether there is any data"""
        return self.dataset.has_data()
    def get_raw_data( self ):
        """Returns the full data. To stream it open the file_name and read/write as needed"""
        return self.datatype.get_raw_data( self )
    def write_from_stream( self, stream ):
        """Writes data from a stream"""
        self.datatype.write_from_stream(self, stream)
    def set_raw_data( self, data ):
        """Saves the data on the disc"""
        self.datatype.set_raw_data(self, data)
    def get_mime( self ):
        """Returns the mime type of the data"""
        return datatypes_registry.get_mimetype_by_extension( self.extension.lower() )
    def is_multi_byte( self ):
        """Data consists of multi-byte characters"""
        return self.dataset.is_multi_byte()
    def set_peek( self, is_multi_byte=False ):
        return self.datatype.set_peek( self, is_multi_byte=is_multi_byte )
    def init_meta( self, copy_from=None ):
        return self.datatype.init_meta( self, copy_from=copy_from )
    def set_meta( self, **kwd ):
        # metadata_safe clearing preserves files still needed for metadata.
        self.clear_associated_files( metadata_safe = True )
        return self.datatype.set_meta( self, **kwd )
    def missing_meta( self, **kwd ):
        return self.datatype.missing_meta( self, **kwd )
    def as_display_type( self, type, **kwd ):
        return self.datatype.as_display_type( self, type, **kwd )
    def display_peek( self ):
        return self.datatype.display_peek( self )
    def display_name( self ):
        return self.datatype.display_name( self )
    def display_info( self ):
        return self.datatype.display_info( self )
    def get_converted_files_by_type( self, file_type ):
        # Return the first live (not deleted) converted dataset of this type.
        for assoc in self.implicitly_converted_datasets:
            if not assoc.deleted and assoc.type == file_type:
                return assoc.dataset
        return None
    def get_converted_dataset(self, trans, target_ext):
        """
        Return converted dataset(s) if they exist. If not converted yet, do so and return None (the first time).
        If unconvertible, raise exception.
        """
        # See if we can convert the dataset
        if target_ext not in self.get_converter_types():
            # NOTE(review): the extra args are stored as the exception tuple,
            # not interpolated into the message.
            raise ValueError("Conversion from '%s' to '%s' not possible", self.extension, target_ext)

        # See if converted dataset already exists
        converted_dataset = self.get_converted_files_by_type( target_ext )
        if converted_dataset:
            return converted_dataset

        # Conversion is possible but hasn't been done yet, run converter.
        # Check if we have dependencies
        deps = {}
        try:
            fail_dependencies = False
            depends_on = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
            for dependency in depends_on:
                # Recursively ensure each dependency conversion is complete.
                dep_dataset = self.get_converted_dataset(trans, dependency)
                if dep_dataset is None or dep_dataset.state != trans.app.model.Job.states.OK:
                    fail_dependencies = True
                else:
                    deps[dependency] = dep_dataset
            if fail_dependencies:
                return None
        except ValueError:
            raise ValueError("A dependency could not be converted.")
        except KeyError:
            pass # No deps

        assoc = ImplicitlyConvertedDatasetAssociation( parent=self, file_type=target_ext, metadata_safe=False )
        new_dataset = self.datatype.convert_dataset( trans, self, target_ext, return_output=True, visible=False, deps=deps ).values()[0]
        new_dataset.hid = self.hid
        new_dataset.name = self.name
        session = trans.sa_session
        session.add( new_dataset )
        assoc.dataset = new_dataset
        session.add( assoc )
        session.flush()
        return None
---|
692 | def clear_associated_files( self, metadata_safe = False, purge = False ): |
---|
693 | raise 'Unimplemented' |
---|
694 | def get_child_by_designation(self, designation): |
---|
695 | for child in self.children: |
---|
696 | if child.designation == designation: |
---|
697 | return child |
---|
698 | return None |
---|
699 | def get_converter_types(self): |
---|
700 | return self.datatype.get_converter_types( self, datatypes_registry ) |
---|
701 | def find_conversion_destination( self, accepted_formats, **kwd ): |
---|
702 | """Returns ( target_ext, existing converted dataset )""" |
---|
703 | return self.datatype.find_conversion_destination( self, accepted_formats, datatypes_registry, **kwd ) |
---|
    def add_validation_error( self, validation_error ):
        """Attach a single ValidationError to this dataset instance."""
        # validation_errors is an ORM-instrumented collection; append in place.
        self.validation_errors.append( validation_error )
    def extend_validation_errors( self, validation_errors ):
        """Attach a sequence of ValidationErrors to this dataset instance."""
        # validation_errors is an ORM-instrumented collection; extend in place.
        self.validation_errors.extend(validation_errors)
708 | def mark_deleted( self, include_children=True ): |
---|
709 | self.deleted = True |
---|
710 | if include_children: |
---|
711 | for child in self.children: |
---|
712 | child.mark_deleted() |
---|
713 | def mark_undeleted( self, include_children=True ): |
---|
714 | self.deleted = False |
---|
715 | if include_children: |
---|
716 | for child in self.children: |
---|
717 | child.mark_undeleted() |
---|
718 | def mark_unhidden( self, include_children=True ): |
---|
719 | self.visible = True |
---|
720 | if include_children: |
---|
721 | for child in self.children: |
---|
722 | child.mark_unhidden() |
---|
723 | def undeletable( self ): |
---|
724 | if self.purged: |
---|
725 | return False |
---|
726 | return True |
---|
    @property
    def is_pending( self ):
        """
        Return true if the dataset is neither ready nor in error
        """
        # These are the non-terminal states; anything else (e.g. ok / error)
        # is considered settled.
        return self.state in ( self.states.NEW, self.states.UPLOAD,
                               self.states.QUEUED, self.states.RUNNING,
                               self.states.SETTING_METADATA )
    @property
    def source_library_dataset( self ):
        """
        Walk the copied_from_* lineage looking for the LDDA this instance was
        originally copied from.  Returns a ( ldda, library_dataset ) tuple, or
        ( None, None ) when no library ancestor is found.
        """
        def get_source( dataset ):
            # Direct hit: an LDDA that still points at its LibraryDataset.
            if isinstance( dataset, LibraryDatasetDatasetAssociation ):
                if dataset.library_dataset:
                    return ( dataset, dataset.library_dataset )
            # Otherwise recurse through the library lineage first, then the
            # history lineage.
            # NOTE(review): get_source() returns the truthy tuple ( None, None )
            # on failure, so these `if source:` guards never reject a failed
            # branch - the first branch explored always wins.  Confirm intended.
            if dataset.copied_from_library_dataset_dataset_association:
                source = get_source( dataset.copied_from_library_dataset_dataset_association )
                if source:
                    return source
            if dataset.copied_from_history_dataset_association:
                source = get_source( dataset.copied_from_history_dataset_association )
                if source:
                    return source
            return ( None, None )
        return get_source( self )
751 | |
---|
752 | def get_display_applications( self, trans ): |
---|
753 | return self.datatype.get_display_applications_by_dataset( self, trans ) |
---|
754 | |
---|
class HistoryDatasetAssociation( DatasetInstance ):
    """A DatasetInstance as it appears within a user's history."""
    def __init__( self,
                  hid = None,
                  history = None,
                  copied_from_history_dataset_association = None,
                  copied_from_library_dataset_dataset_association = None,
                  sa_session = None,
                  **kwd ):
        # FIXME: sa_session must be passed to DatasetInstance if the create_dataset
        # parameter is True so that the new object can be flushed.  Is there a better way?
        DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
        self.hid = hid
        # Relationships
        self.history = history
        self.copied_from_history_dataset_association = copied_from_history_dataset_association
        self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
    def copy( self, copy_children = False, parent_id = None, target_history = None ):
        """Create (and flush) a copy of this HDA, optionally copying children
        and optionally placing the copy into target_history."""
        hda = HistoryDatasetAssociation( hid=self.hid,
                                         name=self.name,
                                         info=self.info,
                                         blurb=self.blurb,
                                         peek=self.peek,
                                         extension=self.extension,
                                         dbkey=self.dbkey,
                                         dataset = self.dataset,
                                         visible=self.visible,
                                         deleted=self.deleted,
                                         parent_id=parent_id,
                                         copied_from_history_dataset_association=self,
                                         history = target_history )
        object_session( self ).add( hda )
        object_session( self ).flush()
        hda.set_size()
        # Need to set after flushed, as MetadataFiles require dataset.id
        hda.metadata = self.metadata
        if copy_children:
            for child in self.children:
                child_copy = child.copy( copy_children = copy_children, parent_id = hda.id )
        if not self.datatype.copy_safe_peek:
            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
            hda.set_peek()
        object_session( self ).flush()
        return hda
    def to_library_dataset_dataset_association( self, trans, target_folder, replace_dataset=None, parent_id=None, user=None, roles=None, ldda_message='' ):
        """Copy this HDA into a data library folder, creating (or replacing a
        version of) a LibraryDataset plus a new LDDA, and flush everything."""
        # BUG FIX: roles previously defaulted to a shared mutable list ([]);
        # default to None and normalize here instead.
        if roles is None:
            roles = []
        if replace_dataset:
            # The replace_dataset param ( when not None ) refers to a LibraryDataset that is being replaced with a new version.
            library_dataset = replace_dataset
        else:
            # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
            # LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
            library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
            object_session( self ).add( library_dataset )
            object_session( self ).flush()
        if not user:
            # This should never happen since users must be authenticated to upload to a data library
            user = self.history.user
        ldda = LibraryDatasetDatasetAssociation( name=self.name,
                                                 info=self.info,
                                                 blurb=self.blurb,
                                                 peek=self.peek,
                                                 extension=self.extension,
                                                 dbkey=self.dbkey,
                                                 dataset=self.dataset,
                                                 library_dataset=library_dataset,
                                                 visible=self.visible,
                                                 deleted=self.deleted,
                                                 parent_id=parent_id,
                                                 copied_from_history_dataset_association=self,
                                                 user=user )
        object_session( self ).add( ldda )
        object_session( self ).flush()
        # If roles were selected on the upload form, restrict access to the Dataset to those roles
        for role in roles:
            dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role )
            trans.sa_session.add( dp )
            trans.sa_session.flush()
        # Must set metadata after ldda flushed, as MetadataFiles require ldda.id
        ldda.metadata = self.metadata
        if ldda_message:
            ldda.message = ldda_message
        if not replace_dataset:
            target_folder.add_library_dataset( library_dataset, genome_build=ldda.dbkey )
            object_session( self ).add( target_folder )
            object_session( self ).flush()
        library_dataset.library_dataset_dataset_association_id = ldda.id
        object_session( self ).add( library_dataset )
        object_session( self ).flush()
        for child in self.children:
            child_copy = child.to_library_dataset_dataset_association( trans,
                                                                       target_folder=target_folder,
                                                                       replace_dataset=replace_dataset,
                                                                       parent_id=ldda.id,
                                                                       user=ldda.user )
        if not self.datatype.copy_safe_peek:
            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
            ldda.set_peek()
        object_session( self ).flush()
        return ldda
    def clear_associated_files( self, metadata_safe = False, purge = False ):
        """Clear implicitly-converted datasets attached to this HDA."""
        # metadata_safe = True means to only clear when assoc.metadata_safe == False
        for assoc in self.implicitly_converted_datasets:
            if not metadata_safe or not assoc.metadata_safe:
                assoc.clear( purge = purge )
    def get_display_name( self ):
        ## Name can be either a string or a unicode object. If string, convert to unicode object assuming 'utf-8' format.
        hda_name = self.name
        if isinstance(hda_name, str):
            hda_name = unicode(hda_name, 'utf-8')
        return hda_name
    def get_access_roles( self, trans ):
        """Delegate to the underlying Dataset's access-role lookup."""
        return self.dataset.get_access_roles( trans )
866 | |
---|
class HistoryDatasetAssociationDisplayAtAuthorization( object ):
    """Records that a user authorized 'display at' an external site for an HDA."""
    def __init__( self, hda=None, user=None, site=None ):
        # Keep the (hda, user, site) triple exactly as given.
        self.user = user
        self.site = site
        self.history_dataset_association = hda
872 | |
---|
class Library( object ):
    """A data library: a named, permission-controlled container of folders."""
    permitted_actions = get_permitted_actions( filter='LIBRARY' )
    api_collection_visible_keys = ( 'id', 'name' )
    api_element_visible_keys = ( 'name', 'description', 'synopsis' )
    def __init__( self, name=None, description=None, synopsis=None, root_folder=None ):
        self.name = name or "Unnamed library"
        self.description = description
        self.synopsis = synopsis
        self.root_folder = root_folder
    def get_info_association( self, restrict=False, inherited=False ):
        """Return ( info_association, inherited ); when the caller is inheriting,
        only an association marked inheritable is returned."""
        if not self.info_association:
            return None, inherited
        candidate = self.info_association[0]
        if inherited and not candidate.inheritable:
            return None, inherited
        return candidate, inherited
    def get_template_widgets( self, trans, get_contents=True ):
        # See if we have any associated templates - the returned value for
        # inherited is not applicable at the library level.  The get_contents
        # param is passed by callers that are inheriting a template - these
        # are usually new library datasets for which we want to include template
        # fields on the upload form, but not necessarily the contents of the
        # inherited template saved for the parent.
        info_association, inherited = self.get_info_association()
        if not info_association:
            return []
        template = info_association.template
        if get_contents:
            # See if we have any field contents
            info = info_association.info
            if info:
                return template.get_widgets( trans.user, contents=info.content )
        return template.get_widgets( trans.user )
    def get_access_roles( self, trans ):
        """Return the roles that grant LIBRARY_ACCESS on this library."""
        access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
        return [ lp.role for lp in self.actions if lp.action == access_action ]
    def get_display_name( self ):
        # Library name can be either a string or a unicode object.  If string,
        # convert to unicode object assuming 'utf-8' format.
        name = self.name
        if isinstance( name, str ):
            return unicode( name, 'utf-8' )
        return name
    def get_api_value( self, view='collection' ):
        """Return a dict of this library's attributes for the given API view."""
        try:
            visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
        except AttributeError:
            raise Exception( 'Unknown API view: %s' % view )
        rval = {}
        for key in visible_keys:
            # Missing attributes are reported as None rather than erroring out.
            rval[key] = getattr( self, key, None )
        return rval
931 | |
---|
class LibraryFolder( object ):
    """A folder node within a data library, holding datasets and subfolders."""
    api_element_visible_keys = ( 'name', 'description', 'item_count', 'genome_build' )
    def __init__( self, name=None, description=None, item_count=0, order_id=None ):
        self.name = name or "Unnamed folder"
        self.description = description
        self.item_count = item_count
        self.order_id = order_id
        self.genome_build = None
    def add_library_dataset( self, library_dataset, genome_build=None ):
        """Attach library_dataset to this folder, bumping item_count and
        recording a concrete genome_build when one is supplied."""
        library_dataset.folder_id = self.id
        library_dataset.order_id = self.item_count
        self.item_count += 1
        # '?' is the "unknown build" placeholder; don't let it clobber a real value.
        if genome_build not in [None, '?']:
            self.genome_build = genome_build
    def add_folder( self, folder ):
        """Attach a subfolder to this folder, bumping item_count."""
        folder.parent_id = self.id
        folder.order_id = self.item_count
        self.item_count += 1
    def get_info_association( self, restrict=False, inherited=False ):
        # If restrict is True, we will return this folder's info_association, not inheriting.
        # If restrict is False, we'll return the next available info_association in the
        # inheritable hierarchy if it is "inheritable".  True is also returned if the
        # info_association was inherited and False if not.  This enables us to eliminate
        # displaying any contents of the inherited template.
        if self.info_association:
            if not inherited or self.info_association[0].inheritable:
                return self.info_association[0], inherited
            else:
                return None, inherited
        if restrict:
            return None, inherited
        if self.parent:
            return self.parent.get_info_association( inherited=True )
        if self.library_root:
            return self.library_root[0].get_info_association( inherited=True )
        return None, inherited
    def get_template_widgets( self, trans, get_contents=True ):
        # See if we have any associated templates.  The get_contents
        # param is passed by callers that are inheriting a template - these
        # are usually new library datasets for which we want to include template
        # fields on the upload form.
        info_association, inherited = self.get_info_association()
        if info_association:
            if inherited:
                template = info_association.template.current.latest_form
            else:
                template = info_association.template
            # See if we have any field contents, but only if the info_association was
            # not inherited ( we do not want to display the inherited contents ).
            # (gvk: 8/30/10) Based on conversations with Dan, we agreed to ALWAYS inherit
            # contents.  We'll use this behavior until we hear from the community that
            # contents should not be inherited.  If we don't hear anything for a while,
            # eliminate the old commented out behavior.
            #if not inherited and get_contents:
            if get_contents:
                info = info_association.info
                if info:
                    return template.get_widgets( trans.user, info.content )
            else:
                return template.get_widgets( trans.user )
        return []
    @property
    def active_library_datasets( self ):
        """Return this folder's non-deleted library datasets, sorted by name."""
        # BUG/IDIOM FIX: replaced a hand-rolled Schwartzian-transform sort built
        # on Python-2-only map( None, ... ) and xrange with sorted() + attrgetter.
        # sorted() compares only the keys and is stable, so equal names keep
        # their original relative order exactly as the old index tie-breaker did.
        active_library_datasets = [ ld for ld in self.datasets if ld.library_dataset_dataset_association and not ld.library_dataset_dataset_association.deleted ]
        return sorted( active_library_datasets, key=operator.attrgetter( 'name' ) )
    @property
    def activatable_library_datasets( self ):
        # This needs to be a list
        return [ ld for ld in self.datasets if ld.library_dataset_dataset_association and not ld.library_dataset_dataset_association.dataset.deleted ]
    @property
    def active_datasets( self ):
        # This needs to be a list
        return [ ld.library_dataset_dataset_association.dataset for ld in self.datasets if ld.library_dataset_dataset_association and not ld.library_dataset_dataset_association.deleted ]
    def get_display_name( self ):
        # Library folder name can be either a string or a unicode object.  If string,
        # convert to unicode object assuming 'utf-8' format.
        name = self.name
        if isinstance( name, str ):
            name = unicode( name, 'utf-8' )
        return name
    def get_api_value( self, view='collection' ):
        """Return a dict of this folder's attributes for the given API view,
        including the name of any associated template."""
        rval = {}
        info_association, inherited = self.get_info_association()
        if info_association:
            if inherited:
                template = info_association.template.current.latest_form
            else:
                template = info_association.template
            rval['data_template'] = template.name
        try:
            visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
        except AttributeError:
            raise Exception( 'Unknown API view: %s' % view )
        for key in visible_keys:
            try:
                rval[key] = self.__getattribute__( key )
            except AttributeError:
                rval[key] = None
        return rval
    @property
    def parent_library( self ):
        """Walk up the folder tree and return the owning Library."""
        f = self
        while f.parent:
            f = f.parent
        return f.library_root[0]
1054 | |
---|
class LibraryDataset( object ):
    """Folder-level handle for a dataset in a library.

    This class acts as a proxy to the currently selected LDDA: name/info
    reads are forwarded to the current version when one exists.
    """
    upload_options = [ ( 'upload_file', 'Upload files' ),
                       ( 'upload_directory', 'Upload directory of files' ),
                       ( 'upload_paths', 'Upload files from filesystem paths' ),
                       ( 'import_from_history', 'Import datasets from your current history' ) ]
    def __init__( self, folder=None, order_id=None, name=None, info=None, library_dataset_dataset_association=None, **kwd ):
        self.folder = folder
        self.order_id = order_id
        self.name = name
        self.info = info
        self.library_dataset_dataset_association = library_dataset_dataset_association
    def set_library_dataset_dataset_association( self, ldda ):
        """Make ldda the current version of this library dataset and flush both."""
        self.library_dataset_dataset_association = ldda
        ldda.library_dataset = self
        object_session( self ).add_all( ( ldda, self ) )
        object_session( self ).flush()
    def get_info( self ):
        # Proxy to the current LDDA when one exists; otherwise the local value.
        if self.library_dataset_dataset_association:
            return self.library_dataset_dataset_association.info
        elif self._info:
            return self._info
        else:
            return 'no info'
    def set_info( self, info ):
        self._info = info
    info = property( get_info, set_info )
    def get_name( self ):
        # Proxy to the current LDDA when one exists; otherwise the local value.
        if self.library_dataset_dataset_association:
            return self.library_dataset_dataset_association.name
        elif self._name:
            return self._name
        else:
            return 'Unnamed dataset'
    def set_name( self, name ):
        self._name = name
    name = property( get_name, set_name )
    def display_name( self ):
        """Return the current LDDA's display name."""
        # BUG FIX: the delegated value was previously computed but not returned.
        return self.library_dataset_dataset_association.display_name()
    def get_purged( self ):
        return self.library_dataset_dataset_association.dataset.purged
    def set_purged( self, purged ):
        if purged:
            raise Exception( "Not implemented" )
        if not purged and self.purged:
            raise Exception( "Cannot unpurge once purged" )
    purged = property( get_purged, set_purged )
    def get_api_value( self, view='collection' ):
        """Flatten the current LDDA, its template data, and its metadata into a dict."""
        # Since this class is a proxy to rather complex attributes we want to
        # display in other objects, we can't use the simpler method used by
        # other model classes.
        ldda = self.library_dataset_dataset_association
        template_data = {}
        for temp_info in ldda.info_association:
            template = temp_info.template
            content = temp_info.info.content
            tmp_dict = {}
            for i, field in enumerate(template.fields):
                tmp_dict[field['label']] = content[i]
            template_data[template.name] = tmp_dict
        rval = dict( name = ldda.name,
                     file_name = ldda.file_name,
                     uploaded_by = ldda.user.email,
                     message = ldda.message,
                     date_uploaded = ldda.create_time.isoformat(),
                     file_size = int( ldda.get_size() ),
                     data_type = ldda.ext,
                     genome_build = ldda.dbkey,
                     misc_info = ldda.info,
                     misc_blurb = ldda.blurb,
                     template_data = template_data )
        for name, spec in ldda.metadata.spec.items():
            val = ldda.metadata.get( name )
            if isinstance( val, MetadataFile ):
                val = val.file_name
            elif isinstance( val, list ):
                val = ', '.join( val )
            rval['metadata_' + name] = val
        return rval
1135 | |
---|
class LibraryDatasetDatasetAssociation( DatasetInstance ):
    """
    One version of a dataset inside a data library; the LibraryDataset proxy
    points at the "current" LDDA.  Mirrors HistoryDatasetAssociation but
    carries library-side relationships (library_dataset, user).
    """
    def __init__( self,
                  copied_from_history_dataset_association=None,
                  copied_from_library_dataset_dataset_association=None,
                  library_dataset=None,
                  user=None,
                  sa_session=None,
                  **kwd ):
        # FIXME: sa_session is must be passed to DataSetInstance if the create_dataset
        # parameter in kwd is True so that the new object can be flushed.  Is there a better way?
        DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
        # Only the foreign-key ids are assigned here; the ORM maintains the
        # relationship attributes themselves.
        if copied_from_history_dataset_association:
            self.copied_from_history_dataset_association_id = copied_from_history_dataset_association.id
        if copied_from_library_dataset_dataset_association:
            self.copied_from_library_dataset_dataset_association_id = copied_from_library_dataset_dataset_association.id
        self.library_dataset = library_dataset
        self.user = user
    def to_history_dataset_association( self, target_history, parent_id = None, add_to_history = False ):
        """
        Create (and flush) an HDA copy of this LDDA; children are copied
        recursively but are never themselves added to the history.
        """
        hda = HistoryDatasetAssociation( name=self.name,
                                         info=self.info,
                                         blurb=self.blurb,
                                         peek=self.peek,
                                         extension=self.extension,
                                         dbkey=self.dbkey,
                                         dataset=self.dataset,
                                         visible=self.visible,
                                         deleted=self.deleted,
                                         parent_id=parent_id,
                                         copied_from_library_dataset_dataset_association=self,
                                         history=target_history )
        object_session( self ).add( hda )
        object_session( self ).flush()
        hda.metadata = self.metadata #need to set after flushed, as MetadataFiles require dataset.id
        if add_to_history and target_history:
            target_history.add_dataset( hda )
        for child in self.children:
            child_copy = child.to_history_dataset_association( target_history = target_history, parent_id = hda.id, add_to_history = False )
        if not self.datatype.copy_safe_peek:
            hda.set_peek() #in some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
        object_session( self ).flush()
        return hda
    def copy( self, copy_children = False, parent_id = None, target_folder = None ):
        """
        Create (and flush) an LDDA copy of this LDDA, optionally copying
        children and placing the copy in target_folder.
        """
        ldda = LibraryDatasetDatasetAssociation( name=self.name,
                                                 info=self.info,
                                                 blurb=self.blurb,
                                                 peek=self.peek,
                                                 extension=self.extension,
                                                 dbkey=self.dbkey,
                                                 dataset=self.dataset,
                                                 visible=self.visible,
                                                 deleted=self.deleted,
                                                 parent_id=parent_id,
                                                 copied_from_library_dataset_dataset_association=self,
                                                 folder=target_folder )
        object_session( self ).add( ldda )
        object_session( self ).flush()
        # Need to set after flushed, as MetadataFiles require dataset.id
        ldda.metadata = self.metadata
        if copy_children:
            for child in self.children:
                child_copy = child.copy( copy_children = copy_children, parent_id = ldda.id )
        if not self.datatype.copy_safe_peek:
            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
            ldda.set_peek()
        object_session( self ).flush()
        return ldda
    def clear_associated_files( self, metadata_safe = False, purge = False ):
        # Intentionally a no-op for library datasets.
        return
    def get_access_roles( self, trans ):
        """Delegate to the underlying Dataset's access-role lookup."""
        return self.dataset.get_access_roles( trans )
    def get_info_association( self, restrict=False, inherited=False ):
        # If restrict is True, we will return this ldda's info_association whether it
        # exists or not ( in which case None will be returned ).  If restrict is False,
        # we'll return the next available info_association in the inheritable hierarchy.
        # True is also returned if the info_association was inherited, and False if not.
        # This enables us to eliminate displaying any contents of the inherited template.
        if self.info_association:
            return self.info_association[0], inherited
        if restrict:
            return None, inherited
        return self.library_dataset.folder.get_info_association( inherited=True )
    def get_template_widgets( self, trans, get_contents=True ):
        # See if we have any associated templates.  The get_contents
        # param is passed by callers that are inheriting a template - these
        # are usually new library datsets for which we want to include template
        # fields on the upload form, but not necessarily the contents of the
        # inherited template saved for the parent.
        info_association, inherited = self.get_info_association()
        if info_association:
            if inherited:
                template = info_association.template.current.latest_form
            else:
                template = info_association.template
            # See if we have any field contents, but only if the info_association was
            # not inherited ( we do not want to display the inherited contents ).
            # (gvk: 8/30/10) Based on conversations with Dan, we agreed to ALWAYS inherit
            # contents.  We'll use this behavior until we hear from the community that
            # contents should not be inherited.  If we don't hear anything for a while,
            # eliminate the old commented out behavior.
            #if not inherited and get_contents:
            # NOTE(review): when get_contents is true but no field contents exist,
            # this falls through to return [] instead of returning the bare
            # widgets as Library/LibraryFolder do - confirm intended.
            if get_contents:
                info = info_association.info
                if info:
                    return template.get_widgets( trans.user, info.content )
            else:
                return template.get_widgets( trans.user )
        return []
    def get_display_name( self ):
        """
        LibraryDatasetDatasetAssociation name can be either a string or a unicode object.
        If string, convert to unicode object assuming 'utf-8' format.
        """
        ldda_name = self.name
        if isinstance( ldda_name, str ):
            ldda_name = unicode( ldda_name, 'utf-8' )
        return ldda_name
1252 | |
---|
class LibraryInfoAssociation( object ):
    """Binds a template (form definition) and its field values to a Library."""
    def __init__( self, library, form_definition, info, inheritable=False ):
        self.inheritable = inheritable
        self.info = info
        self.template = form_definition
        self.library = library
1259 | |
---|
class LibraryFolderInfoAssociation( object ):
    """Binds a template (form definition) and its field values to a LibraryFolder."""
    def __init__( self, folder, form_definition, info, inheritable=False ):
        self.inheritable = inheritable
        self.info = info
        self.template = form_definition
        self.folder = folder
1266 | |
---|
class LibraryDatasetDatasetInfoAssociation( object ):
    """Binds a template (form definition) and its field values to an LDDA."""
    def __init__( self, library_dataset_dataset_association, form_definition, info ):
        # TODO: need to figure out if this should be inheritable to the associated LibraryDataset
        self.info = info
        self.template = form_definition
        self.library_dataset_dataset_association = library_dataset_dataset_association
1273 | |
---|
class ValidationError( object ):
    """A single validation failure recorded against a dataset."""
    def __init__( self, message=None, err_type=None, attributes=None ):
        self.attributes = attributes
        self.err_type = err_type
        self.message = message
1279 | |
---|
class DatasetToValidationErrorAssociation( object ):
    """Join object linking a dataset to one of its validation errors."""
    def __init__( self, dataset, validation_error ):
        self.validation_error = validation_error
        self.dataset = dataset
1284 | |
---|
class ImplicitlyConvertedDatasetAssociation( object ):
    """Links a converted dataset to the parent dataset it was derived from."""
    def __init__( self, id = None, parent = None, dataset = None, file_type = None, deleted = False, purged = False, metadata_safe = True ):
        self.id = id
        self.dataset = dataset            # the converted dataset
        self.parent = parent              # the dataset it was converted from
        self.type = file_type             # target extension of the conversion
        self.deleted = deleted
        self.purged = purged
        self.metadata_safe = metadata_safe

    def clear( self, purge = False ):
        """Mark the converted dataset deleted and optionally purge its file."""
        self.deleted = True
        if self.dataset:
            self.dataset.deleted = True
            self.dataset.purged = purge
        if purge: #do something with purging
            self.purged = True
            try:
                os.unlink( self.file_name )
            except Exception:
                # BUG FIX: the failure was previously dumped to stdout with a
                # py2-only `print` statement; report through the module logger
                # instead (log.exception also records the traceback).
                log.exception( "Failed to purge associated file (%s) from disk" % self.file_name )
1304 | |
---|
class Event( object ):
    """An audit-log entry tied to a history, user and/or session."""
    def __init__( self, message=None, history=None, user=None, galaxy_session=None ):
        self.message = message
        self.history = history
        self.user = user
        self.galaxy_session = galaxy_session
        # tool_id is filled in later by callers when the event concerns a tool
        self.tool_id = None
---|
1312 | |
---|
class GalaxySession( object ):
    """A browser session, possibly bound to a user, tracking visited histories."""
    def __init__( self,
                  id=None,
                  user=None,
                  remote_host=None,
                  remote_addr=None,
                  referer=None,
                  current_history=None,
                  session_key=None,
                  is_valid=False,
                  prev_session_id=None ):
        self.id = id
        self.user = user
        self.remote_host = remote_host
        self.remote_addr = remote_addr
        self.referer = referer
        self.current_history = current_history
        self.session_key = session_key
        self.is_valid = is_valid
        # Link to the session this one superseded (e.g. after login)
        self.prev_session_id = prev_session_id
        self.histories = []
    def add_history( self, history, association=None ):
        """Record that `history` was used in this session, wrapping it in a
        GalaxySessionToHistoryAssociation unless one is supplied."""
        if association is not None:
            self.histories.append( association )
        else:
            self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
---|
1339 | |
---|
class GalaxySessionToHistoryAssociation( object ):
    """Marks a history as having been visited within a session."""
    def __init__( self, galaxy_session, history ):
        self.galaxy_session, self.history = galaxy_session, history
---|
1344 | |
---|
class CloudImage( object ):
    """A machine image record for starting cloud instances (mapped attributes only)."""
    def __init__( self ):
        self.id = self.instance_id = self.state = None
---|
1350 | |
---|
class UCI( object ):
    """A user cloud-instance record (id and owning user only)."""
    def __init__( self ):
        self.id = self.user = None
---|
1355 | |
---|
class CloudInstance( object ):
    """A virtual machine started on a cloud provider."""
    def __init__( self ):
        self.id = self.user = self.name = None
        self.instance_id = self.mi = self.state = None
        self.public_dns = self.availability_zone = None
---|
1366 | |
---|
class CloudStore( object ):
    """A persistent storage volume on a cloud provider."""
    def __init__( self ):
        self.id = self.volume_id = self.user = None
        self.size = self.availability_zone = None
---|
1374 | |
---|
class CloudSnapshot( object ):
    """A snapshot taken of a cloud storage volume."""
    def __init__( self ):
        self.id = self.user = None
        self.store_id = self.snapshot_id = None
---|
1381 | |
---|
class CloudProvider( object ):
    """A cloud provider configured by a user."""
    def __init__( self ):
        self.id = self.user = self.type = None
---|
1387 | |
---|
class CloudUserCredentials( object ):
    """A user's stored cloud access/secret key pair."""
    def __init__( self ):
        self.id = self.user = self.name = None
        self.accessKey = self.secretKey = None
        self.credentials = []
---|
1396 | |
---|
class StoredWorkflow( object ):
    """A named, user-owned container for the versions of a workflow."""
    def __init__( self ):
        self.id = self.user = self.name = self.slug = None
        self.latest_workflow_id = None
        self.published = False
        self.workflows = []
---|
1406 | |
---|
class Workflow( object ):
    """A concrete workflow revision: an ordered collection of steps."""
    def __init__( self ):
        self.user = self.name = None
        self.has_cycles = self.has_errors = None
        self.steps = []
---|
1414 | |
---|
class WorkflowStep( object ):
    """One node of a workflow: a tool invocation (or input) plus its wiring."""
    def __init__( self ):
        self.id = self.type = self.tool_id = None
        self.tool_inputs = self.tool_errors = None
        self.position = self.config = None
        self.input_connections = []
---|
1425 | |
---|
class WorkflowStepConnection( object ):
    """Wires a named output of one step to a named input of another."""
    def __init__( self ):
        self.output_step_id = self.output_name = None
        self.input_step_id = self.input_name = None
---|
1432 | |
---|
class WorkflowOutput(object):
    """Marks a step output as a declared output of the workflow."""
    def __init__( self, workflow_step, output_name):
        self.workflow_step, self.output_name = workflow_step, output_name
---|
1437 | |
---|
class StoredWorkflowUserShareAssociation( object ):
    """Shares a stored workflow with another user."""
    def __init__( self ):
        self.stored_workflow = self.user = None
---|
1442 | |
---|
class StoredWorkflowMenuEntry( object ):
    """Placement of a stored workflow in a user's tool menu."""
    def __init__( self ):
        self.stored_workflow = self.user = self.order_index = None
---|
1448 | |
---|
class WorkflowInvocation( object ):
    """A record of one execution of a workflow (mapped attributes only)."""
---|
1451 | |
---|
class WorkflowInvocationStep( object ):
    """A record of one step executed within a workflow invocation (mapped attributes only)."""
---|
1454 | |
---|
class MetadataFile( object ):
    """A file holding metadata for a history or library dataset, stored under
    Dataset.file_path in a hashed directory layout."""
    def __init__( self, dataset = None, name = None ):
        # Attach to whichever kind of dataset association was passed
        if isinstance( dataset, HistoryDatasetAssociation ):
            self.history_dataset = dataset
        elif isinstance( dataset, LibraryDatasetDatasetAssociation ):
            self.library_dataset = dataset
        self.name = name
    @property
    def file_name( self ):
        """Absolute path of this metadata file; creates the hashed directory on
        first use.  Requires self.id (i.e. the object must be committed)."""
        assert self.id is not None, "ID must be set before filename used (commit the object)"
        path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
        # Create directory if it does not exist
        try:
            os.makedirs( path )
        except OSError as e:
            # Fixed: legacy `except OSError, e` comma syntax replaced with
            # `as` (valid since Python 2.6, required in Python 3).
            # File Exists is okay (another process may have created the
            # directory), otherwise reraise
            if e.errno != errno.EEXIST:
                raise
        # Return filename inside hashed directory
        return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
---|
1475 | |
---|
class FormDefinition( object ):
    """An immutable revision of a form: an ordered list of field dicts plus an
    optional multi-grid layout.

    Each entry of self.fields is a dict with at least 'type', 'label',
    'required' and 'helptext', and optionally 'layout', 'selectlist' and
    'default' (see get_widgets).
    """
    types = Bunch( REQUEST = 'Sequencing Request Form',
                   SAMPLE = 'Sequencing Sample Form',
                   LIBRARY_INFO_TEMPLATE = 'Library information template',
                   USER_INFO = 'User Information' )
    def __init__(self, name=None, desc=None, fields=None,
                 form_definition_current=None, form_type=None, layout=None):
        self.name = name
        self.desc = desc
        # Fixed: 'fields=[]' was a shared mutable default argument; every
        # instance created without fields aliased the same list object.
        self.fields = fields if fields is not None else []
        self.form_definition_current = form_definition_current
        self.type = form_type
        self.layout = layout
    def fields_of_grid(self, grid_index):
        '''
        This method returns the list of fields belonging to the given grid.
        '''
        gridfields = {}
        for i, f in enumerate(self.fields):
            if str(f['layout']) == str(grid_index):
                gridfields[i] = f
        return gridfields
    def get_widgets( self, user, contents=None, **kwd ):
        '''
        Return the list of widgets that comprise a form definition,
        including field contents if any.
        '''
        # Fixed: 'contents=[]' was a mutable default argument; None behaves
        # identically in the truthiness checks below.
        if contents is None:
            contents = []
        params = util.Params( kwd )
        widgets = []
        for index, field in enumerate( self.fields ):
            field_type = field[ 'type' ]
            field_name = 'field_%i' % index
            # determine the value of the field
            if field_name in kwd:
                # The form was submitted via refresh_on_change
                if field_type == 'CheckboxField':
                    value = CheckboxField.is_checked( params.get( field_name, False ) )
                else:
                    value = util.restore_text( params.get( field_name, '' ) )
            elif contents:
                try:
                    # This field has a saved value.
                    value = str( contents[ index ] )
                except:
                    # If there was an error getting the saved value, we'll still
                    # display the widget, but it will be empty.
                    if field_type == 'CheckboxField':
                        # Since we do not have contents, set checkbox value to False
                        value = False
                    else:
                        # Set other field types to empty string
                        value = ''
            else:
                # if none of the above, then leave the field empty
                if field_type == 'CheckboxField':
                    # Since we do not have contents, set checkbox value to False
                    value = False
                else:
                    # Set other field types to the default value of the field
                    value = field.get('default', '')
            # Create the field widget.
            # NOTE(review): eval() resolves the widget class from the stored
            # field type name; safe only while field types originate from
            # trusted form definitions -- never feed user input into 'type'.
            field_widget = eval( field_type )( field_name )
            if field_type == 'TextField':
                field_widget.set_size( 40 )
                field_widget.value = value
            elif field_type == 'TextArea':
                field_widget.set_size( 3, 40 )
                field_widget.value = value
            elif field_type == 'AddressField':
                field_widget.user = user
                field_widget.value = value
                field_widget.params = params
            elif field_type == 'WorkflowField':
                # Fixed: this branch tested field['type'] while every sibling
                # branch compares field_type (same value, now consistent).
                field_widget.user = user
                field_widget.value = value
                field_widget.params = params
            elif field_type == 'SelectField':
                for option in field[ 'selectlist' ]:
                    if option == value:
                        field_widget.add_option( option, option, selected=True )
                    else:
                        field_widget.add_option( option, option )
            elif field_type == 'CheckboxField':
                field_widget.set_checked( value )
            if field[ 'required' ] == 'required':
                req = 'Required'
            else:
                req = 'Optional'
            if field[ 'helptext' ]:
                helptext='%s (%s)' % ( field[ 'helptext' ], req )
            else:
                helptext = ''
            widgets.append( dict( label=field[ 'label' ],
                                  widget=field_widget,
                                  helptext=helptext ) )
        return widgets
---|
1572 | |
---|
class FormDefinitionCurrent( object ):
    """Pointer to the latest revision of a form definition."""
    def __init__(self, form_definition=None):
        self.latest_form = form_definition
---|
1576 | |
---|
class FormValues( object ):
    """The filled-in content for one form definition."""
    def __init__(self, form_def=None, content=None):
        self.form_definition, self.content = form_def, content
---|
1581 | |
---|
class Request( object ):
    """A sequencing request: user-submitted form values plus tracked samples.

    The request's state is not stored directly; it is derived from the most
    recent RequestEvent (see `state` / `latest_event`).
    """
    # Display labels for the request lifecycle states
    states = Bunch( NEW = 'New',
                    SUBMITTED = 'In Progress',
                    REJECTED = 'Rejected',
                    COMPLETE = 'Complete' )
    def __init__( self, name=None, desc=None, request_type=None, user=None, form_values=None, notification=None ):
        self.name = name
        self.desc = desc
        self.type = request_type
        self.values = form_values
        self.user = user
        # notification: presumably a dict with 'email' (list of recipients)
        # and 'sample_states' (state ids) keys -- see send_email_notification;
        # verify against callers
        self.notification = notification
        self.samples_list = []
    @property
    def state( self ):
        """State string from the latest event, or None if there are no events."""
        latest_event = self.latest_event
        if latest_event:
            return latest_event.state
        return None
    @property
    def latest_event( self ):
        """Most recent RequestEvent, or None.

        NOTE(review): takes self.events[0] -- assumes the events relation is
        mapped newest-first; confirm against the mapper.
        """
        if self.events:
            return self.events[0]
        return None
    @property
    def samples_have_common_state( self ):
        """
        Returns the state of this request's samples when they are all
        in one common state. Otherwise returns False.
        """
        # NOTE(review): assumes self.samples is non-empty (IndexError
        # otherwise).  Also, when every sample's state is None the second
        # loop dereferences s.state.id on None -- confirm callers guard this.
        state_for_comparison = self.samples[0].state
        if state_for_comparison is None:
            for s in self.samples:
                if s.state is not None:
                    return False
        for s in self.samples:
            if s.state.id != state_for_comparison.id:
                return False
        return state_for_comparison
    @property
    def last_comment( self ):
        """Comment of the latest event; '' when that event has no comment,
        'No comment' when there are no events at all."""
        latest_event = self.latest_event
        if latest_event:
            if latest_event.comment:
                return latest_event.comment
            return ''
        return 'No comment'
    def has_sample( self, sample_name ):
        """Return the sample named `sample_name`, or False if none matches."""
        for s in self.samples:
            if s.name == sample_name:
                return s
        return False
    @property
    def is_unsubmitted( self ):
        # New and rejected requests can still be edited and (re)submitted
        return self.state in [ self.states.REJECTED, self.states.NEW ]
    @property
    def is_rejected( self ):
        return self.state == self.states.REJECTED
    @property
    def is_submitted( self ):
        return self.state == self.states.SUBMITTED
    @property
    def is_new( self ):
        return self.state == self.states.NEW
    @property
    def is_complete( self ):
        return self.state == self.states.COMPLETE
    @property
    def samples_without_library_destinations( self ):
        # Return all samples that are not associated with a library
        samples = []
        for sample in self.samples:
            if not sample.library:
                samples.append( sample )
        return samples
    def send_email_notification( self, trans, common_state, final_state=False ):
        """Email the configured recipients that all of this request's samples
        reached `common_state`, and record the outcome as a RequestEvent.

        Returns the comment string recorded (or None when notification is not
        configured for this state).  `final_state=True` appends the
        sample -> library/folder destination listing to the message body.
        """
        # Check if an email notification is configured to be sent when the samples
        # are in this state
        if self.notification and common_state.id not in self.notification[ 'sample_states' ]:
            return
        comments = ''
        # Send email
        if trans.app.config.smtp_server is not None and self.notification and self.notification[ 'email' ]:
            host = trans.request.host.split( ':' )[0]
            if host in [ 'localhost', '127.0.0.1' ]:
                # Use the fully-qualified hostname in the From address
                host = socket.getfqdn()
            body = """
Galaxy Sample Tracking Notification
===================================

User: %(user)s

Sequencing request: %(request_name)s
Sequencer configuration: %(request_type)s
Sequencing request state: %(request_state)s

Number of samples: %(num_samples)s
All samples in state: %(sample_state)s

"""
            # create_time/submit_time are supplied for the template but not
            # currently referenced by it
            values = dict( user=self.user.email,
                           request_name=self.name,
                           request_type=self.type.name,
                           request_state=self.state,
                           num_samples=str( len( self.samples ) ),
                           sample_state=common_state.name,
                           create_time=self.create_time,
                           submit_time=self.create_time )
            body = body % values
            # check if this is the final state of the samples
            if final_state:
                txt = "Sample Name -> Data Library/Folder\r\n"
                for s in self.samples:
                    txt = txt + "%s -> %s/%s\r\n" % ( s.name, s.library.name, s.folder.name )
                body = body + txt
            to = self.notification['email']
            frm = 'galaxy-no-reply@' + host
            subject = "Galaxy Sample Tracking notification: '%s' sequencing request" % self.name
            message = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s" % ( frm, ", ".join( to ), subject, body )
            try:
                s = smtplib.SMTP()
                s.connect( trans.app.config.smtp_server )
                s.sendmail( frm, to, message )
                s.quit()
                comments = "Email notification sent to %s." % ", ".join( to ).strip().strip( ',' )
            except:
                # Best effort: any SMTP failure is reported via the event comment
                comments = "Email notification failed."
            # update the request history with the email notification event
        elif not trans.app.config.smtp_server:
            comments = "Email notification failed as SMTP server not set in config file"
        if comments:
            event = trans.app.model.RequestEvent( self, self.state, comments )
            trans.sa_session.add( event )
            trans.sa_session.flush()
        return comments
---|
1717 | |
---|
class RequestEvent( object ):
    """A state change (with optional comment) in a request's history."""
    def __init__(self, request=None, request_state=None, comment=''):
        self.request, self.state, self.comment = request, request_state, comment
---|
1723 | |
---|
class RequestType( object ):
    """Sequencer configuration: pairs a request form with a sample form and
    the data-transfer (ssh) connection info."""
    # Options for renaming datasets when transferred into a data library.
    # NOTE(review): 'Preprend' is a typo but the value may be persisted or
    # compared elsewhere -- confirm before correcting the string.
    rename_dataset_options = Bunch( NO = 'Do not rename',
                                    SAMPLE_NAME = 'Preprend sample name',
                                    EXPERIMENT_NAME = 'Prepend experiment name',
                                    EXPERIMENT_AND_SAMPLE_NAME = 'Prepend experiment and sample name')
    permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
    def __init__( self, name=None, desc=None, request_form=None, sample_form=None, datatx_info=None ):
        self.name = name
        self.desc = desc
        self.request_form = request_form
        self.sample_form = sample_form
        # datatx_info: presumably a dict with 'username', 'host', 'password'
        # keys (see Sample.dataset_size) -- verify against callers
        self.datatx_info = datatx_info
    @property
    def state( self ):
        """Final SampleState configured for this request type.

        self.states is provided by the ORM mapper (not visible here)."""
        # The states mapper for this object orders ascending
        return self.states[-1]
---|
1740 | |
---|
class RequestTypePermissions( object ):
    """Grants `role` the given action on a request type."""
    def __init__( self, action, request_type, role ):
        self.action, self.request_type, self.role = action, request_type, role
---|
1746 | |
---|
class Sample( object ):
    """A sample within a sequencing request, tracking its state and the
    transfer of its sequencer files into a data library."""
    bulk_operations = Bunch( CHANGE_STATE = 'Change state',
                             SELECT_LIBRARY = 'Select data library and folder' )
    transfer_status = Bunch( NOT_STARTED = 'Not started',
                             IN_QUEUE = 'In queue',
                             TRANSFERRING = 'Transferring dataset',
                             ADD_TO_LIBRARY = 'Adding to data library',
                             COMPLETE = 'Complete',
                             ERROR = 'Error' )
    def __init__(self, name=None, desc=None, request=None, form_values=None, bar_code=None, library=None, folder=None):
        self.name = name
        self.desc = desc
        self.request = request
        self.values = form_values
        self.bar_code = bar_code
        self.library = library
        self.folder = folder
    @property
    def state( self ):
        """State of the latest SampleEvent, or None when there are no events."""
        event = self.latest_event
        return event.state if event else None
    @property
    def latest_event( self ):
        """Most recent SampleEvent, or None."""
        return self.events[0] if self.events else None
    @property
    def untransferred_dataset_files( self ):
        """Datasets whose transfer has not started."""
        return [ d for d in self.datasets
                 if d.status == self.transfer_status.NOT_STARTED ]
    @property
    def inprogress_dataset_files( self ):
        """Datasets currently somewhere between 'not started' and 'complete'."""
        return [ d for d in self.datasets
                 if d.status not in [ self.transfer_status.NOT_STARTED, self.transfer_status.COMPLETE ] ]
    @property
    def transferred_dataset_files( self ):
        """Datasets whose transfer has completed."""
        return [ d for d in self.datasets
                 if d.status == self.transfer_status.COMPLETE ]
    def dataset_size( self, filepath ):
        """Return the human-readable size of `filepath` on the sequencer host,
        obtained by running `du -sh` over ssh (password fed via pexpect)."""
        def print_ticks(d):
            # swallow pexpect timeout ticks while waiting for the password prompt
            pass
        datatx_info = self.request.type.datatx_info
        cmd = 'ssh %s@%s "du -sh \'%s\'"' % ( datatx_info['username'],
                                              datatx_info['host'],
                                              filepath)
        output = pexpect.run(cmd, events={'.ssword:*': datatx_info['password']+'\r\n',
                                          pexpect.TIMEOUT:print_ticks},
                             timeout=10)
        return output.replace(filepath, '').strip()
---|
1807 | |
---|
class SampleState( object ):
    """A named state a sample may pass through, configured per request type."""
    def __init__(self, name=None, desc=None, request_type=None):
        self.name, self.desc, self.request_type = name, desc, request_type
---|
1813 | |
---|
class SampleEvent( object ):
    """A sample's transition into a state, with an optional comment."""
    def __init__(self, sample=None, sample_state=None, comment=''):
        self.sample, self.state, self.comment = sample, sample_state, comment
---|
1819 | |
---|
class SampleDataset( object ):
    """A sequencer file tracked for transfer into Galaxy for a sample."""
    def __init__(self, sample=None, name=None, file_path=None,
                 status=None, error_msg=None, size=None):
        self.sample, self.name = sample, name
        self.file_path, self.status = file_path, status
        self.error_msg, self.size = error_msg, size
---|
1829 | |
---|
class UserAddress( object ):
    """A postal address saved by a user (used e.g. for sample shipping)."""
    def __init__(self, user=None, desc=None, name=None, institution=None,
                 address=None, city=None, state=None, postal_code=None,
                 country=None, phone=None):
        self.user = user
        self.desc = desc
        self.name = name
        self.institution = institution
        self.address = address
        self.city = city
        self.state = state
        self.postal_code = postal_code
        self.country = country
        self.phone = phone
    def get_html(self):
        """Render the populated address parts as an HTML fragment.

        NOTE(review): values are interpolated without HTML-escaping.
        """
        # (separator, value) pairs in display order; state and postal code
        # continue the city line with spaces, everything else starts a new line
        pieces = [ ( '', self.name ),
                   ( '<br/>', self.institution ),
                   ( '<br/>', self.address ),
                   ( '<br/>', self.city ),
                   ( ' ', self.state ),
                   ( ' ', self.postal_code ),
                   ( '<br/>', self.country ),
                   ( '<br/>Phone: ', self.phone ) ]
        html = ''
        for separator, value in pieces:
            if value:
                html += separator + value
        return html
---|
1863 | |
---|
class Page( object ):
    """A user-authored page with revisions and sharing flags."""
    def __init__( self ):
        self.id = self.user = self.title = self.slug = None
        self.latest_revision_id = None
        self.revisions = []
        self.importable = self.published = None
---|
1874 | |
---|
class PageRevision( object ):
    """One saved version of a page's content."""
    def __init__( self ):
        self.user = self.title = self.content = None
---|
1880 | |
---|
class PageUserShareAssociation( object ):
    """Shares a page with another user."""
    def __init__( self ):
        self.page = self.user = None
---|
1885 | |
---|
class Visualization( object ):
    """A saved, revisioned visualization owned by a user."""
    def __init__( self ):
        self.id = self.user = self.type = self.title = None
        self.latest_revision = None
        self.revisions = []
---|
1894 | |
---|
class VisualizationRevision( object ):
    """One saved version of a visualization's configuration."""
    def __init__( self ):
        self.id = self.visualization = None
        self.title = self.config = None
---|
1901 | |
---|
class VisualizationUserShareAssociation( object ):
    """Shares a visualization with another user."""
    def __init__( self ):
        self.visualization = self.user = None
---|
1906 | |
---|
class Tag ( object ):
    """A (possibly hierarchical, via parent_id) tag name."""
    def __init__( self, id=None, type=None, parent_id=None, name=None ):
        self.id = id
        self.type = type
        self.parent_id = parent_id
        self.name = name

    def __str__ ( self ):
        # Fixed: the original formatted type with %i, which raises TypeError
        # whenever type is None (its default); %s renders ints identically.
        return "Tag(id=%s, type=%s, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
---|
1916 | |
---|
class ItemTagAssociation ( object ):
    """Base class applying a tag (with an optional value) to an item for a user."""
    def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
        self.id = id
        self.user = user
        self.item_id = item_id
        self.tag_id = tag_id
        self.user_tname = user_tname
        # Fixed: the `value` argument was silently discarded (self.value was
        # hard-coded to None); store it like every other parameter.  Callers
        # not passing value still get None.
        self.value = value
        self.user_value = None
---|
1926 | |
---|
class HistoryTagAssociation ( ItemTagAssociation ):
    """Tag applied to a History."""
---|
1929 | |
---|
class DatasetTagAssociation ( ItemTagAssociation ):
    """Tag applied to a Dataset."""
---|
1932 | |
---|
class HistoryDatasetAssociationTagAssociation ( ItemTagAssociation ):
    """Tag applied to a HistoryDatasetAssociation."""
---|
1935 | |
---|
class PageTagAssociation ( ItemTagAssociation ):
    """Tag applied to a Page."""
---|
1938 | |
---|
class WorkflowStepTagAssociation ( ItemTagAssociation ):
    """Tag applied to a WorkflowStep."""
---|
1941 | |
---|
class StoredWorkflowTagAssociation ( ItemTagAssociation ):
    """Tag applied to a StoredWorkflow."""
---|
1944 | |
---|
class VisualizationTagAssociation ( ItemTagAssociation ):
    """Tag applied to a Visualization."""
---|
1947 | |
---|
1948 | # Item annotation classes. |
---|
1949 | |
---|
class HistoryAnnotationAssociation( object ):
    """A user's annotation attached to a History (mapped attributes only)."""
---|
1952 | |
---|
class HistoryDatasetAssociationAnnotationAssociation( object ):
    """A user's annotation attached to a HistoryDatasetAssociation (mapped attributes only)."""
---|
1955 | |
---|
class StoredWorkflowAnnotationAssociation( object ):
    """A user's annotation attached to a StoredWorkflow (mapped attributes only)."""
---|
1958 | |
---|
class WorkflowStepAnnotationAssociation( object ):
    """A user's annotation attached to a WorkflowStep (mapped attributes only)."""
---|
1961 | |
---|
class PageAnnotationAssociation( object ):
    """A user's annotation attached to a Page (mapped attributes only)."""
---|
1964 | |
---|
class VisualizationAnnotationAssociation( object ):
    """A user's annotation attached to a Visualization (mapped attributes only)."""
---|
1967 | |
---|
1968 | # Item rating classes. |
---|
1969 | |
---|
class ItemRatingAssociation( object ):
    """Base class for a user's numeric rating of an item."""
    def __init__( self, id=None, user=None, item=None, rating=0 ):
        self.id, self.user = id, user
        self.item, self.rating = item, rating

    def set_item( self, item ):
        """Subclasses bind `item` to their properly named attribute."""
        pass
---|
1980 | |
---|
class HistoryRatingAssociation( ItemRatingAssociation ):
    """Rating of a History."""
    def set_item( self, history ):
        self.history = history
---|
1984 | |
---|
class HistoryDatasetAssociationRatingAssociation( ItemRatingAssociation ):
    """Rating of a HistoryDatasetAssociation."""
    def set_item( self, history_dataset_association ):
        self.history_dataset_association = history_dataset_association
---|
1988 | |
---|
class StoredWorkflowRatingAssociation( ItemRatingAssociation ):
    """Rating of a StoredWorkflow."""
    def set_item( self, stored_workflow ):
        self.stored_workflow = stored_workflow
---|
1992 | |
---|
class PageRatingAssociation( ItemRatingAssociation ):
    """Rating of a Page."""
    def set_item( self, page ):
        self.page = page
---|
1996 | |
---|
class VisualizationRatingAssociation( ItemRatingAssociation ):
    """Rating of a Visualization."""
    def set_item( self, visualization ):
        self.visualization = visualization
---|
2000 | |
---|
class UserPreference ( object ):
    """A single name/value preference setting for a user."""
    def __init__( self, name=None, value=None ):
        self.name, self.value = name, value
---|
2005 | |
---|
class UserAction( object ):
    """A logged user action (for usage tracking)."""
    def __init__( self, id=None, create_time=None, user_id=None, session_id=None, action=None, params=None, context=None):
        self.id, self.create_time = id, create_time
        self.user_id, self.session_id = user_id, session_id
        self.action, self.params, self.context = action, params, context
---|
2015 | |
---|
class APIKeys( object ):
    """An API key issued to a user (mapped attributes only)."""
---|
2018 | |
---|
2019 | ## ---- Utility methods ------------------------------------------------------- |
---|
2020 | |
---|
def directory_hash_id( id ):
    """Break a numeric id into a list of three-digit directory names so that
    at most 1000 files share one leaf directory (ids 0-999 all map to
    [\"000\"]).

    Note the padding intentionally prepends a full \"000\" level when the id's
    length is already a multiple of three -- existing on-disk layouts depend
    on this, so it must be preserved.
    """
    s = str( id )
    # Shortcut -- ids 0-999 go under ../000/
    if len( s ) < 4:
        return [ "000" ]
    # Pad with zeros until a multiple of three, then drop the last three
    # digits -- 1000 files per directory
    padded = "0" * ( 3 - len( s ) % 3 ) + s
    padded = padded[:-3]
    # Break into chunks of three
    return [ padded[ i : i + 3 ] for i in range( 0, len( padded ), 3 ) ]
---|