1 | """ |
---|
2 | Migration script to add the sample_dataset table and remove the 'dataset_files' column |
---|
3 | from the 'sample' table |
---|
4 | """ |
---|
5 | |
---|
6 | from sqlalchemy import * |
---|
7 | from sqlalchemy.orm import * |
---|
8 | from migrate import * |
---|
9 | from migrate.changeset import * |
---|
10 | from sqlalchemy.exc import * |
---|
11 | |
---|
12 | from galaxy.model.custom_types import * |
---|
13 | from galaxy.util.json import from_json_string, to_json_string |
---|
14 | |
---|
import datetime
# Timestamps recorded through the ORM default below are UTC.
now = datetime.datetime.utcnow

import logging
log = logging.getLogger( __name__ )

# NOTE: 'migrate_engine' is not defined in this file -- the
# sqlalchemy-migrate framework injects it into the module namespace
# before executing the script.
metadata = MetaData( migrate_engine )
# Autocommitting session used for the raw SQL data-migration statements
# issued by upgrade().
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
---|
23 | |
---|
24 | |
---|
def nextval( table, col='id' ):
    """
    Return the SQL expression that yields the next primary-key value for
    ``table``.``col`` on the current database.

    Postgres draws from the implicit ``<table>_<col>_seq`` sequence;
    MySQL and SQLite auto-generate the id when NULL is inserted.
    Raises Exception for any other dialect.
    """
    # Older SQLAlchemy reports the dialect name as 'postgres', newer
    # releases as 'postgresql' -- accept both.
    if migrate_engine.name in ( 'postgres', 'postgresql' ):
        return "nextval('%s_%s_seq')" % ( table, col )
    elif migrate_engine.name in ( 'mysql', 'sqlite' ):
        return "null"
    else:
        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
---|
32 | |
---|
def localtimestamp():
    """
    Return a SQL expression that evaluates to the current local timestamp
    on the current database.  Raises Exception for unknown dialects.
    """
    # Accept both the old ('postgres') and new ('postgresql') dialect names.
    if migrate_engine.name in ( 'postgres', 'postgresql', 'mysql' ):
        return "LOCALTIMESTAMP"
    elif migrate_engine.name == 'sqlite':
        return "current_date || ' ' || current_time"
    else:
        # Bug fix: this branch formerly interpolated an undefined name
        # 'db', raising NameError instead of the intended message.
        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
---|
40 | |
---|
# New table holding one row per dataset transferred for a sample.  It
# replaces the JSON blob formerly kept in sample.dataset_files; upgrade()
# migrates each blob entry into a row here.  NOTE: column order is
# load-bearing -- upgrade() INSERTs without naming columns.
SampleDataset_table = Table('sample_dataset', metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "create_time", DateTime, default=now ),
    Column( "update_time", DateTime, default=now, onupdate=now ),
    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
    Column( "name", TrimmedString( 255 ), nullable=False ),
    Column( "file_path", TrimmedString( 255 ), nullable=False ),
    Column( "status", TrimmedString( 255 ), nullable=False ),
    Column( "error_msg", TEXT ),
    # Stored as text, not an integer -- historical values were
    # human-readable strings (see the cleanup in upgrade()).
    Column( "size", TrimmedString( 255 ) ) )
---|
51 | |
---|
52 | def upgrade(): |
---|
53 | print __doc__ |
---|
54 | metadata.reflect() |
---|
55 | try: |
---|
56 | SampleDataset_table.create() |
---|
57 | except Exception, e: |
---|
58 | log.debug( "Creating sample_dataset table failed: %s" % str( e ) ) |
---|
59 | |
---|
60 | cmd = "SELECT id, dataset_files FROM sample" |
---|
61 | result = db_session.execute( cmd ) |
---|
62 | for r in result: |
---|
63 | sample_id = r[0] |
---|
64 | if r[1]: |
---|
65 | dataset_files = from_json_string(r[1]) |
---|
66 | for df in dataset_files: |
---|
67 | if type(df) == type(dict()): |
---|
68 | cmd = "INSERT INTO sample_dataset VALUES (%s, %s, %s, %s, '%s', '%s', '%s', '%s', '%s')" |
---|
69 | cmd = cmd % ( nextval('sample_dataset'), |
---|
70 | localtimestamp(), |
---|
71 | localtimestamp(), |
---|
72 | str(sample_id), |
---|
73 | df.get('name', ''), |
---|
74 | df.get('filepath', ''), |
---|
75 | df.get('status', '').replace('"', '').replace("'", ""), |
---|
76 | "", |
---|
77 | df.get('size', '').replace('"', '').replace("'", "").replace(df.get('filepath', ''), '').strip() ) |
---|
78 | db_session.execute( cmd ) |
---|
79 | |
---|
80 | # Delete the dataset_files column in the Sample table |
---|
81 | try: |
---|
82 | Sample_table = Table( "sample", metadata, autoload=True ) |
---|
83 | except NoSuchTableError: |
---|
84 | Sample_table = None |
---|
85 | log.debug( "Failed loading table sample" ) |
---|
86 | if Sample_table: |
---|
87 | try: |
---|
88 | Sample_table.c.dataset_files.drop() |
---|
89 | except Exception, e: |
---|
90 | log.debug( "Deleting column 'dataset_files' from the 'sample' table failed: %s" % ( str( e ) ) ) |
---|
91 | |
---|
92 | |
---|
def downgrade():
    """
    Deliberate no-op: upgrade() discards the original 'dataset_files'
    JSON blobs while migrating them, so this migration cannot be
    reversed without data loss.
    """
    pass
---|