import ConfigParser
import sys, os
import array
import time
import optparse
import shutil, traceback
import urllib, urllib2, cookielib

assert sys.version_info[:2] >= ( 2, 4 )
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
sys.path = new_path

from galaxy import eggs
import pkg_resources
pkg_resources.require( "pycrypto" )

from Crypto.Cipher import Blowfish
from Crypto.Util.randpool import RandomPool
from Crypto.Util import number


class GalaxyWebInterface(object):
    def __init__(self, server_host, server_port, datatx_email, datatx_password, config_id_secret):
        self.server_host = server_host
        self.server_port = server_port
        self.datatx_email = datatx_email
        self.datatx_password = datatx_password
        self.config_id_secret = config_id_secret
        # create the base url of the Galaxy server
        self.base_url = "http://%s:%s" % (self.server_host, self.server_port)
        # log in as the data transfer user
        url = "%s/user/login?email=%s&password=%s&login_button=Login" % (self.base_url, self.datatx_email, self.datatx_password)
        cj = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        f = self.opener.open(url)
        # on success the returned page contains "Logged in as <email>"
        if f.read().find("ogged in as "+self.datatx_email) == -1:
            # if the user doesn't exist, create it and log in
            url = "%s/user/create?email=%s&username=%s&password=%s&confirm=%s&create_user_button=Submit" % ( self.base_url, self.datatx_email, self.datatx_email, self.datatx_password, self.datatx_password )
            f = self.opener.open(url)
            if f.read().find("ogged in as "+self.datatx_email) == -1:
                raise Exception("The "+self.datatx_email+" user could not login to Galaxy")

    def add_to_library(self, server_dir, library_id, folder_id, dbkey=''):
        '''
        This method adds the dataset file to the target data library & folder
        by opening the corresponding url on the running Galaxy server.
        '''
        params = urllib.urlencode(dict( cntrller='library_admin',
                                        tool_id='upload1',
                                        tool_state='None',
                                        library_id=self.encode_id(library_id),
                                        folder_id=self.encode_id(folder_id),
                                        upload_option='upload_directory',
                                        file_type='auto',
                                        server_dir=os.path.basename(server_dir),
                                        dbkey=dbkey,
                                        show_dataset_id='True',
                                        runtool_btn='Upload to library'))
        url = self.base_url+"/library_common/upload_library_dataset"
        print url
        print params
        try:
            f = self.opener.open(url, params)
            if f.read().find("Data Library") == -1:
                raise Exception("Dataset could not be uploaded to the data library. URL: %s, PARAMS=%s" % (url, params))
        except:
            # report the failure to the caller instead of propagating the exception
            return 'ERROR', url, params

    def import_to_history(self, ldda_id, library_id, folder_id):
        params = urllib.urlencode(dict( cntrller='library_admin',
                                        show_deleted='False',
                                        library_id=self.encode_id(library_id),
                                        folder_id=self.encode_id(folder_id),
                                        ldda_ids=self.encode_id(ldda_id),
                                        do_action='import_to_history',
                                        use_panels='False'))
        url = self.base_url+"/library_common/act_on_multiple_datasets"
        f = self.opener.open(url, params)
        x = f.read()
        if x.find("1 dataset(s) have been imported into your history.") == -1:
            raise Exception("Dataset could not be imported into history")

    def run_workflow(self, workflow_id, hid, workflow_step):
        # build the form field name for this workflow step's input dataset
        input = str(workflow_step)+'|input'
        params = urllib.urlencode({'id': self.encode_id(workflow_id),
                                   'run_workflow': 'Run workflow',
                                   input: hid})
        url = self.base_url+"/workflow/run"
        f = self.opener.open(url, params)

    def logout(self):
        # finally logout
        f = self.opener.open(self.base_url+'/user/logout')

    def encode_id(self, obj_id):
        id_cipher = Blowfish.new( self.config_id_secret )
        # Convert to string
        s = str( obj_id )
        # Pad to a multiple of 8 with leading "!"
        s = ( "!" * ( 8 - len(s) % 8 ) ) + s
        # Encrypt
        return id_cipher.encrypt( s ).encode( 'hex' )

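    # Note: decoding is not needed by this script. For reference, the inverse
    # of encode_id under the same Blowfish/ECB scheme and id_secret would look
    # roughly like this (a sketch, not part of the original interface):
    #
    #     def decode_id(self, encoded_id):
    #         id_cipher = Blowfish.new( self.config_id_secret )
    #         return id_cipher.decrypt( encoded_id.decode( 'hex' ) ).lstrip( '!' )
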
    def update_request_state(self, request_id):
        params = urllib.urlencode(dict( cntrller='requests_admin',
                                        request_id=request_id))
        url = self.base_url + "/requests_common/update_request_state"
        f = self.opener.open(url, params)
        print url
        print params
        x = f.read()
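
# A minimal usage sketch (illustrative only): the host, port, credentials, and
# library/folder ids below are hypothetical placeholders, not values shipped
# with Galaxy; a real caller would take them from its own configuration. The
# sequence shows the intended flow: log in (or create the data transfer user)
# on construction, upload a server-side directory to a data library, and log out.
if __name__ == '__main__':
    galaxyweb = GalaxyWebInterface( 'localhost', '8080',
                                    'datatx@example.org', 'datatx_password',
                                    'some_id_secret' )
    galaxyweb.add_to_library( '/path/to/dataset_dir', library_id=1, folder_id=1 )
    galaxyweb.logout()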