1 | """ |
---|
2 | Universe configuration builder. |
---|
3 | """ |
---|
4 | |
---|
5 | import sys, os |
---|
6 | import logging, logging.config |
---|
7 | from optparse import OptionParser |
---|
8 | import ConfigParser |
---|
9 | from galaxy.util import string_as_bool |
---|
10 | |
---|
11 | from galaxy import eggs |
---|
12 | import pkg_resources |
---|
13 | |
---|
14 | log = logging.getLogger( __name__ ) |
---|
15 | |
---|
def resolve_path( path, root ):
    """If 'path' is relative make absolute by prepending 'root'"""
    if os.path.isabs( path ):
        # Already absolute -- leave it untouched.
        return path
    return os.path.join( root, path )
class ConfigurationError( Exception ):
    """Raised when the configuration is invalid or incomplete."""
class Configuration( object ):
    """
    Galaxy runtime configuration, built from the keyword arguments that
    PasteDeploy parses out of the ini file.  Relative paths are resolved
    against ``root_dir`` and string-valued flags are coerced to booleans
    with ``string_as_bool``.
    """
    # Options that are still accepted but scheduled for removal; check()
    # logs a warning for each one present in the config.
    deprecated_options = ( 'database_file', )
    def __init__( self, **kwargs ):
        # Keep the raw option dict around for get()/get_bool() and the
        # deprecation scan in check().
        self.config_dict = kwargs
        self.root = kwargs.get( 'root_dir', '.' )
        # Collect the umask and primary gid from the environment
        self.umask = os.umask( 0o77 ) # get the current umask (0o77 == 077 octal); can't read w/o setting
        os.umask( self.umask ) # so set it back
        self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
        # Database related configuration
        self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
        self.database_connection = kwargs.get( "database_connection", False )
        self.database_engine_options = get_database_engine_options( kwargs )
        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
        self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )
        # Where dataset files are stored
        self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
        self.cookie_path = kwargs.get( "cookie_path", "/" )
        # web API
        self.enable_api = string_as_bool( kwargs.get( 'enable_api', False ) )
        # dataset Track files
        self.track_store_path = kwargs.get( "track_store_path", "${extra_files_path}/tracks" )
        # Tool locations.  NOTE: tool_data_path is resolved against the
        # current working directory, not root -- preserved from the
        # original behavior.
        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
        self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
        self.tool_config = resolve_path( kwargs.get( 'tool_config_file', 'tool_conf.xml' ), self.root )
        self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
        self.tool_secret = kwargs.get( "tool_secret", "" )
        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
        self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
        self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
        # Remote-user authentication and account policy
        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
        self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
        # Job queue / cluster options
        self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
        self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
        self.job_queue_cleanup_interval = int( kwargs.get( "job_queue_cleanup_interval", "5" ) )
        self.cluster_files_directory = os.path.abspath( kwargs.get( "cluster_files_directory", "database/pbs" ) )
        self.job_working_directory = resolve_path( kwargs.get( "job_working_directory", "database/job_working_directory" ), self.root )
        self.outputs_to_working_directory = string_as_bool( kwargs.get( 'outputs_to_working_directory', False ) )
        self.output_size_limit = int( kwargs.get( 'output_size_limit', 0 ) )
        self.job_walltime = kwargs.get( 'job_walltime', None )
        self.admin_users = kwargs.get( "admin_users", "" )
        self.mailing_join_addr = kwargs.get( 'mailing_join_addr', "galaxy-user-join@bx.psu.edu" )
        self.error_email_to = kwargs.get( 'error_email_to', None )
        self.smtp_server = kwargs.get( 'smtp_server', None )
        self.start_job_runners = kwargs.get( 'start_job_runners', None )
        self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
        self.pbs_application_server = kwargs.get( 'pbs_application_server', "" )
        self.pbs_dataset_server = kwargs.get( 'pbs_dataset_server', "" )
        self.pbs_dataset_path = kwargs.get( 'pbs_dataset_path', "" )
        self.pbs_stage_path = kwargs.get( 'pbs_stage_path', "" )
        self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
        self.use_memdump = string_as_bool( kwargs.get( 'use_memdump', 'False' ) )
        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
        self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
        # External display sites: comma-separated lists, lowercased
        self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
        self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "wormbase,tair,modencode_worm,modencode_fly" ).lower().split(",")
        self.genetrack_display_sites = kwargs.get( 'genetrack_display_sites', "main,test" ).lower().split(",")
        # Branding / site links
        self.brand = kwargs.get( 'brand', None )
        self.wiki_url = kwargs.get( 'wiki_url', 'http://g2.trac.bx.psu.edu/' )
        self.bugs_email = kwargs.get( 'bugs_email', None )
        self.blog_url = kwargs.get( 'blog_url', None )
        self.screencasts_url = kwargs.get( 'screencasts_url', None )
        # Library import directories must exist up front if configured
        self.library_import_dir = kwargs.get( 'library_import_dir', None )
        if self.library_import_dir is not None and not os.path.exists( self.library_import_dir ):
            raise ConfigurationError( "library_import_dir specified in config (%s) does not exist" % self.library_import_dir )
        self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
        if self.user_library_import_dir is not None and not os.path.exists( self.user_library_import_dir ):
            raise ConfigurationError( "user_library_import_dir specified in config (%s) does not exist" % self.user_library_import_dir )
        self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
        self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
        # Location for dependencies
        if 'tool_dependency_dir' in kwargs:
            self.tool_dependency_dir = resolve_path( kwargs.get( "tool_dependency_dir" ), self.root )
            self.use_tool_dependencies = True
        else:
            self.tool_dependency_dir = None
            self.use_tool_dependencies = False
        # Configuration options for taking advantage of nginx features
        self.upstream_gzip = string_as_bool( kwargs.get( 'upstream_gzip', False ) )
        self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
        self.nginx_x_archive_files_base = kwargs.get( 'nginx_x_archive_files_base', False )
        self.nginx_upload_store = kwargs.get( 'nginx_upload_store', False )
        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
        if self.nginx_upload_store:
            self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
        # Parse global_conf and save the parser
        global_conf = kwargs.get( 'global_conf', None )
        global_conf_parser = ConfigParser.ConfigParser()
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            global_conf_parser.read( global_conf['__file__'] )
        # Heartbeat log file name override.  Fall back to the same default
        # when run without PasteDeploy (global_conf is None) so the
        # attribute always exists (previously it was left unset).
        if global_conf is not None:
            self.heartbeat_log = global_conf.get( 'heartbeat_log', 'heartbeat.log' )
        else:
            self.heartbeat_log = 'heartbeat.log'
        # Store per-tool runner config
        try:
            self.tool_runners = global_conf_parser.items( "galaxy:tool_runners" )
        except ConfigParser.NoSectionError:
            self.tool_runners = []
        self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
        # Cloud configuration options: execution defaults on only for
        # controller instances.
        self.cloud_controller_instance = string_as_bool( kwargs.get( 'cloud_controller_instance', 'False' ) )
        if self.cloud_controller_instance:
            self.enable_cloud_execution = string_as_bool( kwargs.get( 'enable_cloud_execution', 'True' ) )
        else:
            self.enable_cloud_execution = string_as_bool( kwargs.get( 'enable_cloud_execution', 'False' ) )
        # Galaxy messaging (AMQP) configuration options
        try:
            amqp_config = global_conf_parser.items( "galaxy_amqp" )
        except ConfigParser.NoSectionError:
            amqp_config = []  # items() returns a list of pairs; match that type
        self.amqp = dict( amqp_config )
    def get( self, key, default ):
        """Return the raw config value for 'key', or 'default' if unset."""
        return self.config_dict.get( key, default )
    def get_bool( self, key, default ):
        """Return the config value for 'key' coerced to a bool, or 'default' if unset."""
        if key in self.config_dict:
            return string_as_bool( self.config_dict[key] )
        else:
            return default
    def check( self ):
        """
        Validate the configuration: required directories and files must
        exist and configured job runner eggs must be loadable.  Raises
        ConfigurationError (or EggNotFetchable / Exception for runner
        problems) on the first failure; also warns on deprecated options.
        """
        # Check that required directories exist
        for path in self.root, self.file_path, self.tool_path, self.tool_data_path, self.template_path, self.job_working_directory, self.cluster_files_directory:
            if not os.path.isdir( path ):
                raise ConfigurationError( "Directory does not exist: %s" % path )
        # Check that required files exist
        for path in self.tool_config, self.datatypes_config:
            if not os.path.isfile( path ):
                raise ConfigurationError( "File not found: %s" % path )
        # Check job runners so the admin can scramble dependent egg.
        if self.start_job_runners is not None:
            runner_to_egg = dict( pbs = 'pbs_python', sge = 'DRMAA_python', drmaa = 'drmaa' )
            for runner in self.start_job_runners.split( ',' ):
                try:
                    pkg_resources.require( runner_to_egg[runner] )
                except eggs.EggNotFetchable:
                    # the bound exception value was never used, so don't bind it
                    raise eggs.EggNotFetchable( 'You must scramble the %s egg to use the %s job runner. Instructions are available at:\n http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster' % ( runner_to_egg[runner], runner ) )
                except KeyError:
                    raise Exception( 'No such job runner: %s. Please double-check the value of start_job_runners in universe_wsgi.ini' % runner )
        # Check for deprecated options.
        for key in self.config_dict:
            if key in self.deprecated_options:
                log.warning( "Config option '%s' is deprecated and will be removed in a future release. Please consult the latest version of the sample configuration file." % key )

    def is_admin_user( self, user ):
        """
        Determine if the provided user is listed in `admin_users`.

        NOTE: This is temporary, admin users will likely be specified in the
        database in the future.
        """
        admin_users = self.get( "admin_users", "" ).split( "," )
        return ( user is not None and user.email in admin_users )
def get_database_engine_options( kwargs ):
    """
    Allow options for the SQLAlchemy database engine to be passed by using
    the prefix "database_engine_option_".

    Known options are coerced to their proper type (bool/int); unrecognized
    option names are passed through unchanged as strings.
    """
    conversions = {
        'convert_unicode': string_as_bool,
        'pool_timeout': int,
        'echo': string_as_bool,
        'echo_pool': string_as_bool,
        'pool_recycle': int,
        'pool_size': int,
        'max_overflow': int,
        'pool_threadlocal': string_as_bool,
        'server_side_cursors': string_as_bool
    }
    prefix = "database_engine_option_"
    prefix_len = len( prefix )
    rval = {}
    # items() rather than the Python 2-only iteritems(); behavior is the
    # same for iteration and the code stays source-compatible with Python 3.
    for key, value in kwargs.items():
        if key.startswith( prefix ):
            key = key[prefix_len:]
            if key in conversions:
                value = conversions[key]( value )
            rval[ key ] = value
    return rval
def configure_logging( config ):
    """
    Allow some basic logging configuration to be read from the cherrypy
    config.

    Does nothing when the ini file contained a 'loggers' section (in which
    case PasteScript has already configured logging); otherwise sets up the
    root logger from the 'log_format', 'log_level' and 'log_destination'
    config options.
    """
    # PasteScript will have already configured the logger if the appropriate
    # sections were found in the config file, so we do nothing if the
    # config has a loggers section, otherwise we do some simple setup
    # using the 'log_*' values from the config.
    if config.global_conf_parser.has_section( "loggers" ):
        return
    # 'log_format' renamed locally so we don't shadow the builtin format()
    log_format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
    # getLevelName() maps a level name back to its numeric value; this is
    # the public replacement for the private logging._levelNames mapping.
    level = logging.getLevelName( config.get( "log_level", "DEBUG" ) )
    destination = config.get( "log_destination", "stdout" )
    # same logger object as the module-level 'log'
    logging.getLogger( __name__ ).info( "Logging at '%s' level to '%s'" % ( level, destination ) )
    # Get root logger and set the requested level
    root = logging.getLogger()
    root.setLevel( level )
    # Turn down paste httpserver logging
    if level <= logging.DEBUG:
        logging.getLogger( "paste.httpserver.ThreadPool" ).setLevel( logging.WARN )
    # Remove old handlers (iterate a copy since we mutate the list)
    for h in root.handlers[:]:
        root.removeHandler( h )
    # Create handler
    if destination == "stdout":
        handler = logging.StreamHandler( sys.stdout )
    else:
        handler = logging.FileHandler( destination )
    # Create formatter and hook everything up
    formatter = logging.Formatter( log_format )
    handler.setFormatter( formatter )
    root.addHandler( handler )