env/lib/python3.7/site-packages/planemo/galaxy/config.py @ 0:26e78fe6e8c4 draft
"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
| author | shellac |
|---|---|
| date | Sat, 02 May 2020 07:14:21 -0400 |
| parents | |
| children | |
| 1 """Abstractions for setting up a Galaxy instance.""" | |
| 2 from __future__ import absolute_import | |
| 3 from __future__ import print_function | |
| 4 | |
| 5 import abc | |
| 6 import contextlib | |
| 7 import os | |
| 8 import random | |
| 9 import shutil | |
| 10 from string import Template | |
| 11 from tempfile import mkdtemp | |
| 12 | |
| 13 from galaxy.containers.docker_model import DockerVolume | |
| 14 from galaxy.tool_util.deps import docker_util | |
| 15 from galaxy.tool_util.deps.commands import argv_to_str | |
| 16 from pkg_resources import parse_version | |
| 17 from six import ( | |
| 18 add_metaclass, | |
| 19 iteritems | |
| 20 ) | |
| 21 from six.moves import shlex_quote | |
| 22 | |
| 23 from planemo import git | |
| 24 from planemo.config import OptionSource | |
| 25 from planemo.deps import ensure_dependency_resolvers_conf_configured | |
| 26 from planemo.docker import docker_host_args | |
| 27 from planemo.io import ( | |
| 28 communicate, | |
| 29 kill_pid_file, | |
| 30 shell, | |
| 31 shell_join, | |
| 32 untar_to, | |
| 33 wait_on, | |
| 34 warn, | |
| 35 write_file, | |
| 36 ) | |
| 37 from planemo.mulled import build_involucro_context | |
| 38 from planemo.shed import tool_shed_url | |
| 39 from planemo.virtualenv import DEFAULT_PYTHON_VERSION | |
| 40 from .api import ( | |
| 41 DEFAULT_MASTER_API_KEY, | |
| 42 gi, | |
| 43 user_api_key, | |
| 44 ) | |
| 45 from .distro_tools import ( | |
| 46 DISTRO_TOOLS_ID_TO_PATH | |
| 47 ) | |
| 48 from .run import ( | |
| 49 setup_common_startup_args, | |
| 50 setup_venv, | |
| 51 ) | |
| 52 from .workflows import ( | |
| 53 find_tool_ids, | |
| 54 import_workflow, | |
| 55 install_shed_repos, | |
| 56 ) | |
| 57 | |
| 58 | |
| 59 NO_TEST_DATA_MESSAGE = ( | |
| 60 "planemo couldn't find a target test-data directory, you should likely " | |
| 61 "create a test-data directory or pass an explicit path using --test_data." | |
| 62 ) | |
| 63 | |
| 64 WEB_SERVER_CONFIG_TEMPLATE = """ | |
| 65 [server:${server_name}] | |
| 66 use = egg:Paste#http | |
| 67 port = ${port} | |
| 68 host = ${host} | |
| 69 use_threadpool = True | |
| 70 threadpool_kill_thread_limit = 10800 | |
| 71 [app:main] | |
| 72 paste.app_factory = galaxy.web.buildapp:app_factory | |
| 73 """ | |
| 74 | |
| 75 TOOL_CONF_TEMPLATE = """<toolbox> | |
| 76 <tool file="data_source/upload.xml" /> | |
| 77 ${tool_definition} | |
| 78 </toolbox> | |
| 79 """ | |
| 80 | |
| 81 SHED_TOOL_CONF_TEMPLATE = """<?xml version="1.0"?> | |
| 82 <toolbox tool_path="${shed_tool_path}"> | |
| 83 </toolbox> | |
| 84 """ | |
| 85 | |
| 86 SHED_DATA_MANAGER_CONF_TEMPLATE = """<?xml version="1.0"?> | |
| 87 <data_managers> | |
| 88 </data_managers> | |
| 89 """ | |
| 90 | |
| 91 EMPTY_JOB_METRICS_TEMPLATE = """<?xml version="1.0"?> | |
| 92 <job_metrics> | |
| 93 </job_metrics> | |
| 94 """ | |
| 95 | |
| 96 TOOL_SHEDS_CONF = """<tool_sheds> | |
| 97 <tool_shed name="Target Shed" url="${shed_target_url}" /> | |
| 98 </tool_sheds> | |
| 99 """ | |
| 100 | |
| 101 JOB_CONFIG_LOCAL = """<job_conf> | |
| 102 <plugins> | |
| 103 <plugin id="planemo_runner" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/> | |
| 104 </plugins> | |
| 105 <handlers> | |
| 106 <handler id="main"/> | |
| 107 </handlers> | |
| 108 <destinations default="planemo_dest"> | |
| 109 <destination id="planemo_dest" runner="planemo_runner"> | |
| 110 <param id="require_container">${require_container}</param> | |
| 111 <param id="docker_enabled">${docker_enable}</param> | |
| 112 <param id="docker_sudo">${docker_sudo}</param> | |
| 113 <param id="docker_sudo_cmd">${docker_sudo_cmd}</param> | |
| 114 <param id="docker_cmd">${docker_cmd}</param> | |
| 115 ${docker_host_param} | |
| 116 </destination> | |
| 117 <destination id="upload_dest" runner="planemo_runner"> | |
| 118 <param id="docker_enable">false</param> | |
| 119 </destination> | |
| 120 </destinations> | |
| 121 <tools> | |
| 122 <tool id="upload1" destination="upload_dest" /> | |
| 123 </tools> | |
| 124 </job_conf> | |
| 125 """ | |
| 126 | |
| 127 LOGGING_TEMPLATE = """ | |
| 128 ## Configure Python loggers. | |
| 129 [loggers] | |
| 130 keys = root,paste,displayapperrors,galaxydeps,galaxymasterapikey,galaxy | |
| 131 | |
| 132 [handlers] | |
| 133 keys = console | |
| 134 | |
| 135 [formatters] | |
| 136 keys = generic | |
| 137 | |
| 138 [logger_root] | |
| 139 level = WARN | |
| 140 handlers = console | |
| 141 | |
| 142 [logger_paste] | |
| 143 level = WARN | |
| 144 handlers = console | |
| 145 qualname = paste | |
| 146 propagate = 0 | |
| 147 | |
| 148 [logger_galaxydeps] | |
| 149 level = DEBUG | |
| 150 handlers = console | |
| 151 qualname = galaxy.tools.deps | |
| 152 propagate = 0 | |
| 153 | |
| 154 [logger_galaxymasterapikey] | |
| 155 level = WARN | |
| 156 handlers = console | |
| 157 qualname = galaxy.web.framework.webapp | |
| 158 propagate = 0 | |
| 159 | |
| 160 [logger_displayapperrors] | |
| 161 level = ERROR | |
| 162 handlers = | |
| 163 qualname = galaxy.datatypes.display_applications.application | |
| 164 propagate = 0 | |
| 165 | |
| 166 [logger_galaxy] | |
| 167 level = ${log_level} | |
| 168 handlers = console | |
| 169 qualname = galaxy | |
| 170 propagate = 0 | |
| 171 | |
| 172 [handler_console] | |
| 173 class = StreamHandler | |
| 174 args = (sys.stderr,) | |
| 175 level = DEBUG | |
| 176 formatter = generic | |
| 177 | |
| 178 [formatter_generic] | |
| 179 format = %(asctime)s %(levelname)-5.5s [%(name)s] %(message)s | |
| 180 """ | |
| 181 | |
| 182 | |
| 183 EMPTY_TOOL_CONF_TEMPLATE = """<toolbox></toolbox>""" | |
| 184 | |
| 185 DEFAULT_GALAXY_BRANCH = "master" | |
| 186 DEFAULT_GALAXY_SOURCE = "https://github.com/galaxyproject/galaxy" | |
| 187 CWL_GALAXY_SOURCE = "https://github.com/common-workflow-language/galaxy" | |
| 188 | |
| 189 DATABASE_LOCATION_TEMPLATE = "sqlite:///%s?isolation_level=IMMEDIATE" | |
| 190 | |
| 191 COMMAND_STARTUP_COMMAND = "./scripts/common_startup.sh ${COMMON_STARTUP_ARGS}" | |
| 192 | |
| 193 CLEANUP_IGNORE_ERRORS = True | |
| 194 DEFAULT_GALAXY_BRAND = 'Configured by Planemo' | |
| 195 | |
| 196 | |
@contextlib.contextmanager
def galaxy_config(ctx, runnables, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context."""
    c = local_galaxy_config
    if kwds.get("dockerize", False):
        c = docker_galaxy_config
    elif kwds.get("external", False):
        c = external_galaxy_config

    with c(ctx, runnables, **kwds) as config:
        yield config
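# Illustrative usage only (a minimal sketch, not part of the original module): a
# planemo command is assumed to enter the context manager above and talk to the
# resulting config object, roughly like:
#
#     with galaxy_config(ctx, runnables, **kwds) as config:
#         config.install_workflows()
#         ctx.log("Galaxy available at %s" % config.galaxy_url)
#
# The real call sites live elsewhere in planemo; this sketch only shows the shape
# of the API selected by the "dockerize"/"external" keywords.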


def simple_docker_volume(path):
    path = os.path.abspath(path)
    return DockerVolume("%s:%s:rw" % (path, path))
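# For example (hypothetical path): with a working directory of /home/user/repo,
# simple_docker_volume("test-data") maps the absolute path to itself read/write,
# i.e. the volume string "/home/user/repo/test-data:/home/user/repo/test-data:rw".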


@contextlib.contextmanager
def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` for Docker container."""
    test_data_dir = _find_test_data(runnables, **kwds)

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_metrics(config_directory, kwds)

        shed_tool_conf = "config/shed_tool_conf.xml"
        all_tool_paths = _all_tool_paths(runnables, **kwds)

        tool_directories = set([])  # Things to mount...
        for tool_path in all_tool_paths:
            directory = os.path.dirname(os.path.normpath(tool_path))
            if os.path.exists(directory):
                tool_directories.add(directory)

        # TODO: remap these.
        tool_volumes = []
        for tool_directory in tool_directories:
            volume = simple_docker_volume(tool_directory)
            tool_volumes.append(volume)

        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )
        port = _get_port(kwds)
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        _handle_container_resolution(ctx, kwds, properties)
        master_api_key = _get_master_api_key(kwds)

        template_args = dict(
            shed_tool_path=shed_tool_path,
            tool_conf=tool_conf,
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)

        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        properties.update(dict(
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            migrated_tools_config=empty_tool_conf,
        ))

        server_name = "planemo%d" % random.randint(0, 100000)

        # Value substitutions in Galaxy properties - for consistency with
        # non-Dockerized version.
        template_args = dict(
        )
        env = _build_env_for_galaxy(properties, template_args)
        env["NONUSE"] = "nodejs,proftp,reports"
        if ctx.verbose:
            env["GALAXY_LOGGING"] = "full"

        # TODO: set up FTP upload dir and disable FTP server in container.
        _build_test_env(properties, env)

        docker_target_kwds = docker_host_args(**kwds)
        volumes = tool_volumes + [simple_docker_volume(config_directory)]
        export_directory = kwds.get("export_directory", None)
        if export_directory is not None:
            volumes.append(DockerVolume("%s:/export:rw" % export_directory))

        # TODO: Allow this to be real Docker volumes and allow multiple.
        extra_volume = kwds.get("docker_extra_volume")
        if extra_volume:
            volumes.append(simple_docker_volume(extra_volume))
        yield DockerGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            docker_target_kwds=docker_target_kwds,
            volumes=volumes,
            export_directory=export_directory,
            kwds=kwds,
        )

@contextlib.contextmanager
def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    """Set up a ``GalaxyConfig`` in an auto-cleaned context."""
    test_data_dir = _find_test_data(runnables, **kwds)
    tool_data_table = _find_tool_data_table(
        runnables,
        test_data_dir=test_data_dir,
        **kwds
    )
    data_manager_config_paths = [r.data_manager_conf_path for r in runnables if r.data_manager_conf_path]
    galaxy_root = _find_galaxy_root(ctx, **kwds)
    install_galaxy = kwds.get("install_galaxy", False)
    if galaxy_root is not None:
        if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
            os.rmdir(galaxy_root)
        if os.path.isdir(galaxy_root) and install_galaxy:
            raise Exception("%s is an existing non-empty directory, cannot install Galaxy again" % galaxy_root)

    # Duplicate block in docker variant above.
    if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
        if ctx.get_option_source("docker") != OptionSource.cli:
            kwds["docker"] = True
        else:
            raise Exception("Specified no docker and mulled containers together.")

    with _config_directory(ctx, **kwds) as config_directory:
        def config_join(*args):
            return os.path.join(config_directory, *args)

        install_env = {}
        if kwds.get('galaxy_skip_client_build', True):
            install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
        if galaxy_root is None:
            galaxy_root = config_join("galaxy-dev")
        if not os.path.isdir(galaxy_root):
            _build_eggs_cache(ctx, install_env, kwds)
            _install_galaxy(ctx, galaxy_root, install_env, kwds)

        if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # on python 3 we use gunicorn,
            # which requires 'main' as server name
            server_name = 'main'
        else:
            server_name = "planemo%d" % random.randint(0, 100000)
        # Once we don't have to support earlier than 18.01 - try putting these files
        # somewhere better than with Galaxy.
        log_file = "%s.log" % server_name
        pid_file = "%s.pid" % server_name
        ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
        _handle_job_config_file(config_directory, server_name, kwds)
        _handle_job_metrics(config_directory, kwds)
        file_path = kwds.get("file_path") or config_join("files")
        _ensure_directory(file_path)

        tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
        _ensure_directory(tool_dependency_dir)

        shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
        all_tool_paths = _all_tool_paths(runnables, **kwds)
        empty_tool_conf = config_join("empty_tool_conf.xml")

        tool_conf = config_join("tool_conf.xml")

        shed_data_manager_config_file = config_join("shed_data_manager_conf.xml")

        shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
        _ensure_directory(shed_tool_path)

        sheds_config_path = _configure_sheds_config_file(
            ctx, config_directory, **kwds
        )

        database_location = config_join("galaxy.sqlite")
        master_api_key = _get_master_api_key(kwds)
        dependency_dir = os.path.join(config_directory, "deps")
        _ensure_directory(shed_tool_path)
        port = _get_port(kwds)
        template_args = dict(
            port=port,
            host=kwds.get("host", "127.0.0.1"),
            server_name=server_name,
            temp_directory=config_directory,
            shed_tool_path=shed_tool_path,
            database_location=database_location,
            tool_conf=tool_conf,
            debug=kwds.get("debug", "true"),
            id_secret=kwds.get("id_secret", "test_secret"),
            log_level="DEBUG" if ctx.verbose else "INFO",
        )
        tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
        # Setup both galaxy_email and older test user test@bx.psu.edu
        # as admins for command_line, etc...
        properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
        properties.update(dict(
            server_name="main",
            ftp_upload_dir_template="${ftp_upload_dir}",
            ftp_upload_purge="False",
            ftp_upload_dir=test_data_dir or os.path.abspath('.'),
            ftp_upload_site="Test Data",
            check_upload_content="False",
            tool_dependency_dir=dependency_dir,
            file_path=file_path,
            new_file_path="${temp_directory}/tmp",
            tool_config_file=tool_config_file,
            tool_sheds_config_file=sheds_config_path,
            manage_dependency_relationships="False",
            job_working_directory="${temp_directory}/job_working_directory",
            template_cache_path="${temp_directory}/compiled_templates",
            citation_cache_type="file",
            citation_cache_data_dir="${temp_directory}/citations/data",
            citation_cache_lock_dir="${temp_directory}/citations/lock",
            database_auto_migrate="True",
            enable_beta_tool_formats="True",
            id_secret="${id_secret}",
            log_level="${log_level}",
            debug="${debug}",
            watch_tools="auto",
            default_job_shell="/bin/bash",  # For conda dependency resolution
            tool_data_table_config_path=tool_data_table,
            data_manager_config_file=",".join(data_manager_config_paths) or None,  # without 'or None' may raise IOError in galaxy (see #946)
            integrated_tool_panel_config=("${temp_directory}/"
                                          "integrated_tool_panel_conf.xml"),
            migrated_tools_config=empty_tool_conf,
            test_data_dir=test_data_dir,  # TODO: make gx respect this
            shed_data_manager_config_file=shed_data_manager_config_file,
        ))
        _handle_container_resolution(ctx, kwds, properties)
        write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
        properties["database_connection"] = _database_connection(database_location, **kwds)

        _handle_kwd_overrides(properties, kwds)

        # TODO: consider following property
        # watch_tool = False
        # datatypes_config_file = config/datatypes_conf.xml
        # welcome_url = /static/welcome.html
        # logo_url = /
        # sanitize_all_html = True
        # serve_xss_vulnerable_mimetypes = False
        # track_jobs_in_database = None
        # outputs_to_working_directory = False
        # retry_job_output_collection = 0

        env = _build_env_for_galaxy(properties, template_args)
        env.update(install_env)
        _build_test_env(properties, env)
        env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf
        env['GALAXY_TEST_DBURI'] = properties["database_connection"]

        env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
        env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
        env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
        # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
        # https://github.com/galaxyproject/planemo/issues/788
        env["GALAXY_LOG"] = log_file
        env["GALAXY_PID"] = pid_file
        web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
        write_file(config_join("galaxy.ini"), web_config)
        _write_tool_conf(ctx, all_tool_paths, tool_conf)
        write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)

        shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
        # Write a new shed_tool_conf.xml if needed.
        write_file(shed_tool_conf, shed_tool_conf_contents, force=False)

        write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)

        yield LocalGalaxyConfig(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            galaxy_root,
            kwds,
        )

def _all_tool_paths(runnables, **kwds):
    tool_paths = [r.path for r in runnables if r.has_tools and not r.data_manager_conf_path]
    all_tool_paths = list(tool_paths) + list(kwds.get("extra_tools", []))
    for runnable in runnables:
        if runnable.type.name == "galaxy_workflow":
            tool_ids = find_tool_ids(runnable.path)
            for tool_id in tool_ids:
                if tool_id in DISTRO_TOOLS_ID_TO_PATH:
                    all_tool_paths.append(DISTRO_TOOLS_ID_TO_PATH[tool_id])

    return all_tool_paths


def _shared_galaxy_properties(config_directory, kwds, for_tests):
    """Setup properties useful for local and Docker Galaxy instances.

    Most things related to paths, etc... are very different between Galaxy
    modalities and many taken care of internally to the container in that mode.
    But this method sets up API stuff, tool, and job stuff that can be shared.
    """
    master_api_key = _get_master_api_key(kwds)
    user_email = _user_email(kwds)
    properties = {
        'master_api_key': master_api_key,
        'admin_users': "%s,test@bx.psu.edu" % user_email,
        'expose_dataset_path': "True",
        'cleanup_job': 'never',
        'collect_outputs_from': "job_working_directory",
        'allow_path_paste': "True",
        'check_migrate_tools': "False",
        'use_cached_dependency_manager': str(kwds.get("conda_auto_install", False)),
        'brand': kwds.get("galaxy_brand", DEFAULT_GALAXY_BRAND),
        'strict_cwl_validation': str(not kwds.get("non_strict_cwl", False)),
    }
    if kwds.get("galaxy_single_user", True):
        properties['single_user'] = user_email

    if for_tests:
        empty_dir = os.path.join(config_directory, "empty")
        _ensure_directory(empty_dir)
        properties["tour_config_dir"] = empty_dir
        properties["interactive_environment_plugins_directory"] = empty_dir
        properties["visualization_plugins_directory"] = empty_dir
    return properties

@contextlib.contextmanager
def external_galaxy_config(ctx, runnables, for_tests=False, **kwds):
    yield BaseGalaxyConfig(
        ctx=ctx,
        galaxy_url=kwds.get("galaxy_url", None),
        master_api_key=_get_master_api_key(kwds),
        user_api_key=kwds.get("galaxy_user_key", None),
        runnables=runnables,
        kwds=kwds
    )


def _get_master_api_key(kwds):
    master_api_key = kwds.get("galaxy_admin_key") or DEFAULT_MASTER_API_KEY
    return master_api_key


def _get_port(kwds):
    port = int(kwds.get("port", 9090))
    return port


def _user_email(kwds):
    user_email = kwds.get("galaxy_email")
    return user_email


@contextlib.contextmanager
def _config_directory(ctx, **kwds):
    config_directory = kwds.get("config_directory", None)
    created_config_directory = False
    if not config_directory:
        created_config_directory = True
        config_directory = os.path.realpath(mkdtemp())
        ctx.vlog("Created directory for Galaxy configuration [%s]" % config_directory)
    try:
        yield config_directory
    finally:
        cleanup = not kwds.get("no_cleanup", False)
        if created_config_directory and cleanup:
            shutil.rmtree(config_directory)

@add_metaclass(abc.ABCMeta)
class GalaxyInterface(object):
    """Abstraction around a Galaxy instance.

    Description of a Galaxy instance and how to interact with it - this could
    potentially be a remote, already running instance or an instance Planemo manages
    to execute some task(s).
    """

    @abc.abstractproperty
    def gi(self):
        """Return an admin bioblend Galaxy instance for API interactions."""

    @abc.abstractproperty
    def user_gi(self):
        """Return a user-backed bioblend Galaxy instance for API interactions."""

    @abc.abstractmethod
    def install_repo(self, *args, **kwds):
        """Install specified tool shed repository."""

    @abc.abstractproperty
    def tool_shed_client(self):
        """Return an admin bioblend tool shed client."""

    @abc.abstractmethod
    def wait_for_all_installed(self):
        """Wait for all queued repository installations to complete."""

    @abc.abstractmethod
    def install_workflows(self):
        """Install all workflows configured with these planemo arguments."""

    @abc.abstractmethod
    def workflow_id(self, path):
        """Get installed workflow API ID for input path."""


@add_metaclass(abc.ABCMeta)
class GalaxyConfig(GalaxyInterface):
    """Specialization of GalaxyInterface for Galaxy instances Planemo manages itself.

    This assumes more than an API connection is available - Planemo needs to be able to
    start and stop the Galaxy instance, recover logs, etc... There are currently two
    implementations - a locally executed Galaxy and one running inside a Docker container.
    """

    @abc.abstractproperty
    def kill(self):
        """Stop the running instance."""

    @abc.abstractmethod
    def startup_command(self, ctx, **kwds):
        """Return a shell command used to startup this instance.

        Among other common planemo kwds, this should respect the
        ``daemon`` keyword.
        """

    @abc.abstractproperty
    def log_contents(self):
        """Retrieve text of log for running Galaxy instance."""

    @abc.abstractmethod
    def cleanup(self):
        """Cleanup allocated resources to run this instance."""

    @abc.abstractproperty
    def use_path_paste(self):
        """Use path paste to upload data."""

class BaseGalaxyConfig(GalaxyInterface):

    def __init__(
        self,
        ctx,
        galaxy_url,
        master_api_key,
        user_api_key,
        runnables,
        kwds,
    ):
        self._ctx = ctx
        self.galaxy_url = galaxy_url
        self.master_api_key = master_api_key
        self._user_api_key = user_api_key
        self.runnables = runnables
        self._kwds = kwds
        self._workflow_ids = {}

    @property
    def gi(self):
        assert self.galaxy_url
        return gi(url=self.galaxy_url, key=self.master_api_key)

    @property
    def user_gi(self):
        user_api_key = self.user_api_key
        assert user_api_key
        return self._gi_for_key(user_api_key)

    @property
    def user_api_key(self):
        # TODO: thread-safe
        if self._user_api_key is None:
            # TODO: respect --galaxy_email - seems like a real bug
            self._user_api_key = user_api_key(self.gi)

        return self._user_api_key

    def _gi_for_key(self, key):
        assert self.galaxy_url
        return gi(url=self.galaxy_url, key=key)

    def install_repo(self, *args, **kwds):
        self.tool_shed_client.install_repository_revision(
            *args, **kwds
        )

    @property
    def tool_shed_client(self):
        return self.gi.toolShed

    def wait_for_all_installed(self):
        def status_ready(repo):
            status = repo["status"]
            if status in ["Installing", "New"]:
                return None
            if status == "Installed":
                return True
            raise Exception("Error installing repo status is %s" % status)

        def ready():
            repos = self.tool_shed_client.get_repositories()
            ready = all(map(status_ready, repos))
            return ready or None

        wait_on(ready, "galaxy tool installation", timeout=60 * 60 * 1)

    def install_workflows(self):
        for runnable in self.runnables:
            if runnable.type.name in ["galaxy_workflow", "cwl_workflow"]:
                self._install_workflow(runnable)

    def _install_workflow(self, runnable):
        if self._kwds["shed_install"]:
            install_shed_repos(runnable, self.gi, self._kwds.get("ignore_dependency_problems", False))

        default_from_path = self._kwds.get("workflows_from_path", False)
        # TODO: Allow serialization so this doesn't need to assume a
        # shared filesystem with Galaxy server.
        from_path = default_from_path or (runnable.type.name == "cwl_workflow")
        workflow = import_workflow(
            runnable.path, admin_gi=self.gi, user_gi=self.user_gi, from_path=from_path
        )
        self._workflow_ids[runnable.path] = workflow["id"]

    def workflow_id(self, path):
        return self._workflow_ids[path]

    @property
    def use_path_paste(self):
        option = self._kwds.get("paste_test_data_paths")
        if option is None:
            return self.default_use_path_paste
        else:
            return option

    @property
    def default_use_path_paste(self):
        return False

class BaseManagedGalaxyConfig(BaseGalaxyConfig):

    def __init__(
        self,
        ctx,
        config_directory,
        env,
        test_data_dir,
        port,
        server_name,
        master_api_key,
        runnables,
        kwds,
    ):
        galaxy_url = "http://localhost:%d" % port
        super(BaseManagedGalaxyConfig, self).__init__(
            ctx=ctx,
            galaxy_url=galaxy_url,
            master_api_key=master_api_key,
            user_api_key=None,
            runnables=runnables,
            kwds=kwds
        )
        self.config_directory = config_directory
        self.env = env
        self.test_data_dir = test_data_dir
        self.port = port
        self.server_name = server_name


class DockerGalaxyConfig(BaseManagedGalaxyConfig):
    """A :class:`GalaxyConfig` description of a Dockerized Galaxy instance."""

    def __init__(
        self,
        ctx,
        config_directory,
        env,
        test_data_dir,
        port,
        server_name,
        master_api_key,
        runnables,
        docker_target_kwds,
        volumes,
        export_directory,
        kwds,
    ):
        super(DockerGalaxyConfig, self).__init__(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            kwds,
        )
        self.docker_target_kwds = docker_target_kwds
        self.volumes = volumes
        self.export_directory = export_directory

    def kill(self):
        """Kill planemo container..."""
        kill_command = docker_util.kill_command(
            self.server_name,
            **self.docker_target_kwds
        )
        return shell(kill_command)

    def startup_command(self, ctx, **kwds):
        """Return a shell command used to startup this instance.

        Among other common planemo kwds, this should respect the
        ``daemon`` keyword.
        """
        daemon = kwds.get("daemon", False)
        daemon_str = "" if not daemon else " -d"
        docker_run_extras = "-p %s:80%s" % (self.port, daemon_str)
        env_directives = ["%s='%s'" % item for item in self.env.items()]
        image = kwds.get("docker_galaxy_image", "bgruening/galaxy-stable")
        run_command = docker_util.build_docker_run_command(
            "", image,
            interactive=False,
            env_directives=env_directives,
            working_directory=None,
            name=self.server_name,
            run_extra_arguments=docker_run_extras,
            set_user=False,
            volumes=self.volumes,
            **self.docker_target_kwds
        )
        chmod_command = [
            "chmod",
            "-R",
            "o+rwx",
            self.config_directory,
        ]
        if self.export_directory:
            chmod_command.append(self.export_directory)

        return shell_join(
            argv_to_str(chmod_command),
            run_command,
        )

    @property
    def log_contents(self):
        logs_command = docker_util.logs_command(
            self.server_name,
            **self.docker_target_kwds
        )
        output, _ = communicate(
            logs_command
        )
        return output

    def cleanup(self):
        shutil.rmtree(self.config_directory, CLEANUP_IGNORE_ERRORS)

class LocalGalaxyConfig(BaseManagedGalaxyConfig):
    """A local, non-containerized implementation of :class:`GalaxyConfig`."""

    def __init__(
        self,
        ctx,
        config_directory,
        env,
        test_data_dir,
        port,
        server_name,
        master_api_key,
        runnables,
        galaxy_root,
        kwds,
    ):
        super(LocalGalaxyConfig, self).__init__(
            ctx,
            config_directory,
            env,
            test_data_dir,
            port,
            server_name,
            master_api_key,
            runnables,
            kwds,
        )
        self.galaxy_root = galaxy_root

    def kill(self):
        kill_pid_file(self.pid_file)

    def startup_command(self, ctx, **kwds):
        """Return a shell command used to startup this instance.

        Among other common planemo kwds, this should respect the
        ``daemon`` keyword.
        """
        daemon = kwds.get("daemon", False)
        # TODO: Allow running dockerized Galaxy here instead.
        setup_venv_command = setup_venv(ctx, kwds)
        run_script = "%s $COMMON_STARTUP_ARGS" % shlex_quote(os.path.join(self.galaxy_root, "run.sh"))
        if daemon:
            run_script += " --daemon"
            self.env["GALAXY_RUN_ALL"] = "1"
        else:
            run_script += " --server-name %s" % shlex_quote(self.server_name)
        server_ini = os.path.join(self.config_directory, "galaxy.ini")
        self.env["GALAXY_CONFIG_FILE"] = server_ini
        if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
            # We need to start under gunicorn
            self.env['APP_WEBSERVER'] = 'gunicorn'
            self.env['GUNICORN_CMD_ARGS'] = "--bind={host}:{port} --name={server_name}".format(
                host=kwds.get('host', '127.0.0.1'),
                port=kwds['port'],
                server_name=self.server_name,
            )
        cd_to_galaxy_command = ['cd', self.galaxy_root]
        return shell_join(
            cd_to_galaxy_command,
            setup_venv_command,
            setup_common_startup_args(),
            run_script,
        )

    @property
    def log_file(self):
        """Log file used when planemo serves this Galaxy instance."""
        file_name = "%s.log" % self.server_name
        return os.path.join(self.galaxy_root, file_name)

    @property
    def pid_file(self):
        pid_file_name = "%s.pid" % self.server_name
        return os.path.join(self.galaxy_root, pid_file_name)

    @property
    def log_contents(self):
        if not os.path.exists(self.log_file):
            return ""
        with open(self.log_file, "r") as f:
            return f.read()

    def cleanup(self):
        shutil.rmtree(self.config_directory, CLEANUP_IGNORE_ERRORS)

    @property
    def default_use_path_paste(self):
        # If Planemo started a local, native Galaxy instance assume file URLs can be
        # pasted.
        return True


def _database_connection(database_location, **kwds):
    default_connection = DATABASE_LOCATION_TEMPLATE % database_location
    database_connection = kwds.get("database_connection") or default_connection
    return database_connection
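# For a config directory such as /tmp/planemo_config (hypothetical path), the
# default connection string produced above is
# "sqlite:////tmp/planemo_config/galaxy.sqlite?isolation_level=IMMEDIATE", unless a
# "database_connection" keyword overrides it.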


def _find_galaxy_root(ctx, **kwds):
    root_prop = "galaxy_root"
    cwl = kwds.get("cwl", False)
    if cwl:
        root_prop = "cwl_galaxy_root"
    galaxy_root = kwds.get(root_prop, None)
    if galaxy_root:
        return galaxy_root
    else:
        par_dir = os.getcwd()
        while True:
            run = os.path.join(par_dir, "run.sh")
            config = os.path.join(par_dir, "config")
            if os.path.isfile(run) and os.path.isdir(config):
                return par_dir
            new_par_dir = os.path.dirname(par_dir)
            if new_par_dir == par_dir:
                break
            par_dir = new_par_dir
    return None


def _find_test_data(runnables, **kwds):
    test_data_search_path = "."
    runnables = [r for r in runnables if r.has_tools]
    if len(runnables) > 0:
        test_data_search_path = runnables[0].test_data_search_path

    # Find test data directory associated with path.
    test_data = kwds.get("test_data", None)
    if test_data:
        return os.path.abspath(test_data)
    else:
        test_data = _search_tool_path_for(test_data_search_path, "test-data")
        if test_data:
            return test_data
    warn(NO_TEST_DATA_MESSAGE)
    return None


def _find_tool_data_table(runnables, test_data_dir, **kwds):
    tool_data_search_path = "."
    runnables = [r for r in runnables if r.has_tools]
    if len(runnables) > 0:
        tool_data_search_path = runnables[0].tool_data_search_path

    tool_data_table = kwds.get("tool_data_table", None)
    if tool_data_table:
        return os.path.abspath(tool_data_table)
    else:
        extra_paths = [test_data_dir] if test_data_dir else []
        return _search_tool_path_for(
            tool_data_search_path,
            "tool_data_table_conf.xml.test",
            extra_paths,
        ) or _search_tool_path_for(  # if all else fails just use sample
            tool_data_search_path,
            "tool_data_table_conf.xml.sample"
        )


def _search_tool_path_for(path, target, extra_paths=[]):
    """Check for presence of a target in different artifact directories."""
    if not os.path.isdir(path):
        tool_dir = os.path.dirname(path)
    else:
        tool_dir = path
    possible_dirs = [tool_dir, "."] + extra_paths
    for possible_dir in possible_dirs:
        possible_path = os.path.join(possible_dir, target)
        if os.path.exists(possible_path):
            return os.path.abspath(possible_path)
    return None


def _configure_sheds_config_file(ctx, config_directory, **kwds):
    if "shed_target" not in kwds:
        kwds = kwds.copy()
        kwds["shed_target"] = "toolshed"
    shed_target_url = tool_shed_url(ctx, **kwds)
    contents = _sub(TOOL_SHEDS_CONF, {"shed_target_url": shed_target_url})
    tool_sheds_conf = os.path.join(config_directory, "tool_sheds_conf.xml")
    write_file(tool_sheds_conf, contents)
    return tool_sheds_conf


def _tool_conf_entry_for(tool_paths):
    tool_definitions = ""
    for tool_path in tool_paths:
        if os.path.isdir(tool_path):
            tool_definitions += '''<tool_dir dir="%s" />''' % tool_path
        else:
            tool_definitions += '''<tool file="%s" />''' % tool_path
    return tool_definitions


def _install_galaxy(ctx, galaxy_root, env, kwds):
    if not kwds.get("no_cache_galaxy", False):
        _install_galaxy_via_git(ctx, galaxy_root, env, kwds)
    else:
        _install_galaxy_via_download(ctx, galaxy_root, env, kwds)


def _install_galaxy_via_download(ctx, galaxy_root, env, kwds):
    branch = _galaxy_branch(kwds)
    untar_to("https://codeload.github.com/galaxyproject/galaxy/tar.gz/" + branch, tar_args=['-xvzf', '-', 'galaxy-' + branch], dest_dir=galaxy_root)
    _install_with_command(ctx, galaxy_root, env, kwds)


def _install_galaxy_via_git(ctx, galaxy_root, env, kwds):
    gx_repo = _ensure_galaxy_repository_available(ctx, kwds)
    branch = _galaxy_branch(kwds)
    command = git.command_clone(ctx, gx_repo, galaxy_root, branch=branch)
    shell(command, env=env)
    _install_with_command(ctx, galaxy_root, env, kwds)


def _build_eggs_cache(ctx, env, kwds):
    if kwds.get("no_cache_galaxy", False):
        return None
    workspace = ctx.workspace
    eggs_path = os.path.join(workspace, "gx_eggs")
    if not os.path.exists(eggs_path):
        os.makedirs(eggs_path)
    env["GALAXY_EGGS_PATH"] = eggs_path


def _galaxy_branch(kwds):
    branch = kwds.get("galaxy_branch", None)
    if branch is None:
        cwl = kwds.get("cwl", False)
        branch = "cwl-1.0" if cwl else None
    if branch is None:
        branch = DEFAULT_GALAXY_BRANCH

    return branch


def _galaxy_source(kwds):
    source = kwds.get("galaxy_source", None)
    if source is None:
        cwl = kwds.get("cwl", False)
        source = CWL_GALAXY_SOURCE if cwl else None
    if source is None:
        source = DEFAULT_GALAXY_SOURCE

    return source


def _install_with_command(ctx, galaxy_root, env, kwds):
    setup_venv_command = setup_venv(ctx, kwds)
    env['__PYVENV_LAUNCHER__'] = ''
    install_cmd = shell_join(
        setup_venv_command,
        setup_common_startup_args(),
        COMMAND_STARTUP_COMMAND,
    )
    shell(install_cmd, cwd=galaxy_root, env=env)


def _ensure_galaxy_repository_available(ctx, kwds):
    workspace = ctx.workspace
    cwl = kwds.get("cwl", False)
    galaxy_source = kwds.get('galaxy_source')
    if galaxy_source and galaxy_source != DEFAULT_GALAXY_SOURCE:
        sanitized_repo_name = "".join(c if c.isalnum() else '_' for c in kwds['galaxy_source']).rstrip()[:255]
        gx_repo = os.path.join(workspace, "gx_repo_%s" % sanitized_repo_name)
    else:
        gx_repo = os.path.join(workspace, "gx_repo")
    if cwl:
        gx_repo += "_cwl"
    if os.path.exists(gx_repo):
        # Convert the git repository from bare to mirror, if needed
        shell(['git', '--git-dir', gx_repo, 'config', 'remote.origin.fetch', '+refs/*:refs/*'])
        shell(['git', '--git-dir', gx_repo, 'config', 'remote.origin.mirror', 'true'])
        # Attempt remote update - but don't fail if not interweb, etc...
        shell("git --git-dir %s remote update >/dev/null 2>&1" % gx_repo)
    else:
        remote_repo = _galaxy_source(kwds)
        command = git.command_clone(ctx, remote_repo, gx_repo, mirror=True)
        shell(command)
    return gx_repo


def _build_env_for_galaxy(properties, template_args):
    env = {}
    for key, value in iteritems(properties):
        if value is not None:  # Do not override None with empty string
            var = "GALAXY_CONFIG_OVERRIDE_%s" % key.upper()
            value = _sub(value, template_args)
            env[var] = value
    return env
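# As a concrete illustration (values are hypothetical): a property of
# tool_config_file="/tmp/conf/tool_conf.xml" becomes the environment variable
# GALAXY_CONFIG_OVERRIDE_TOOL_CONFIG_FILE="/tmp/conf/tool_conf.xml", which Galaxy
# reads at startup and which takes precedence over the matching option in its
# config file.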


def _build_test_env(properties, env):
    # Keeping these environment variables around for a little while but
    # many are probably not needed as of the following commit.
    # https://bitbucket.org/galaxy/galaxy-central/commits/d7dd1f9
    test_property_variants = {
        'GALAXY_TEST_JOB_CONFIG_FILE': 'job_config_file',
        'GALAXY_TEST_MIGRATED_TOOL_CONF': 'migrated_tools_config',
        'GALAXY_TEST_TOOL_CONF': 'tool_config_file',
        'GALAXY_TEST_FILE_DIR': 'test_data_dir',
        'GALAXY_TOOL_DEPENDENCY_DIR': 'tool_dependency_dir',
        # Next line would be required for tool shed tests.
        # 'GALAXY_TEST_TOOL_DEPENDENCY_DIR': 'tool_dependency_dir',
    }
    for test_key, gx_key in test_property_variants.items():
        value = properties.get(gx_key, None)
        if value is not None:
            env[test_key] = value


def _handle_job_config_file(config_directory, server_name, kwds):
    job_config_file = kwds.get("job_config_file", None)
    if not job_config_file:
        template_str = JOB_CONFIG_LOCAL
        job_config_file = os.path.join(
            config_directory,
            "job_conf.xml",
        )
        docker_enable = str(kwds.get("docker", False))
        docker_host = str(kwds.get("docker_host", docker_util.DEFAULT_HOST))
        docker_host_param = ""
        if docker_host:
            docker_host_param = """<param id="docker_host">%s</param>""" % docker_host

        conf_contents = Template(template_str).safe_substitute({
            "server_name": server_name,
            "docker_enable": docker_enable,
            "require_container": "false",
            "docker_sudo": str(kwds.get("docker_sudo", False)),
            "docker_sudo_cmd": str(kwds.get("docker_sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND)),
            "docker_cmd": str(kwds.get("docker_cmd", docker_util.DEFAULT_DOCKER_COMMAND)),
            "docker_host": docker_host_param,
        })
        write_file(job_config_file, conf_contents)
    kwds["job_config_file"] = job_config_file


def _write_tool_conf(ctx, tool_paths, tool_conf_path):
    tool_definition = _tool_conf_entry_for(tool_paths)
    tool_conf_template_kwds = dict(tool_definition=tool_definition)
    tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, tool_conf_template_kwds)
    write_file(tool_conf_path, tool_conf_contents)
    ctx.vlog(
        "Writing tool_conf to path %s with contents [%s]",
        tool_conf_path,
        tool_conf_contents,
    )


def _handle_container_resolution(ctx, kwds, galaxy_properties):
    if kwds.get("mulled_containers", False):
        galaxy_properties["enable_beta_mulled_containers"] = "True"
        involucro_context = build_involucro_context(ctx, **kwds)
        galaxy_properties["involucro_auto_init"] = "False"  # Use planemo's
        galaxy_properties["involucro_path"] = involucro_context.involucro_bin


def _handle_job_metrics(config_directory, kwds):
    metrics_conf = os.path.join(config_directory, "job_metrics_conf.xml")
    with open(metrics_conf, "w") as fh:
        fh.write(EMPTY_JOB_METRICS_TEMPLATE)
    kwds["job_metrics_config_file"] = metrics_conf


def _handle_kwd_overrides(properties, kwds):
    kwds_gx_properties = [
        'job_config_file',
        'job_metrics_config_file',
        'dependency_resolvers_config_file',
    ]
    for prop in kwds_gx_properties:
        val = kwds.get(prop, None)
        if val:
            properties[prop] = val


def _sub(template, args):
    if template is None:
        return ''
    return Template(template).safe_substitute(args)
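# Substitution behaviour relied on above (standard library string.Template, shown
# here as a sketch):
#
#     _sub("port = ${port}", {"port": 9090})  # -> "port = 9090"
#     _sub("${missing} stays", {})            # -> "${missing} stays"
#     _sub(None, {})                          # -> ""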


def _ensure_directory(path):
    if path is not None and not os.path.exists(path):
        os.makedirs(path)


__all__ = (
    "DATABASE_LOCATION_TEMPLATE",
    "galaxy_config",
)
