Mercurial > repos > gga > apollo_fetch_jbrowse
comparison webapollo.py @ 0:c6d7f19953a6 draft
planemo upload for repository https://github.com/galaxy-genome-annotation/galaxy-tools/tree/master/tools/apollo commit f745b23c84a615bf434d717c8c0e553a012f0268
| author | gga |
|---|---|
| date | Mon, 11 Sep 2017 05:47:25 -0400 |
| parents | |
| children | 921c8461a6a6 |
comparison
equal
deleted
inserted
replaced
| -1:000000000000 | 0:c6d7f19953a6 |
|---|---|
| 1 from __future__ import print_function | |
| 2 | |
| 3 import argparse | |
| 4 import collections | |
| 5 import json | |
| 6 import logging | |
| 7 import os | |
| 8 import time | |
| 9 | |
| 10 from abc import abstractmethod | |
| 11 from builtins import next | |
| 12 from builtins import object | |
| 13 from builtins import str | |
| 14 | |
| 15 from BCBio import GFF | |
| 16 | |
| 17 from Bio import SeqIO | |
| 18 | |
| 19 from future import standard_library | |
| 20 | |
| 21 import requests | |
| 22 | |
| 23 | |
| 24 standard_library.install_aliases() | |
| 25 try: | |
| 26 import StringIO as io | |
| 27 except BaseException: | |
| 28 import io | |
| 29 logging.getLogger("requests").setLevel(logging.CRITICAL) | |
| 30 log = logging.getLogger() | |
| 31 | |
| 32 | |
| 33 ############################################# | |
| 34 # BEGIN IMPORT OF CACHING LIBRARY # | |
| 35 ############################################# | |
| 36 # This code is licensed under the MIT # | |
| 37 # License and is a copy of code publicly # | |
| 38 # available in rev. # | |
| 39 # e27332bc82f4e327aedaec17c9b656ae719322ed # | |
| 40 # of https://github.com/tkem/cachetools/ # | |
| 41 ############################################# | |
| 42 | |
# ``collections.MutableMapping`` was a deprecated alias removed in
# Python 3.10; resolve the ABC portably (Python 2 keeps it directly on
# ``collections``, Python 3 exposes it on ``collections.abc``).
_MutableMapping = getattr(collections, 'abc', collections).MutableMapping


class DefaultMapping(_MutableMapping):
    """Minimal mutable-mapping base class for the bundled cache code."""

    __slots__ = ()

    @abstractmethod
    def __contains__(self, key):  # pragma: nocover
        return False

    @abstractmethod
    def __getitem__(self, key):  # pragma: nocover
        # Honour an optional __missing__ hook, mirroring dict semantics.
        if hasattr(self.__class__, '__missing__'):
            return self.__class__.__missing__(self, key)
        else:
            raise KeyError(key)

    def get(self, key, default=None):
        """Return ``self[key]`` if present, else *default*."""
        if key in self:
            return self[key]
        else:
            return default

    # Unique sentinel so pop() can tell "no default given" from default=None.
    __marker = object()

    def pop(self, key, default=__marker):
        """Remove *key* and return its value; KeyError if absent and no default."""
        if key in self:
            value = self[key]
            del self[key]
        elif default is self.__marker:
            raise KeyError(key)
        else:
            value = default
        return value

    def setdefault(self, key, default=None):
        """Return the value for *key*, inserting *default* if absent."""
        if key in self:
            value = self[key]
        else:
            self[key] = value = default
        return value


# Plain dicts satisfy this interface.
DefaultMapping.register(dict)
| 85 | |
| 86 | |
| 87 class _DefaultSize(object): | |
| 88 def __getitem__(self, _): | |
| 89 return 1 | |
| 90 | |
| 91 def __setitem__(self, _, value): | |
| 92 assert value == 1 | |
| 93 | |
| 94 def pop(self, _): | |
| 95 return 1 | |
| 96 | |
| 97 | |
class Cache(DefaultMapping):
    """Mutable mapping to serve as a simple cache or cache base class."""

    # Class-level default size table: every value counts as 1 unit until
    # __init__ installs a real per-key dict (only when a custom getsizeof
    # callable is supplied).
    __size = _DefaultSize()

    def __init__(self, maxsize, missing=None, getsizeof=None):
        # Optional callables shadow the static fallbacks defined below via
        # instance attributes (note the name mangling: _Cache__missing etc.).
        if missing:
            self.__missing = missing
        if getsizeof:
            self.__getsizeof = getsizeof
            self.__size = dict()
        self.__data = dict()
        self.__currsize = 0
        self.__maxsize = maxsize

    def __repr__(self):
        return '%s(%r, maxsize=%r, currsize=%r)' % (
            self.__class__.__name__,
            list(self.__data.items()),
            self.__maxsize,
            self.__currsize,
        )

    def __getitem__(self, key):
        try:
            return self.__data[key]
        except KeyError:
            # Delegate to __missing__, which may compute and cache a value.
            return self.__missing__(key)

    def __setitem__(self, key, value):
        maxsize = self.__maxsize
        size = self.getsizeof(value)
        if size > maxsize:
            # A single value larger than the whole cache can never fit.
            raise ValueError('value too large')
        if key not in self.__data or self.__size[key] < size:
            # Evict entries until the new (or grown) value fits the budget.
            while self.__currsize + size > maxsize:
                self.popitem()
        if key in self.__data:
            diffsize = size - self.__size[key]
        else:
            diffsize = size
        self.__data[key] = value
        self.__size[key] = size
        self.__currsize += diffsize

    def __delitem__(self, key):
        size = self.__size.pop(key)
        del self.__data[key]
        self.__currsize -= size

    def __contains__(self, key):
        return key in self.__data

    def __missing__(self, key):
        # Compute the missing value and try to cache it; oversized values
        # are still returned, just not stored.
        value = self.__missing(key)
        try:
            self.__setitem__(key, value)
        except ValueError:
            pass  # value too large
        return value

    def __iter__(self):
        return iter(self.__data)

    def __len__(self):
        return len(self.__data)

    @staticmethod
    def __getsizeof(value):
        # Default size measure: everything counts as one unit.
        return 1

    @staticmethod
    def __missing(key):
        # Default behaviour for an absent key: plain KeyError.
        raise KeyError(key)

    @property
    def maxsize(self):
        """The maximum size of the cache."""
        return self.__maxsize

    @property
    def currsize(self):
        """The current size of the cache."""
        return self.__currsize

    def getsizeof(self, value):
        """Return the size of a cache element's value."""
        return self.__getsizeof(value)
| 186 | |
| 187 | |
| 188 class _Link(object): | |
| 189 | |
| 190 __slots__ = ('key', 'expire', 'next', 'prev') | |
| 191 | |
| 192 def __init__(self, key=None, expire=None): | |
| 193 self.key = key | |
| 194 self.expire = expire | |
| 195 | |
| 196 def __reduce__(self): | |
| 197 return _Link, (self.key, self.expire) | |
| 198 | |
| 199 def unlink(self): | |
| 200 next = self.next | |
| 201 prev = self.prev | |
| 202 prev.next = next | |
| 203 next.prev = prev | |
| 204 | |
| 205 | |
| 206 class _Timer(object): | |
| 207 | |
| 208 def __init__(self, timer): | |
| 209 self.__timer = timer | |
| 210 self.__nesting = 0 | |
| 211 | |
| 212 def __call__(self): | |
| 213 if self.__nesting == 0: | |
| 214 return self.__timer() | |
| 215 else: | |
| 216 return self.__time | |
| 217 | |
| 218 def __enter__(self): | |
| 219 if self.__nesting == 0: | |
| 220 self.__time = time = self.__timer() | |
| 221 else: | |
| 222 time = self.__time | |
| 223 self.__nesting += 1 | |
| 224 return time | |
| 225 | |
| 226 def __exit__(self, *exc): | |
| 227 self.__nesting -= 1 | |
| 228 | |
| 229 def __reduce__(self): | |
| 230 return _Timer, (self.__timer,) | |
| 231 | |
| 232 def __getattr__(self, name): | |
| 233 return getattr(self.__timer, name) | |
| 234 | |
| 235 | |
class TTLCache(Cache):
    """LRU Cache implementation with per-item time-to-live (TTL) value."""

    def __init__(self, maxsize, ttl, timer=time.time, missing=None,
                 getsizeof=None):
        Cache.__init__(self, maxsize, missing, getsizeof)
        # Sentinel root of a circular doubly-linked list ordered by expiry
        # time (oldest expiry right after root).
        self.__root = root = _Link()
        root.prev = root.next = root
        # key -> _Link, maintained in LRU order (least recent first).
        self.__links = collections.OrderedDict()
        self.__timer = _Timer(timer)
        self.__ttl = ttl

    def __contains__(self, key):
        try:
            link = self.__links[key]  # no reordering
        except KeyError:
            return False
        else:
            # Present only when the entry has not yet expired.
            return not (link.expire < self.__timer())

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        try:
            link = self.__getlink(key)
        except KeyError:
            expired = False
        else:
            expired = link.expire < self.__timer()
        if expired:
            # An expired entry behaves exactly like a missing one.
            return self.__missing__(key)
        else:
            return cache_getitem(self, key)

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        with self.__timer as time:
            # Purge expired entries first so eviction targets live ones.
            self.expire(time)
            cache_setitem(self, key, value)
        try:
            link = self.__getlink(key)
        except KeyError:
            self.__links[key] = link = _Link(key)
        else:
            link.unlink()
        link.expire = time + self.__ttl
        # Insert just before root, i.e. at the freshest end of the list.
        link.next = root = self.__root
        link.prev = prev = root.prev
        prev.next = root.prev = link

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        cache_delitem(self, key)
        link = self.__links.pop(key)
        link.unlink()
        if link.expire < self.__timer():
            # The entry had already expired: report it as missing.
            raise KeyError(key)

    def __iter__(self):
        root = self.__root
        curr = root.next
        while curr is not root:
            # "freeze" time for iterator access
            with self.__timer as time:
                if not (curr.expire < time):
                    yield curr.key
            curr = curr.next

    def __len__(self):
        root = self.__root
        curr = root.next
        time = self.__timer()
        count = len(self.__links)
        # Links are expiry-ordered, so all expired entries sit at the front.
        while curr is not root and curr.expire < time:
            count -= 1
            curr = curr.next
        return count

    def __setstate__(self, state):
        self.__dict__.update(state)
        # Rebuild the linked list; node pointers are not pickled.
        root = self.__root
        root.prev = root.next = root
        for link in sorted(self.__links.values(), key=lambda obj: obj.expire):
            link.next = root
            link.prev = prev = root.prev
            prev.next = root.prev = link
        self.expire(self.__timer())

    def __repr__(self, cache_repr=Cache.__repr__):
        with self.__timer as time:
            self.expire(time)
            return cache_repr(self)

    @property
    def currsize(self):
        # Purge before reporting so the size reflects only live entries.
        with self.__timer as time:
            self.expire(time)
            return super(TTLCache, self).currsize

    @property
    def timer(self):
        """The timer function used by the cache."""
        return self.__timer

    @property
    def ttl(self):
        """The time-to-live value of the cache's items."""
        return self.__ttl

    def expire(self, time=None):
        """Remove expired items from the cache."""
        if time is None:
            time = self.__timer()
        root = self.__root
        curr = root.next
        links = self.__links
        cache_delitem = Cache.__delitem__
        while curr is not root and curr.expire < time:
            cache_delitem(self, curr.key)
            del links[curr.key]
            next = curr.next
            curr.unlink()
            curr = next

    def clear(self):
        with self.__timer as time:
            self.expire(time)
            Cache.clear(self)

    def get(self, *args, **kwargs):
        # Freeze time so expiry checks within the call are consistent.
        with self.__timer:
            return Cache.get(self, *args, **kwargs)

    def pop(self, *args, **kwargs):
        with self.__timer:
            return Cache.pop(self, *args, **kwargs)

    def setdefault(self, *args, **kwargs):
        with self.__timer:
            return Cache.setdefault(self, *args, **kwargs)

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used that
        has not already expired.

        """
        with self.__timer as time:
            self.expire(time)
            try:
                key = next(iter(self.__links))
            except StopIteration:
                raise KeyError('%s is empty' % self.__class__.__name__)
            else:
                return (key, self.pop(key))

    if hasattr(collections.OrderedDict, 'move_to_end'):
        def __getlink(self, key):
            value = self.__links[key]
            # LRU bookkeeping: bump the key to the most-recent end.
            self.__links.move_to_end(key)
            return value
    else:
        # Fallback for OrderedDict implementations without move_to_end.
        def __getlink(self, key):
            value = self.__links.pop(key)
            self.__links[key] = value
            return value
| 397 | |
| 398 | |
| 399 ############################################# | |
| 400 # END IMPORT OF CACHING LIBRARY # | |
| 401 ############################################# | |
| 402 | |
| 403 | |
# Shared module-level caches for Apollo API responses (TTLCache is the
# bundled cachetools implementation above).
cache = TTLCache(
    100,  # Up to 100 items
    5 * 60  # 5 minute cache life
)
# Small, short-lived cache dedicated to the server's user list.
userCache = TTLCache(
    2,  # Up to 2 items
    60  # 1 minute cache life
)
| 412 | |
| 413 | |
class UnknownUserException(Exception):
    """Raised when an email/username does not match any Apollo user."""
| 416 | |
| 417 | |
def WAAuth(parser):
    """Attach the standard Apollo connection arguments to *parser*."""
    for name, helptext in (
            ('apollo', 'Complete Apollo URL'),
            ('username', 'WA Username'),
            ('password', 'WA Password')):
        parser.add_argument(name, help=helptext)
| 422 | |
| 423 | |
def OrgOrGuess(parser):
    """Register the three alternative organism selectors on *parser*."""
    # A JSON dump from Apollo can supply the common name directly.
    parser.add_argument(
        '--org_json',
        type=argparse.FileType("r"),
        help='Apollo JSON output, source for common name',
    )
    # Or the caller names the organism / gives its numeric ID.
    parser.add_argument('--org_raw', help='Common Name')
    parser.add_argument('--org_id', help='Organism ID')
| 428 | |
| 429 | |
def CnOrGuess(parser):
    """Register organism selectors plus sequence-name selectors."""
    OrgOrGuess(parser)
    parser.add_argument(
        '--seq_fasta',
        type=argparse.FileType("r"),
        help='Fasta file, IDs used as sequence sources',
    )
    parser.add_argument('--seq_raw', nargs='*', help='Sequence Names')
| 434 | |
| 435 | |
def GuessOrg(args, wa):
    """Resolve the organism common name(s) from parsed CLI arguments.

    Checks --org_json, then --org_raw, then --org_id; returns a list of
    common names and raises when nothing usable was supplied.
    """
    if args.org_json:
        names = [entry.get('commonName', None)
                 for entry in json.load(args.org_json)]
        return [name for name in names if name is not None]
    if args.org_raw:
        stripped = args.org_raw.strip()
        if stripped:
            return [stripped]
        raise Exception("Organism Common Name not provided")
    if args.org_id:
        return [wa.organisms.findOrganismById(args.org_id).get('commonName', None)]
    raise Exception("Organism Common Name not provided")
| 452 | |
| 453 | |
def GuessCn(args, wa):
    """Return ``(organism common names, sequence names)`` from CLI args."""
    org = GuessOrg(args, wa)
    seqs = []
    if args.seq_fasta:
        # A fasta file wins: use every record id it contains.
        seqs = [rec.id for rec in SeqIO.parse(args.seq_fasta, 'fasta')]
    elif args.seq_raw:
        # Otherwise accept the raw list, dropping blank entries.
        seqs = [name.strip() for name in args.seq_raw if len(name.strip()) > 0]

    return org, seqs
| 466 | |
| 467 | |
def AssertUser(user_list):
    """Return the single user in *user_list*.

    Raises UnknownUserException when empty, Exception when ambiguous.
    """
    if not user_list:
        raise UnknownUserException()
    if len(user_list) == 1:
        return user_list[0]
    raise Exception("Too many users!")
| 475 | |
| 476 | |
def AssertAdmin(user):
    """Return True for an ADMIN user; raise for anyone else."""
    if user.role != 'ADMIN':
        raise Exception("User is not an administrator. Permission denied")
    return True
| 482 | |
| 483 | |
class WebApolloInstance(object):
    """Connection handle for one Apollo server plus its service clients."""

    def __init__(self, url, username, password):
        self.apollo_url = url
        self.username = username
        self.password = password

        # One client per Apollo service endpoint, all sharing this instance.
        for attr, factory in (
                ('annotations', AnnotationsClient),
                ('groups', GroupsClient),
                ('io', IOClient),
                ('organisms', OrganismsClient),
                ('users', UsersClient),
                ('metrics', MetricsClient),
                ('bio', RemoteRecord),
                ('status', StatusClient),
                ('canned_comments', CannedCommentsClient),
                ('canned_keys', CannedKeysClient),
                ('canned_values', CannedValuesClient)):
            setattr(self, attr, factory(self))

    def __str__(self):
        return '<WebApolloInstance at %s>' % self.apollo_url

    def requireUser(self, email):
        """Return the user whose username equals *email* (cached lookup)."""
        cacheKey = 'user-list'
        try:
            # Get the cached value
            data = userCache[cacheKey]
        except KeyError:
            # Cache miss or expiry: re-request the user list from Apollo.
            data = self.users.loadUsers()
            userCache[cacheKey] = data

        return AssertUser([u for u in data if u.username == email])
| 519 | |
| 520 | |
class GroupObj(object):
    """Thin wrapper around a group record returned by Apollo."""

    def __init__(self, **kwargs):
        self.name = kwargs['name']
        # Not every payload carries the numeric id.
        if 'id' in kwargs:
            self.groupId = kwargs['id']
| 527 | |
| 528 | |
class UserObj(object):
    """Wrapper around an Apollo user record."""

    ROLE_USER = 'USER'
    ROLE_ADMIN = 'ADMIN'

    def __init__(self, **kwargs):
        # Typical payload: 'userId', 'firstName', 'lastName', 'username' (email)
        for key, value in kwargs.items():
            setattr(self, key, value)

        # Replace raw group dicts with GroupObj wrappers.
        if 'groups' in kwargs:
            self.groups = [GroupObj(**g) for g in kwargs['groups']]

        # Remember the original keys so toDict() can round-trip.
        self.__props = kwargs.keys()

    def isAdmin(self):
        """True when the record carries the ADMIN role."""
        return getattr(self, 'role', None) == self.ROLE_ADMIN

    def refresh(self, wa):
        # This method requires some sleeping usually.
        fresh = wa.users.loadUser(self).toDict()
        for key in fresh:
            setattr(self, key, fresh[key])

    def toDict(self):
        """Return the originally supplied fields as a plain dict."""
        return dict((prop, getattr(self, prop)) for prop in self.__props)

    def orgPerms(self):
        """Yield organism-permission dicts, decoding packed JSON in place."""
        for orgPer in self.organismPermissions:
            # Permissions longer than '[]' are a JSON-encoded string.
            if len(orgPer['permissions']) > 2:
                orgPer['permissions'] = json.loads(orgPer['permissions'])
            yield orgPer

    def __str__(self):
        return '<User %s: %s %s <%s>>' % (self.userId, self.firstName,
                                          self.lastName, self.username)
| 572 | |
| 573 | |
class Client(object):
    """Base class for Apollo REST clients.

    Subclasses define ``CLIENT_BASE`` (the service path under the Apollo
    URL); credentials come from the owning WebApolloInstance.
    """

    def __init__(self, webapolloinstance, **requestArgs):
        self._wa = webapolloinstance

        # 'verify' (TLS verification) is passed to requests explicitly;
        # everything else in requestArgs is forwarded verbatim.
        self.__verify = requestArgs.get('verify', True)
        self._requestArgs = requestArgs

        if 'verify' in self._requestArgs:
            del self._requestArgs['verify']

    def request(self, clientMethod, data, post_params=None, isJson=True):
        """POST *data* (as JSON) to ``CLIENT_BASE + clientMethod``.

        Returns the decoded JSON body (credentials stripped) or, when
        *isJson* is false, the raw response text. Raises on any status
        other than 200/302.
        """
        # Fixed: previously used the mutable default ``post_params={}``.
        if post_params is None:
            post_params = {}

        url = self._wa.apollo_url + self.CLIENT_BASE + clientMethod

        headers = {
            'Content-Type': 'application/json'
        }

        # NOTE(review): this mutates the caller's ``data`` dict in place
        # by injecting the credentials — callers appear to rely on fresh
        # dicts, but confirm before changing.
        data.update({
            'username': self._wa.username,
            'password': self._wa.password,
        })

        r = requests.post(url, data=json.dumps(data), headers=headers,
                          verify=self.__verify, params=post_params,
                          allow_redirects=False, **self._requestArgs)

        if r.status_code == 200 or r.status_code == 302:
            if isJson:
                d = r.json()
                # Strip credentials the server may echo back.
                if 'username' in d:
                    del d['username']
                if 'password' in d:
                    del d['password']
                return d
            else:
                return r.text

        # @see self.body for HTTP response body
        raise Exception("Unexpected response from apollo %s: %s" %
                        (r.status_code, r.text))

    def get(self, clientMethod, get_params):
        """GET ``CLIENT_BASE + clientMethod`` with *get_params*.

        Returns the decoded JSON body (credentials stripped); raises on
        any non-200 status.
        """
        url = self._wa.apollo_url + self.CLIENT_BASE + clientMethod
        headers = {}

        r = requests.get(url, headers=headers, verify=self.__verify,
                         params=get_params, **self._requestArgs)
        if r.status_code == 200:
            d = r.json()
            if 'username' in d:
                del d['username']
            if 'password' in d:
                del d['password']
            return d
        # @see self.body for HTTP response body
        raise Exception("Unexpected response from apollo %s: %s" %
                        (r.status_code, r.text))
| 631 | |
| 632 | |
class MetricsClient(Client):
    """Client for the Apollo /metrics/ endpoint."""

    CLIENT_BASE = '/metrics/'

    def getServerMetrics(self):
        """Fetch the server metrics blob."""
        return self.get('metrics', {})
| 638 | |
| 639 | |
class AnnotationsClient(Client):
    """Client for the Apollo /annotationEditor/ endpoints.

    Most calls require the sequence/organism context to be selected first
    via setSequence(); _update_data() merges that context into every
    request payload.
    """

    CLIENT_BASE = '/annotationEditor/'

    def _update_data(self, data):
        """Merge the sequence/organism context into *data* (in place)."""
        if not hasattr(self, '_extra_data'):
            raise Exception("Please call setSequence first")

        data.update(self._extra_data)
        return data

    def setSequence(self, sequence, organism):
        """Select the sequence and organism that later calls operate on."""
        self._extra_data = {
            'sequence': sequence,
            'organism': organism,
        }

    def setDescription(self, featureDescriptions):
        """Set descriptions on the given feature payloads."""
        data = {
            'features': featureDescriptions,
        }
        data = self._update_data(data)
        return self.request('setDescription', data)

    def setName(self, uniquename, name):
        """Set the name of a single feature."""
        # TODO
        data = {
            'features': [
                {
                    'uniquename': uniquename,
                    'name': name,
                }
            ],
        }
        data = self._update_data(data)
        return self.request('setName', data)

    def setNames(self, features):
        """Set names on several features at once."""
        # TODO
        data = {
            'features': features,
        }
        data = self._update_data(data)
        return self.request('setName', data)

    def setStatus(self, statuses):
        """Set the status field on the given features."""
        # TODO
        data = {
            'features': statuses,
        }
        data = self._update_data(data)
        return self.request('setStatus', data)

    def setSymbol(self, symbols):
        """Set the symbol field on the given features."""
        data = {
            'features': symbols,
        }
        # Consistency fix: previously updated self._extra_data into data
        # directly; _update_data does the same merge but gives a clear
        # error when setSequence() has not been called.
        data = self._update_data(data)
        return self.request('setSymbol', data)

    def getComments(self, feature_id):
        """Fetch the comments attached to one feature."""
        data = {
            'features': [{'uniquename': feature_id}],
        }
        data = self._update_data(data)
        return self.request('getComments', data)

    def addComments(self, feature_id, comments):
        """Attach *comments* to a feature.

        TODO: This is probably not great and will delete comments, if I
        had to guess...
        """
        data = {
            'features': [
                {
                    'uniquename': feature_id,
                    'comments': comments
                }
            ],
        }
        data = self._update_data(data)
        return self.request('addComments', data)

    def addAttributes(self, feature_id, attributes):
        """Add non-reserved tag/value properties to a feature.

        *attributes* maps each tag to a list of values.
        """
        nrps = []
        for (key, values) in attributes.items():
            for value in values:
                nrps.append({
                    'tag': key,
                    'value': value
                })

        data = {
            'features': [
                {
                    'uniquename': feature_id,
                    'non_reserved_properties': nrps
                }
            ]
        }
        data = self._update_data(data)
        return self.request('addAttribute', data)

    def deleteAttribute(self, feature_id, key, value):
        """Remove one tag/value property from a feature."""
        data = {
            'features': [
                {
                    'uniquename': feature_id,
                    'non_reserved_properties': [
                        {'tag': key, 'value': value}
                    ]
                }
            ]
        }
        data = self._update_data(data)
        # Fixed: this previously posted to 'addAttribute', which re-added
        # the attribute instead of deleting it.
        return self.request('deleteAttribute', data)

    def getFeatures(self):
        """List all features on the current sequence."""
        data = self._update_data({})
        return self.request('getFeatures', data)

    def getSequence(self, uniquename):
        """Fetch the residues of one feature."""
        data = {
            'features': [
                {'uniquename': uniquename}
            ]
        }
        data = self._update_data(data)
        return self.request('getSequence', data)

    def addFeature(self, feature, trustme=False):
        """Create a feature; requires trustme=True (API docs are unclear)."""
        if not trustme:
            raise NotImplementedError("Waiting on better docs from project. If you know what you are doing, pass trustme=True to this function.")

        data = {
            'features': feature,
        }
        data = self._update_data(data)
        return self.request('addFeature', data)

    def addTranscript(self, transcript, trustme=False):
        """Create a transcript; requires trustme=True (API docs are unclear)."""
        if not trustme:
            raise NotImplementedError("Waiting on better docs from project. If you know what you are doing, pass trustme=True to this function.")

        data = {}
        data.update(transcript)
        data = self._update_data(data)
        return self.request('addTranscript', data)

    # addExon, add/delete/updateComments, addTranscript skipped due to docs

    def duplicateTranscript(self, transcriptId):
        """Duplicate an existing transcript."""
        data = {
            'features': [{'uniquename': transcriptId}]
        }

        data = self._update_data(data)
        return self.request('duplicateTranscript', data)

    def setTranslationStart(self, uniquename, start):
        """Set the CDS translation start (fmin) of a feature."""
        data = {
            'features': [{
                'uniquename': uniquename,
                'location': {
                    'fmin': start
                }
            }]
        }
        data = self._update_data(data)
        return self.request('setTranslationStart', data)

    def setTranslationEnd(self, uniquename, end):
        """Set the CDS translation end (fmax) of a feature."""
        data = {
            'features': [{
                'uniquename': uniquename,
                'location': {
                    'fmax': end
                }
            }]
        }
        data = self._update_data(data)
        return self.request('setTranslationEnd', data)

    def setLongestOrf(self, uniquename):
        """Ask the server to select the longest ORF for a feature."""
        data = {
            'features': [{
                'uniquename': uniquename,
            }]
        }
        data = self._update_data(data)
        return self.request('setLongestOrf', data)

    def setBoundaries(self, uniquename, start, end):
        """Set both fmin and fmax of a feature."""
        data = {
            'features': [{
                'uniquename': uniquename,
                'location': {
                    'fmin': start,
                    'fmax': end,
                }
            }]
        }
        data = self._update_data(data)
        return self.request('setBoundaries', data)

    def getSequenceAlterations(self):
        """List sequence alterations on the current sequence."""
        data = {
        }
        data = self._update_data(data)
        return self.request('getSequenceAlterations', data)

    def setReadthroughStopCodon(self, uniquename):
        """Mark a feature's stop codon as read-through."""
        data = {
            'features': [{
                'uniquename': uniquename,
            }]
        }
        data = self._update_data(data)
        return self.request('setReadthroughStopCodon', data)

    def deleteSequenceAlteration(self, uniquename):
        """Delete one sequence alteration."""
        data = {
            'features': [{
                'uniquename': uniquename,
            }]
        }
        data = self._update_data(data)
        return self.request('deleteSequenceAlteration', data)

    def flipStrand(self, uniquenames):
        """Flip the strand of each named feature."""
        data = {
            'features': [
                {'uniquename': x} for x in uniquenames
            ]
        }
        data = self._update_data(data)
        return self.request('flipStrand', data)

    def mergeExons(self, exonA, exonB):
        """Merge two exons into one."""
        data = {
            'features': [
                {'uniquename': exonA},
                {'uniquename': exonB},
            ]
        }
        data = self._update_data(data)
        return self.request('mergeExons', data)

    # def splitExon(): pass

    def deleteFeatures(self, uniquenames):
        """Delete each named feature; *uniquenames* must be iterable."""
        # collections.Iterable was removed in Python 3.10; resolve the ABC
        # portably (Python 2 keeps it directly on ``collections``).
        assert isinstance(uniquenames,
                          getattr(collections, 'abc', collections).Iterable)
        data = {
            'features': [
                {'uniquename': x} for x in uniquenames
            ]
        }
        data = self._update_data(data)
        return self.request('deleteFeature', data)

    # def deleteExon(): pass

    # def makeIntron(self, uniquename, ): pass

    def getSequenceSearchTools(self):
        """List the configured sequence-search tools."""
        return self.get('getSequenceSearchTools', {})

    def getCannedComments(self):
        """List the canned comments (via GET)."""
        return self.get('getCannedComments', {})

    def searchSequence(self, searchTool, sequence, database):
        """Run a sequence search (e.g. BLAST) against *database*."""
        data = {
            'key': searchTool,
            'residues': sequence,
            'database_id': database,
        }
        return self.request('searchSequences', data)

    def getGff3(self, uniquenames):
        """Export the named features as GFF3 text."""
        assert isinstance(uniquenames,
                          getattr(collections, 'abc', collections).Iterable)
        data = {
            'features': [
                {'uniquename': x} for x in uniquenames
            ]
        }
        data = self._update_data(data)
        return self.request('getGff3', data, isJson=False)
| 923 | |
| 924 | |
class GroupsClient(Client):
    """Client for the Apollo /group/ endpoints."""

    CLIENT_BASE = '/group/'

    def createGroup(self, name):
        """Create a new group called *name*."""
        return self.request('createGroup', {'name': name})

    def getOrganismPermissionsForGroup(self, group):
        """Fetch organism permissions for *group*."""
        payload = {
            'id': group.groupId,
            'name': group.name,
        }
        return self.request('getOrganismPermissionsForGroup', payload)

    def loadGroup(self, group):
        """Reload *group* by its numeric id."""
        return self.loadGroupById(group.groupId)

    def loadGroupById(self, groupId):
        """Load one group by id."""
        res = self.request('loadGroups', {'groupId': groupId})
        if isinstance(res, list):
            # We can only match one, right?
            return GroupObj(**res[0])
        return res

    def loadGroupByName(self, name):
        """Load one group by name."""
        res = self.request('loadGroups', {'name': name})
        if isinstance(res, list):
            # We can only match one, right?
            return GroupObj(**res[0])
        return res

    def loadGroups(self, group=None):
        """Load all groups, optionally filtered to the name *group*."""
        groups = [GroupObj(**raw) for raw in self.request('loadGroups', {})]
        if group is not None:
            groups = [g for g in groups if g.name == group]
        return groups

    def deleteGroup(self, group):
        """Delete *group* from the server."""
        payload = {
            'id': group.groupId,
            'name': group.name,
        }
        return self.request('deleteGroup', payload)

    def updateGroup(self, group, newName):
        """Rename *group* to *newName*."""
        # TODO: Sure would be nice if modifying ``group.name`` would invoke
        # this?
        payload = {
            'id': group.groupId,
            'name': newName,
        }
        return self.request('updateGroup', payload)

    def updateOrganismPermission(self, group, organismName,
                                 administrate=False, write=False, read=False,
                                 export=False):
        """Set the group's permission flags on one organism."""
        payload = {
            'groupId': group.groupId,
            'organism': organismName,
            'ADMINISTRATE': administrate,
            'WRITE': write,
            'EXPORT': export,
            'READ': read,
        }
        return self.request('updateOrganismPermission', payload)

    def updateMembership(self, group, users):
        """Replace the group's membership with *users* (matched by email)."""
        payload = {
            'groupId': group.groupId,
            'user': [member.email for member in users]
        }
        return self.request('updateMembership', payload)
| 1001 | |
| 1002 | |
class IOClient(Client):
    """Client for the Apollo /IOService/ import/export endpoints."""

    CLIENT_BASE = '/IOService/'

    def write(self, exportType='FASTA', seqType='peptide',
              exportFormat='text', sequences=None, organism=None,
              output='text', exportAllSequences=False,
              exportGff3Fasta=False):
        """Request an export from Apollo.

        Returns the raw export text, or (when output='file') a JSON blob
        containing the download UUID. Raises Exception on invalid option
        values.
        """
        if exportType not in ('FASTA', 'GFF3'):
            raise Exception("exportType must be one of FASTA, GFF3")

        if seqType not in ('peptide', 'cds', 'cdna', 'genomic'):
            # Fixed: the message previously said "dna" although the check
            # accepts "cdna".
            raise Exception("seqType must be one of peptide, cds, cdna, genomic")

        if exportFormat not in ('gzip', 'text'):
            raise Exception("exportFormat must be one of gzip, text")

        if output not in ('file', 'text'):
            raise Exception("output must be one of file, text")

        data = {
            'type': exportType,
            'seqType': seqType,
            'format': exportFormat,
            'sequences': sequences,
            'organism': organism,
            'output': output,
            'exportAllSequences': exportAllSequences,
            'exportGff3Fasta': exportGff3Fasta,
        }

        # Only the 'file' output mode returns JSON (a download UUID).
        return self.request('write', data, isJson=output == 'file')

    def download(self, uuid, outputFormat='gzip'):
        """Download a previously requested export by its *uuid*."""
        if outputFormat.lower() not in ('gzip', 'text'):
            # Fixed: the message previously listed "file, text" although
            # the accepted values are gzip and text.
            raise Exception("outputFormat must be one of gzip, text")

        data = {
            'format': outputFormat,
            'uuid': uuid,
        }
        return self.request('write', data)
| 1045 | |
| 1046 | |
class StatusClient(Client):
    """Client for managing the set of available annotation statuses."""

    CLIENT_BASE = '/availableStatus/'

    def addStatus(self, value):
        """Create a new status with the given value."""
        return self.request('createStatus', {'value': value})

    def findAllStatuses(self):
        """Return every status known to the server."""
        return self.request('showStatus', {})

    def findStatusByValue(self, value):
        """Return the first status whose value matches, else raise."""
        for status in self.findAllStatuses():
            if status['value'] == value:
                return status
        raise Exception("Unknown status value")

    def findStatusById(self, id_number):
        """Return the first status whose id matches, else raise."""
        wanted = str(id_number)
        for status in self.findAllStatuses():
            if str(status['id']) == wanted:
                return status
        raise Exception("Unknown ID")

    def updateStatus(self, id_number, new_value):
        """Change the value of the status identified by *id_number*."""
        payload = {'id': id_number, 'new_value': new_value}
        return self.request('updateStatus', payload)

    def deleteStatus(self, id_number):
        """Remove the status identified by *id_number*."""
        return self.request('deleteStatus', {'id': id_number})
| 1090 | |
| 1091 | |
class CannedCommentsClient(Client):
    """Client for managing canned (predefined) feature comments."""

    CLIENT_BASE = '/cannedComment/'

    def addComment(self, comment, metadata=""):
        """Create a canned comment with optional metadata."""
        payload = {'comment': comment, 'metadata': metadata}
        return self.request('createComment', payload)

    def findAllComments(self):
        """Return every canned comment on the server."""
        return self.request('showComment', {})

    def findCommentByValue(self, value):
        """Return the first canned comment whose text matches, else raise."""
        for entry in self.findAllComments():
            if entry['comment'] == value:
                return entry
        raise Exception("Unknown comment")

    def findCommentById(self, id_number):
        """Return the first canned comment whose id matches, else raise."""
        wanted = str(id_number)
        for entry in self.findAllComments():
            if str(entry['id']) == wanted:
                return entry
        raise Exception("Unknown ID")

    def updateComment(self, id_number, new_value, metadata=None):
        """Change the text (and optionally metadata) of a canned comment."""
        payload = {'id': id_number, 'new_comment': new_value}
        if metadata is not None:
            payload['metadata'] = metadata
        return self.request('updateComment', payload)

    def deleteComment(self, id_number):
        """Remove the canned comment identified by *id_number*."""
        return self.request('deleteComment', {'id': id_number})
| 1139 | |
| 1140 | |
class CannedKeysClient(Client):
    """Client for managing canned (predefined) attribute keys."""

    CLIENT_BASE = '/cannedKey/'

    def addKey(self, key, metadata=""):
        """Create a canned key with optional metadata."""
        payload = {'key': key, 'metadata': metadata}
        return self.request('createKey', payload)

    def findAllKeys(self):
        """Return every canned key on the server."""
        return self.request('showKey', {})

    def findKeyByValue(self, value):
        """Return the first canned key whose label matches, else raise."""
        # The server exposes the key text under 'label', not 'key'.
        for entry in self.findAllKeys():
            if entry['label'] == value:
                return entry
        raise Exception("Unknown key")

    def findKeyById(self, id_number):
        """Return the first canned key whose id matches, else raise."""
        wanted = str(id_number)
        for entry in self.findAllKeys():
            if str(entry['id']) == wanted:
                return entry
        raise Exception("Unknown ID")

    def updateKey(self, id_number, new_key, metadata=None):
        """Change the text (and optionally metadata) of a canned key."""
        payload = {'id': id_number, 'new_key': new_key}
        if metadata is not None:
            payload['metadata'] = metadata
        return self.request('updateKey', payload)

    def deleteKey(self, id_number):
        """Remove the canned key identified by *id_number*."""
        return self.request('deleteKey', {'id': id_number})
| 1188 | |
| 1189 | |
class CannedValuesClient(Client):
    """Client for managing canned (predefined) attribute values."""

    CLIENT_BASE = '/cannedValue/'

    def addValue(self, value, metadata=""):
        """Create a canned value with optional metadata."""
        payload = {'value': value, 'metadata': metadata}
        return self.request('createValue', payload)

    def findAllValues(self):
        """Return every canned value on the server."""
        return self.request('showValue', {})

    def findValueByValue(self, value):
        """Return the first canned value whose label matches, else raise."""
        # The server exposes the value text under 'label', not 'value'.
        for entry in self.findAllValues():
            if entry['label'] == value:
                return entry
        raise Exception("Unknown value")

    def findValueById(self, id_number):
        """Return the first canned value whose id matches, else raise."""
        wanted = str(id_number)
        for entry in self.findAllValues():
            if str(entry['id']) == wanted:
                return entry
        raise Exception("Unknown ID")

    def updateValue(self, id_number, new_value, metadata=None):
        """Change the text (and optionally metadata) of a canned value."""
        payload = {'id': id_number, 'new_value': new_value}
        if metadata is not None:
            payload['metadata'] = metadata
        return self.request('updateValue', payload)

    def deleteValue(self, id_number):
        """Remove the canned value identified by *id_number*."""
        return self.request('deleteValue', {'id': id_number})
| 1237 | |
| 1238 | |
class OrganismsClient(Client):
    """Client for creating, listing and modifying organisms."""

    CLIENT_BASE = '/organism/'

    def addOrganism(self, commonName, directory, blatdb=None, species=None,
                    genus=None, public=False):
        """Register a new organism backed by a JBrowse data *directory*.

        blatdb/genus/species are only sent when provided.
        """
        payload = {
            'commonName': commonName,
            'directory': directory,
            'publicMode': public,
        }
        for key, val in (('blatdb', blatdb), ('genus', genus),
                         ('species', species)):
            if val is not None:
                payload[key] = val

        return self.request('addOrganism', payload)

    def findAllOrganisms(self):
        """Return every organism known to the server."""
        return self.request('findAllOrganisms', {})

    def findOrganismByCn(self, cn):
        """Return the first organism whose commonName matches, else raise."""
        for org in self.findAllOrganisms():
            if org['commonName'] == cn:
                return org
        raise Exception("Unknown common name")

    def findOrganismById(self, id_number):
        """Return the first organism whose id matches, else raise."""
        wanted = str(id_number)
        for org in self.findAllOrganisms():
            if str(org['id']) == wanted:
                return org
        raise Exception("Unknown ID")

    def deleteOrganism(self, organismId):
        """Remove an organism entirely."""
        return self.request('deleteOrganism', {'id': organismId})

    def deleteOrganismFeatures(self, organismId):
        """Remove all annotations from an organism, keeping the organism."""
        return self.request('deleteOrganismFeatures', {'id': organismId})

    def getSequencesForOrganism(self, commonName):
        """List the reference sequences of the named organism."""
        return self.request('getSequencesForOrganism', {'organism': commonName})

    def updateOrganismInfo(self, organismId, commonName, directory, blatdb=None, species=None, genus=None, public=False):
        """Update an existing organism's metadata; optional fields are
        only sent when provided."""
        payload = {
            'id': organismId,
            'name': commonName,
            'directory': directory,
            'publicMode': public,
        }
        for key, val in (('blatdb', blatdb), ('genus', genus),
                         ('species', species)):
            if val is not None:
                payload[key] = val

        return self.request('updateOrganismInfo', payload)
| 1303 | |
| 1304 | |
class UsersClient(Client):
    """Client for user accounts and their organism permissions."""

    CLIENT_BASE = '/user/'

    def getOrganismPermissionsForUser(self, user):
        """Return *user*'s organism permissions.

        Workaround: rather than calling the dedicated
        getOrganismPermissionsForUser endpoint, reload the full user
        record and read the permissions off it.
        """
        return self.loadUser(user).organismPermissions

    def updateOrganismPermission(self, user, organism, administrate=False,
                                 write=False, export=False, read=False):
        """Set *user*'s permission flags on *organism*; omitted flags
        default to False (revoked)."""
        payload = {
            'userId': user.userId,
            'organism': organism,
            'ADMINISTRATE': administrate,
            'WRITE': write,
            'EXPORT': export,
            'READ': read,
        }
        return self.request('updateOrganismPermission', payload)

    def loadUser(self, user):
        """Re-fetch *user* from the server via its userId."""
        return self.loadUserById(user.userId)

    def loadUserById(self, userId):
        """Fetch a single user by numeric id as a ``UserObj``."""
        found = self.request('loadUsers', {'userId': userId})
        if not isinstance(found, list):
            return found
        # A unique id can only ever match one user.
        return UserObj(**found[0])

    def loadUsers(self, email=None):
        """Fetch every user; optionally narrow by email/username."""
        everyone = [UserObj(**entry)
                    for entry in self.request('loadUsers', {})]
        if email is None:
            return everyone
        return [u for u in everyone if u.username == email]

    def addUserToGroup(self, group, user):
        """Add *user* to *group*."""
        payload = {'group': group.name, 'userId': user.userId}
        return self.request('addUserToGroup', payload)

    def removeUserFromGroup(self, group, user):
        """Remove *user* from *group*."""
        payload = {'group': group.name, 'userId': user.userId}
        return self.request('removeUserFromGroup', payload)

    def createUser(self, email, firstName, lastName, newPassword, role="user", groups=None):
        """Create a new Apollo account; *groups* defaults to none."""
        payload = {
            'firstName': firstName,
            'lastName': lastName,
            'email': email,
            'role': role,
            'groups': [] if groups is None else groups,
            'newPassword': newPassword,
        }
        return self.request('createUser', payload)

    def deleteUser(self, user):
        """Delete *user*'s account."""
        return self.request('deleteUser', {'userId': user.userId})

    def updateUser(self, user, email, firstName, lastName, newPassword):
        """Overwrite *user*'s profile fields and password."""
        payload = {
            'userId': user.userId,
            'email': email,
            'firstName': firstName,
            'lastName': lastName,
            'newPassword': newPassword,
        }
        return self.request('updateUser', payload)
| 1383 | |
| 1384 | |
class RemoteRecord(Client):
    """Pseudo-client that materialises an organism's annotations as
    Biopython-style records proxied back to the Apollo instance."""

    CLIENT_BASE = None

    def ParseRecord(self, cn):
        """Yield ``WebApolloSeqRecord`` objects for the organism whose
        common name is *cn*."""
        org = self._wa.organisms.findOrganismByCn(cn)
        self._wa.annotations.setSequence(org['commonName'], org['id'])

        # Export the organism as GFF3 with the FASTA embedded, then
        # parse it back out of an in-memory handle.
        gff3_text = self._wa.io.write(
            exportType='GFF3',
            seqType='genomic',
            exportAllSequences=False,
            exportGff3Fasta=True,
            output="text",
            exportFormat="text",
            sequences=cn,
        )
        handle = io.StringIO(gff3_text)
        handle.seek(0)

        for record in GFF.parse(handle):
            yield WebApolloSeqRecord(record, self._wa)
| 1405 | |
| 1406 | |
class WebApolloSeqRecord(object):
    """Proxy around a Biopython SeqRecord that forwards attribute access
    to the wrapped record and wraps its features so that edits can be
    pushed back to the Apollo instance."""

    def __init__(self, sr, wa):
        # _sr: the wrapped SeqRecord; _wa: the WebApollo instance.
        self._sr = sr
        self._wa = wa

    def __dir__(self):
        return dir(self._sr)

    def __getattr__(self, key):
        if key in ('_sr', '_wa'):
            return self.__dict__[key]
        else:
            if key == 'features':
                # Wrap each feature so mutations go through the proxy.
                return (WebApolloSeqFeature(x, self._wa)
                        for x in self._sr.__dict__[key])
            else:
                return self._sr.__dict__[key]

    def __setattr__(self, key, value):
        # BUG FIX: this previously tested for '_sd' instead of '_sr',
        # which made __init__'s ``self._sr = sr`` fall into the else
        # branch, recurse through __getattr__ and raise KeyError --
        # the class could never be instantiated.
        if key in ('_sr', '_wa'):
            self.__dict__[key] = value
        else:
            self._sr.__dict__[key] = value
    # Methods acting on the SeqRecord object
| 1431 | |
| 1432 | |
class WebApolloSeqFeature(object):
    """Proxy around a Biopython SeqFeature that pushes ``location``
    changes back to the Apollo instance."""

    def __init__(self, sf, wa):
        # _sf: the wrapped SeqFeature; _wa: the WebApollo instance.
        self._sf = sf
        self._wa = wa

    def __dir__(self):
        return dir(self._sf)

    def __getattr__(self, key):
        if key in ('_sf', '_wa'):
            return self.__dict__[key]
        else:
            return self._sf.__dict__[key]

    def __setattr__(self, key, value):
        if key in ('_sf', '_wa'):
            self.__dict__[key] = value
        else:
            # Methods acting on the SeqFeature object
            if key == 'location':
                # BUG FIX: both calls below used ``self.wa``, an
                # attribute that does not exist; lookup fell through to
                # __getattr__ and raised KeyError on every location
                # assignment. ``self._wa`` is the Apollo handle.
                if value.strand != self._sf.location.strand:
                    self._wa.annotations.flipStrand(
                        self._sf.qualifiers['ID'][0]
                    )

                self._wa.annotations.setBoundaries(
                    self._sf.qualifiers['ID'][0],
                    value.start,
                    value.end,
                )

                self._sf.__dict__[key] = value
            else:
                self._sf.__dict__[key] = value
| 1467 | |
| 1468 | |
| 1469 def _tnType(feature): | |
| 1470 if feature.type in ('gene', 'mRNA', 'exon', 'CDS', 'terminator', 'tRNA'): | |
| 1471 return feature.type | |
| 1472 else: | |
| 1473 return 'exon' | |
| 1474 | |
| 1475 | |
def _yieldFeatData(features):
    """Generate Apollo featureSchema dicts for *features*, recursing
    into BCBio-style ``sub_features`` nesting."""
    for feat in features:
        entry = {
            'location': {
                'strand': feat.strand,
                'fmin': int(feat.location.start),
                'fmax': int(feat.location.end),
            },
            'type': {
                'name': _tnType(feat),
                'cv': {
                    'name': 'sequence',
                }
            },
        }
        # Only genes and mRNAs carry a display name; prefer the GFF
        # Name attribute, falling back to the record id.
        if feat.type in ('gene', 'mRNA'):
            entry['name'] = feat.qualifiers.get('Name', [feat.id])[0]
        sub = getattr(feat, 'sub_features', None)
        if sub:
            entry['children'] = list(_yieldFeatData(sub))

        yield entry
| 1497 | |
| 1498 | |
def featuresToFeatureSchema(features):
    """Flatten *features* into a list of Apollo featureSchema dicts."""
    compiled = []
    for feature in features:
        compiled.extend(_yieldFeatData([feature]))
    return compiled
| 1509 | |
| 1510 | |
def accessible_organisms(user, orgs):
    """Return (commonName, id, False) option tuples for the organisms
    in *orgs* that *user* may access.

    A user sees an organism when they hold WRITE, READ or ADMINISTRATE
    on it, or when their role is ADMIN (in which case every organism
    they have any permission entry for is visible). Raises if the
    server answered with an error payload instead of an organism list.
    """
    is_admin = user.role == 'ADMIN'
    permissionMap = {}
    for perm in user.organismPermissions:
        if (is_admin or 'WRITE' in perm['permissions'] or
                'READ' in perm['permissions'] or
                'ADMINISTRATE' in perm['permissions']):
            permissionMap[perm['organism']] = perm['permissions']

    if 'error' in orgs:
        raise Exception("Error received from Apollo server: \"%s\"" % orgs['error'])

    visible = []
    for org in sorted(orgs, key=lambda o: o['commonName']):
        if org['commonName'] in permissionMap:
            visible.append((org['commonName'], org['id'], False))
    return visible
| 1529 | |
| 1530 | |
def galaxy_list_groups(trans, *args, **kwargs):
    """Galaxy dynamic-options helper: list Apollo groups for the
    requesting Galaxy user, with per-email caching."""
    email = trans.get_user().email
    wa = WebApolloInstance(
        os.environ['GALAXY_WEBAPOLLO_URL'],
        os.environ['GALAXY_WEBAPOLLO_USER'],
        os.environ['GALAXY_WEBAPOLLO_PASSWORD']
    )
    # The Galaxy account must also exist on the Apollo side.
    try:
        gx_user = wa.requireUser(email)
    except UnknownUserException:
        return []

    cacheKey = 'groups-' + email
    # A bare "key in cache" cannot be trusted on its own: the entry may
    # be evicted between the membership test and the lookup, hence the
    # KeyError fallback below.
    if cacheKey not in cache:
        # Definitely absent right now -- fetch and populate.
        data = _galaxy_list_groups(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
    try:
        # Likely present, but take no chances on eviction in between.
        return cache[cacheKey]
    except KeyError:
        data = _galaxy_list_groups(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
| 1567 | |
| 1568 | |
| 1569 def _galaxy_list_groups(wa, gx_user, *args, **kwargs): | |
| 1570 # Fetch the groups. | |
| 1571 group_data = [] | |
| 1572 for group in wa.groups.loadGroups(): | |
| 1573 # Reformat | |
| 1574 group_data.append((group.name, group.groupId, False)) | |
| 1575 return group_data | |
| 1576 | |
| 1577 | |
def galaxy_list_orgs(trans, *args, **kwargs):
    """Galaxy dynamic-options helper: list Apollo organisms accessible
    to the requesting Galaxy user, with per-email caching."""
    email = trans.get_user().email
    wa = WebApolloInstance(
        os.environ['GALAXY_WEBAPOLLO_URL'],
        os.environ['GALAXY_WEBAPOLLO_USER'],
        os.environ['GALAXY_WEBAPOLLO_PASSWORD']
    )
    # The Galaxy account must also exist on the Apollo side.
    try:
        gx_user = wa.requireUser(email)
    except UnknownUserException:
        return []

    cacheKey = 'orgs-' + email
    if cacheKey not in cache:
        data = _galaxy_list_orgs(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
    try:
        # Entry may have been evicted since the membership test above.
        return cache[cacheKey]
    except KeyError:
        data = _galaxy_list_orgs(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
| 1603 | |
| 1604 | |
def _galaxy_list_orgs(wa, gx_user, *args, **kwargs):
    """Uncached fetch: every organism, filtered down to those that
    *gx_user* is allowed to access."""
    return accessible_organisms(gx_user, wa.organisms.findAllOrganisms())
| 1612 | |
| 1613 | |
def galaxy_list_users(trans, *args, **kwargs):
    """Galaxy dynamic-options helper: list Apollo users, with per-email
    caching keyed on the requesting Galaxy user."""
    email = trans.get_user().email
    wa = WebApolloInstance(
        os.environ['GALAXY_WEBAPOLLO_URL'],
        os.environ['GALAXY_WEBAPOLLO_USER'],
        os.environ['GALAXY_WEBAPOLLO_PASSWORD']
    )
    # The Galaxy account must also exist on the Apollo side.
    try:
        gx_user = wa.requireUser(email)
    except UnknownUserException:
        return []

    cacheKey = 'users-' + email
    # Membership test and lookup are separate operations; the entry can
    # be evicted in between, hence the KeyError fallback below.
    if cacheKey not in cache:
        # Definitely absent right now -- fetch and populate.
        data = _galaxy_list_users(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
    try:
        # Likely present, but take no chances on eviction in between.
        return cache[cacheKey]
    except KeyError:
        data = _galaxy_list_users(wa, gx_user, *args, **kwargs)
        cache[cacheKey] = data
        return data
| 1650 | |
| 1651 | |
| 1652 def _galaxy_list_users(wa, gx_user, *args, **kwargs): | |
| 1653 # Fetch the users. | |
| 1654 user_data = [] | |
| 1655 for user in wa.users.loadUsers(): | |
| 1656 # Reformat | |
| 1657 user_data.append((user.username, user.username, False)) | |
| 1658 return user_data | |
| 1659 | |
| 1660 | |
| 1661 # This is all for implementing the command line interface for testing. | |
class obj(object):
    """Empty attribute container; fakeTrans.get_user() hangs an
    ``email`` attribute off an instance of this."""
    pass
| 1664 | |
| 1665 | |
class fakeTrans(object):
    """Minimal stand-in for a Galaxy ``trans`` object: all the listing
    helpers need from it is a user carrying an email address."""

    def __init__(self, username):
        # Stored verbatim; surfaced as the user's email in get_user().
        self.un = username

    def get_user(self):
        user = obj()
        user.email = self.un
        return user
| 1675 | |
| 1676 | |
def retry(closure, sleep=1, limit=5):
    """Call *closure* until it stops raising, making at most
    ``limit - 1`` attempts.

    Apollo has the bad habit of returning 500 errors if you call APIs
    too quickly, largely because of the unholy things that happen in
    grails. To deal with the fact that we cannot send an addComments
    call too quickly after a createFeature call, we keep retrying with
    a pause between attempts.

    Returns True as soon as a call succeeds, False once the attempt
    budget is exhausted. The closure's return value is discarded.
    """
    for _attempt in range(1, limit):
        try:
            closure()
            return True
        except Exception as exc:
            # Log a truncated error and back off before the next try.
            log.info(str(exc)[0:100])
            time.sleep(sleep)
    return False
| 1701 | |
| 1702 | |
if __name__ == '__main__':
    # Small CLI for poking at a live Apollo server (credentials come
    # from the GALAXY_WEBAPOLLO_* environment variables).
    parser = argparse.ArgumentParser(description='Test access to apollo server')
    parser.add_argument('email', help='Email of user to test')
    parser.add_argument('--action', choices=['org', 'group', 'users'], default='org', help='Data set to test, fetch a list of groups or users known to the requesting user.')
    args = parser.parse_args()

    trans = fakeTrans(args.email)
    # 'users' (and anything else) falls through to the user listing,
    # matching the original if/elif/else behaviour.
    dispatch = {
        'org': galaxy_list_orgs,
        'group': galaxy_list_groups,
    }
    lister = dispatch.get(args.action, galaxy_list_users)
    for entry in lister(trans):
        print(entry)
