Mercurial > repos > shellac > sam_consensus_v3
diff env/lib/python3.9/site-packages/schema_salad/metaschema.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author | shellac |
---|---|
date | Mon, 22 Mar 2021 18:12:50 +0000 (2021-03-22) |
parents | |
children |
line wrap: on
line diff
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/env/lib/python3.9/site-packages/schema_salad/metaschema.py Mon Mar 22 18:12:50 2021 +0000 @@ -0,0 +1,2888 @@ +# +# This file was autogenerated using schema-salad-tool --codegen=python +# The code itself is released under the Apache 2.0 license and the help text is +# subject to the license of the original schema. +import copy +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) +from urllib.parse import quote, urlsplit, urlunsplit +from urllib.request import pathname2url + +from ruamel import yaml +from ruamel.yaml.comments import CommentedMap + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher +from schema_salad.sourceline import SourceLine, add_lc_filename + +_vocab = {} # type: Dict[str, str] +_rvocab = {} # type: Dict[str, str] + + +class Savable: + @classmethod + def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Savable + pass + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, str] + pass + + +class LoadingOptions: + def __init__( + self, + fetcher=None, # type: Optional[Fetcher] + namespaces=None, # type: Optional[Dict[str, str]] + schemas=None, # type: Optional[Dict[str, str]] + fileuri=None, # type: Optional[str] + copyfrom=None, # type: Optional[LoadingOptions] + original_doc=None, # type: Optional[Any] + ): # type: (...) 
-> None + self.idx = {} # type: Dict[str, Dict[str, Any]] + self.fileuri = fileuri # type: Optional[str] + self.namespaces = namespaces + self.schemas = schemas + self.original_doc = original_doc + if copyfrom is not None: + self.idx = copyfrom.idx + if fetcher is None: + fetcher = copyfrom.fetcher + if fileuri is None: + self.fileuri = copyfrom.fileuri + if namespaces is None: + self.namespaces = copyfrom.namespaces + if schemas is None: + self.schemas = copyfrom.schemas + + if fetcher is None: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = DefaultFetcher({}, session) + else: + self.fetcher = fetcher + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in namespaces.items(): + self.vocab[k] = v + self.rvocab[v] = k + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + return _document_load_by_url( + fieldtype, + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]), + loadingOptions, + ) + elif "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + val = loadingOptions.fetcher.fetch_text( + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + ) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Union[Dict[str, str], List[Union[Dict[str, str], List[Any], None]], None] + + +def save( + val, # type: Optional[Union[Savable, 
MutableSequence[Savable]]] + top=True, # type: bool + base_url="", # type: str + relative_uris=True, # type: bool +): # type: (...) -> save_type + + if isinstance(val, Savable): + return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + if isinstance(val, MutableSequence): + return [ + save(v, top=False, base_url=base_url, relative_uris=relative_uris) + for v in val + ] + if isinstance(val, MutableMapping): + newdict = {} + for key in val: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris + ) + return newdict + return val + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) -> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in ["http", "https", "file"]) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = 
loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term '{url}' not in vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException("Expected a list") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return 
f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols): + # type: (Sequence[str]) -> None + self.symbols = symbols + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + else: + raise ValidationException(f"Expected one of {self.symbols}") + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + if "pattern" in d: + new_dict["pattern"] = d.pop("pattern") + else: + raise ValidationException( + "Missing pattern in secondaryFiles specification entry: {}".format( + d + ) + ) + new_dict["required"] = ( + d.pop("required") if "required" in d else None + ) + + if len(d): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + d + ) + ) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." 
+ ) + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Savable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException("Expected a dict") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException("Expected a str") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates): + # type: (Sequence[_Loader]) -> None + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append( + ValidationException(f"tried {t.__class__.__name__} but", None, [e]) + ) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = 
scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url( + group1, baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = 
self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load(loader, doc, baseuri, loadingOptions): + # type: (_Loader, Any, str, LoadingOptions) -> Any + if isinstance(doc, str): + return _document_load_by_url( + loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions + ) + + if isinstance(doc, MutableMapping): + if "$namespaces" in doc or "$schemas" in doc: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, + namespaces=doc.get("$namespaces", None), + schemas=doc.get("$schemas", None), + ) + doc = {k: v for k, v in doc.items() if k not in ["$namespaces", "$schemas"]} + + if "$base" in doc: + baseuri = doc["$base"] + + if "$graph" in doc: + return loader.load(doc["$graph"], baseuri, loadingOptions) 
+ else: + return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri) + + if isinstance(doc, MutableSequence): + return loader.load(doc, baseuri, loadingOptions) + + raise ValidationException("Oops, we shouldn't be here!") + + +def _document_load_by_url(loader, url, loadingOptions): + # type: (_Loader, str, LoadingOptions) -> Any + if url in loadingOptions.idx: + return _document_load(loader, loadingOptions.idx[url], url, loadingOptions) + + text = loadingOptions.fetcher.fetch_text(url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + textIO.name = str(url) + result = yaml.main.round_trip_load(textIO, preserve_quotes=True) + add_lc_filename(result, url) + + loadingOptions.idx[url] = result + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url) + + return _document_load(loader, result, url, loadingOptions) + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = pathname2url(str(pathsp[0])) + else: + urlpath = pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return f"file:{urlpath}{frag}" + else: + return f"file://{urlpath}{frag}" + + +def prefix_url(url, namespaces): # type: (str, Dict[str, str]) -> str + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris): + # type: (str, str, bool, Optional[int], bool) -> Union[str, List[str]] + if not relative_uris or uri == base_url: + return uri + if isinstance(uri, MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, str): + urisplit = urlsplit(uri) + basesplit = urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and 
urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url) + + +class Documented(Savable): + pass + + +class RecordField(Documented): + """ +A field of a record. + """ + def __init__( + self, + name, # type: Any + type, # type: Any + doc=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> RecordField + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), 
union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + return cls(doc=doc, name=name, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + 
relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['doc', 'name', 'type']) + + +class RecordSchema(Savable): + def __init__( + self, + type, # type: Any + fields=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> RecordSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'fields' in _doc: + try: + fields = load_field(_doc.get( + 'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, 'fields', str), + [e] + ) + ) + else: + fields = None + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + 
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + return cls(fields=fields, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.fields is not None: + r['fields'] = save( + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['fields', 'type']) + + +class EnumSchema(Savable): + """ +Define an enumerated type. + + """ + def __init__( + self, + symbols, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> EnumSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + symbols = load_field(_doc.get( + 'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, 'symbols', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `symbols`, `type`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + return cls(symbols=symbols, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.symbols is not None: + u = save_relative_uri( + self.symbols, + base_url, + True, + None, + relative_uris) + if u: + r['symbols'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['symbols', 'type']) + + +class ArraySchema(Savable): + def __init__( + self, + items, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> ArraySchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + items = load_field(_doc.get( + 'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, 'items', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, 
baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + return cls(items=items, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.items is not None: + u = save_relative_uri( + self.items, + base_url, + False, + 2, + relative_uris) + if u: + r['items'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['items', 'type']) + + +class JsonldPredicate(Savable): + """ +Attached to a record field to define how the parent record field is handled for +URI resolution and JSON-LD context generation. 
+ + """ + def __init__( + self, + _id=None, # type: Any + _type=None, # type: Any + _container=None, # type: Any + identity=None, # type: Any + noLinkCheck=None, # type: Any + mapSubject=None, # type: Any + mapPredicate=None, # type: Any + refScope=None, # type: Any + typeDSL=None, # type: Any + secondaryFilesDSL=None, # type: Any + subscope=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self._id = _id + self._type = _type + self._container = _container + self.identity = identity + self.noLinkCheck = noLinkCheck + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + self.refScope = refScope + self.typeDSL = typeDSL + self.secondaryFilesDSL = secondaryFilesDSL + self.subscope = subscope + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> JsonldPredicate + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if '_id' in _doc: + try: + _id = load_field(_doc.get( + '_id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `_id` field is not valid because:", + SourceLine(_doc, '_id', str), + [e] + ) + ) + else: + _id = None + if '_type' in _doc: + try: + _type = load_field(_doc.get( + '_type'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `_type` field is not valid because:", + SourceLine(_doc, '_type', str), + [e] + ) + ) + else: + _type = None 
+ if '_container' in _doc: + try: + _container = load_field(_doc.get( + '_container'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `_container` field is not valid because:", + SourceLine(_doc, '_container', str), + [e] + ) + ) + else: + _container = None + if 'identity' in _doc: + try: + identity = load_field(_doc.get( + 'identity'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `identity` field is not valid because:", + SourceLine(_doc, 'identity', str), + [e] + ) + ) + else: + identity = None + if 'noLinkCheck' in _doc: + try: + noLinkCheck = load_field(_doc.get( + 'noLinkCheck'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `noLinkCheck` field is not valid because:", + SourceLine(_doc, 'noLinkCheck', str), + [e] + ) + ) + else: + noLinkCheck = None + if 'mapSubject' in _doc: + try: + mapSubject = load_field(_doc.get( + 'mapSubject'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `mapSubject` field is not valid because:", + SourceLine(_doc, 'mapSubject', str), + [e] + ) + ) + else: + mapSubject = None + if 'mapPredicate' in _doc: + try: + mapPredicate = load_field(_doc.get( + 'mapPredicate'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `mapPredicate` field is not valid because:", + SourceLine(_doc, 'mapPredicate', str), + [e] + ) + ) + else: + mapPredicate = None + if 'refScope' in _doc: + try: + refScope = load_field(_doc.get( + 'refScope'), union_of_None_type_or_inttype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 
`refScope` field is not valid because:", + SourceLine(_doc, 'refScope', str), + [e] + ) + ) + else: + refScope = None + if 'typeDSL' in _doc: + try: + typeDSL = load_field(_doc.get( + 'typeDSL'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `typeDSL` field is not valid because:", + SourceLine(_doc, 'typeDSL', str), + [e] + ) + ) + else: + typeDSL = None + if 'secondaryFilesDSL' in _doc: + try: + secondaryFilesDSL = load_field(_doc.get( + 'secondaryFilesDSL'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `secondaryFilesDSL` field is not valid because:", + SourceLine(_doc, 'secondaryFilesDSL', str), + [e] + ) + ) + else: + secondaryFilesDSL = None + if 'subscope' in _doc: + try: + subscope = load_field(_doc.get( + 'subscope'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `subscope` field is not valid because:", + SourceLine(_doc, 'subscope', str), + [e] + ) + ) + else: + subscope = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'JsonldPredicate'", None, _errors__) + return cls(_id=_id, _type=_type, _container=_container, identity=identity, noLinkCheck=noLinkCheck, mapSubject=mapSubject, mapPredicate=mapPredicate, refScope=refScope, typeDSL=typeDSL, 
secondaryFilesDSL=secondaryFilesDSL, subscope=subscope, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self._id is not None: + u = save_relative_uri( + self._id, + base_url, + True, + None, + relative_uris) + if u: + r['_id'] = u + + if self._type is not None: + r['_type'] = save( + self._type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self._container is not None: + r['_container'] = save( + self._container, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.identity is not None: + r['identity'] = save( + self.identity, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.noLinkCheck is not None: + r['noLinkCheck'] = save( + self.noLinkCheck, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.mapSubject is not None: + r['mapSubject'] = save( + self.mapSubject, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.mapPredicate is not None: + r['mapPredicate'] = save( + self.mapPredicate, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.refScope is not None: + r['refScope'] = save( + self.refScope, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.typeDSL is not None: + r['typeDSL'] = save( + self.typeDSL, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.secondaryFilesDSL is not None: + r['secondaryFilesDSL'] = save( + self.secondaryFilesDSL, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.subscope is not None: + r['subscope'] = save( + self.subscope, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + # top refers 
to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['_id', '_type', '_container', 'identity', 'noLinkCheck', 'mapSubject', 'mapPredicate', 'refScope', 'typeDSL', 'secondaryFilesDSL', 'subscope']) + + +class SpecializeDef(Savable): + def __init__( + self, + specializeFrom, # type: Any + specializeTo, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.specializeFrom = specializeFrom + self.specializeTo = specializeTo + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> SpecializeDef + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + specializeFrom = load_field(_doc.get( + 'specializeFrom'), uri_strtype_False_False_1, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specializeFrom` field is not valid because:", + SourceLine(_doc, 'specializeFrom', str), + [e] + ) + ) + try: + specializeTo = load_field(_doc.get( + 'specializeTo'), uri_strtype_False_False_1, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specializeTo` field is not valid because:", + SourceLine(_doc, 'specializeTo', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + 
loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SpecializeDef'", None, _errors__) + return cls(specializeFrom=specializeFrom, specializeTo=specializeTo, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.specializeFrom is not None: + u = save_relative_uri( + self.specializeFrom, + base_url, + False, + 1, + relative_uris) + if u: + r['specializeFrom'] = u + + if self.specializeTo is not None: + u = save_relative_uri( + self.specializeTo, + base_url, + False, + 1, + relative_uris) + if u: + r['specializeTo'] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['specializeFrom', 'specializeTo']) + + +class NamedType(Savable): + pass + + +class DocType(Documented): + pass + + +class SchemaDefinedType(DocType): + """ +Abstract base for schema-defined types. + + """ + pass + + +class SaladRecordField(RecordField): + """ +A field of a record. + """ + def __init__( + self, + name, # type: Any + type, # type: Any + doc=None, # type: Any + jsonldPredicate=None, # type: Any + default=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.jsonldPredicate = jsonldPredicate + self.default = default + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> SaladRecordField + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + if 'jsonldPredicate' in _doc: + try: + jsonldPredicate = load_field(_doc.get( + 
'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `jsonldPredicate` field is not valid because:", + SourceLine(_doc, 'jsonldPredicate', str), + [e] + ) + ) + else: + jsonldPredicate = None + if 'default' in _doc: + try: + default = load_field(_doc.get( + 'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, 'default', str), + [e] + ) + ) + else: + default = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SaladRecordField'", None, _errors__) + return cls(doc=doc, name=name, type=type, jsonldPredicate=jsonldPredicate, default=default, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + 
relative_uris=relative_uris) + + if self.jsonldPredicate is not None: + r['jsonldPredicate'] = save( + self.jsonldPredicate, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.default is not None: + r['default'] = save( + self.default, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['doc', 'name', 'type', 'jsonldPredicate', 'default']) + + +class SaladRecordSchema(NamedType, RecordSchema, SchemaDefinedType): + def __init__( + self, + name, # type: Any + type, # type: Any + inVocab=None, # type: Any + fields=None, # type: Any + doc=None, # type: Any + docParent=None, # type: Any + docChild=None, # type: Any + docAfter=None, # type: Any + jsonldPredicate=None, # type: Any + documentRoot=None, # type: Any + abstract=None, # type: Any + extends=None, # type: Any + specialize=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.inVocab = inVocab + self.fields = fields + self.type = type + self.doc = doc + self.docParent = docParent + self.docChild = docChild + self.docAfter = docAfter + self.jsonldPredicate = jsonldPredicate + self.documentRoot = documentRoot + self.abstract = abstract + self.extends = extends + self.specialize = specialize + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> SaladRecordSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'inVocab' in _doc: + try: + inVocab = load_field(_doc.get( + 'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inVocab` field is not valid because:", + SourceLine(_doc, 'inVocab', str), + [e] + ) + ) + else: + inVocab = None + if 'fields' in _doc: + try: + fields = load_field(_doc.get( + 'fields'), idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, 'fields', str), + [e] + ) 
+ ) + else: + fields = None + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'docParent' in _doc: + try: + docParent = load_field(_doc.get( + 'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docParent` field is not valid because:", + SourceLine(_doc, 'docParent', str), + [e] + ) + ) + else: + docParent = None + if 'docChild' in _doc: + try: + docChild = load_field(_doc.get( + 'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docChild` field is not valid because:", + SourceLine(_doc, 'docChild', str), + [e] + ) + ) + else: + docChild = None + if 'docAfter' in _doc: + try: + docAfter = load_field(_doc.get( + 'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docAfter` field is not valid because:", + SourceLine(_doc, 'docAfter', str), + [e] + ) + ) + else: + docAfter = None + if 'jsonldPredicate' in _doc: + try: + jsonldPredicate = load_field(_doc.get( + 'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the `jsonldPredicate` field is not valid because:", + SourceLine(_doc, 'jsonldPredicate', str), + [e] + ) + ) + else: + jsonldPredicate = None + if 'documentRoot' in _doc: + try: + documentRoot = load_field(_doc.get( + 'documentRoot'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `documentRoot` field is not valid because:", + SourceLine(_doc, 'documentRoot', str), + [e] + ) + ) + else: + documentRoot = None + if 'abstract' in _doc: + try: + abstract = load_field(_doc.get( + 'abstract'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `abstract` field is not valid because:", + SourceLine(_doc, 'abstract', str), + [e] + ) + ) + else: + abstract = None + if 'extends' in _doc: + try: + extends = load_field(_doc.get( + 'extends'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `extends` field is not valid because:", + SourceLine(_doc, 'extends', str), + [e] + ) + ) + else: + extends = None + if 'specialize' in _doc: + try: + specialize = load_field(_doc.get( + 'specialize'), idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `specialize` field is not valid because:", + SourceLine(_doc, 'specialize', str), + [e] + ) + ) + else: + specialize = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `inVocab`, 
`fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SaladRecordSchema'", None, _errors__) + return cls(name=name, inVocab=inVocab, fields=fields, type=type, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, abstract=abstract, extends=extends, specialize=specialize, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.inVocab is not None: + r['inVocab'] = save( + self.inVocab, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.fields is not None: + r['fields'] = save( + self.fields, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.docParent is not None: + u = save_relative_uri( + self.docParent, + self.name, + False, + None, + relative_uris) + if u: + r['docParent'] = u + + if self.docChild is not None: + u = save_relative_uri( + self.docChild, + self.name, + False, + None, + relative_uris) + if u: + r['docChild'] = u + + if self.docAfter is not None: + u = save_relative_uri( + self.docAfter, + self.name, + False, + None, + relative_uris) + if u: + r['docAfter'] = u 
+ + if self.jsonldPredicate is not None: + r['jsonldPredicate'] = save( + self.jsonldPredicate, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.documentRoot is not None: + r['documentRoot'] = save( + self.documentRoot, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.abstract is not None: + r['abstract'] = save( + self.abstract, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.extends is not None: + u = save_relative_uri( + self.extends, + self.name, + False, + 1, + relative_uris) + if u: + r['extends'] = u + + if self.specialize is not None: + r['specialize'] = save( + self.specialize, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['name', 'inVocab', 'fields', 'type', 'doc', 'docParent', 'docChild', 'docAfter', 'jsonldPredicate', 'documentRoot', 'abstract', 'extends', 'specialize']) + + +class SaladEnumSchema(NamedType, EnumSchema, SchemaDefinedType): + """ +Define an enumerated type. + + """ + def __init__( + self, + name, # type: Any + symbols, # type: Any + type, # type: Any + inVocab=None, # type: Any + doc=None, # type: Any + docParent=None, # type: Any + docChild=None, # type: Any + docAfter=None, # type: Any + jsonldPredicate=None, # type: Any + documentRoot=None, # type: Any + extends=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.inVocab = inVocab + self.symbols = symbols + self.type = type + self.doc = doc + self.docParent = docParent + self.docChild = docChild + self.docAfter = docAfter + self.jsonldPredicate = jsonldPredicate + self.documentRoot = documentRoot + self.extends = extends + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> SaladEnumSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'inVocab' in _doc: + try: + inVocab = load_field(_doc.get( + 'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inVocab` field is not valid because:", + SourceLine(_doc, 'inVocab', str), + [e] + ) + ) + else: + inVocab = None + try: + symbols = load_field(_doc.get( + 'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, 'symbols', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), 
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'docParent' in _doc: + try: + docParent = load_field(_doc.get( + 'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docParent` field is not valid because:", + SourceLine(_doc, 'docParent', str), + [e] + ) + ) + else: + docParent = None + if 'docChild' in _doc: + try: + docChild = load_field(_doc.get( + 'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docChild` field is not valid because:", + SourceLine(_doc, 'docChild', str), + [e] + ) + ) + else: + docChild = None + if 'docAfter' in _doc: + try: + docAfter = load_field(_doc.get( + 'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docAfter` field is not valid because:", + SourceLine(_doc, 'docAfter', str), + [e] + ) + ) + else: + docAfter = None + if 'jsonldPredicate' in _doc: + try: + jsonldPredicate = load_field(_doc.get( + 'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `jsonldPredicate` field is not 
valid because:", + SourceLine(_doc, 'jsonldPredicate', str), + [e] + ) + ) + else: + jsonldPredicate = None + if 'documentRoot' in _doc: + try: + documentRoot = load_field(_doc.get( + 'documentRoot'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `documentRoot` field is not valid because:", + SourceLine(_doc, 'documentRoot', str), + [e] + ) + ) + else: + documentRoot = None + if 'extends' in _doc: + try: + extends = load_field(_doc.get( + 'extends'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `extends` field is not valid because:", + SourceLine(_doc, 'extends', str), + [e] + ) + ) + else: + extends = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SaladEnumSchema'", None, _errors__) + return cls(name=name, inVocab=inVocab, symbols=symbols, type=type, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, extends=extends, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] 
+ + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.inVocab is not None: + r['inVocab'] = save( + self.inVocab, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.symbols is not None: + u = save_relative_uri( + self.symbols, + self.name, + True, + None, + relative_uris) + if u: + r['symbols'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.docParent is not None: + u = save_relative_uri( + self.docParent, + self.name, + False, + None, + relative_uris) + if u: + r['docParent'] = u + + if self.docChild is not None: + u = save_relative_uri( + self.docChild, + self.name, + False, + None, + relative_uris) + if u: + r['docChild'] = u + + if self.docAfter is not None: + u = save_relative_uri( + self.docAfter, + self.name, + False, + None, + relative_uris) + if u: + r['docAfter'] = u + + if self.jsonldPredicate is not None: + r['jsonldPredicate'] = save( + self.jsonldPredicate, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.documentRoot is not None: + r['documentRoot'] = save( + self.documentRoot, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.extends is not None: + u = save_relative_uri( + self.extends, + self.name, + False, + 1, + relative_uris) + if u: + r['extends'] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['name', 'inVocab', 'symbols', 'type', 'doc', 'docParent', 'docChild', 'docAfter', 'jsonldPredicate', 'documentRoot', 'extends']) + + +class 
Documentation(NamedType, DocType): + """ +A documentation section. This type exists to facilitate self-documenting +schemas but has no role in formal validation. + + """ + def __init__( + self, + name, # type: Any + type, # type: Any + inVocab=None, # type: Any + doc=None, # type: Any + docParent=None, # type: Any + docChild=None, # type: Any + docAfter=None, # type: Any + extension_fields=None, # type: Optional[Dict[str, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.inVocab = inVocab + self.doc = doc + self.docParent = docParent + self.docChild = docChild + self.docAfter = docAfter + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Documentation + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'inVocab' in _doc: + try: + inVocab = load_field(_doc.get( + 'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `inVocab` field is not valid because:", + SourceLine(_doc, 'inVocab', str), + [e] + ) + ) + else: + inVocab = None + if 'doc' in 
_doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'docParent' in _doc: + try: + docParent = load_field(_doc.get( + 'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docParent` field is not valid because:", + SourceLine(_doc, 'docParent', str), + [e] + ) + ) + else: + docParent = None + if 'docChild' in _doc: + try: + docChild = load_field(_doc.get( + 'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docChild` field is not valid because:", + SourceLine(_doc, 'docChild', str), + [e] + ) + ) + else: + docChild = None + if 'docAfter' in _doc: + try: + docAfter = load_field(_doc.get( + 'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `docAfter` field is not valid because:", + SourceLine(_doc, 'docAfter', str), + [e] + ) + ) + else: + docAfter = None + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader_2, baseuri, loadingOptions) + except ValidationException as e: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + "", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + 
ValidationException( + "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`".format(k), + SourceLine(_doc, k, str) + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Documentation'", None, _errors__) + return cls(name=name, inVocab=inVocab, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, type=type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, str, bool) -> Dict[str, Any] + r = yaml.comments.CommentedMap() # type: Dict[str, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.inVocab is not None: + r['inVocab'] = save( + self.inVocab, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.docParent is not None: + u = save_relative_uri( + self.docParent, + self.name, + False, + None, + relative_uris) + if u: + r['docParent'] = u + + if self.docChild is not None: + u = save_relative_uri( + self.docChild, + self.name, + False, + None, + relative_uris) + if u: + r['docChild'] = u + + if self.docAfter is not None: + u = save_relative_uri( + self.docAfter, + self.name, + False, + None, + relative_uris) + if u: + r['docAfter'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(['name', 'inVocab', 
'doc', 'docParent', 'docChild', 'docAfter', 'type'])


# Forward mapping: short vocabulary term -> absolute IRI.
_vocab = {
    "Any": "https://w3id.org/cwl/salad#Any",
    "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema",
    "DocType": "https://w3id.org/cwl/salad#DocType",
    "Documentation": "https://w3id.org/cwl/salad#Documentation",
    "Documented": "https://w3id.org/cwl/salad#Documented",
    "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema",
    "JsonldPredicate": "https://w3id.org/cwl/salad#JsonldPredicate",
    "NamedType": "https://w3id.org/cwl/salad#NamedType",
    "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType",
    "RecordField": "https://w3id.org/cwl/salad#RecordField",
    "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema",
    "SaladEnumSchema": "https://w3id.org/cwl/salad#SaladEnumSchema",
    "SaladRecordField": "https://w3id.org/cwl/salad#SaladRecordField",
    "SaladRecordSchema": "https://w3id.org/cwl/salad#SaladRecordSchema",
    "SchemaDefinedType": "https://w3id.org/cwl/salad#SchemaDefinedType",
    "SpecializeDef": "https://w3id.org/cwl/salad#SpecializeDef",
    "array": "https://w3id.org/cwl/salad#array",
    "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
    "documentation": "https://w3id.org/cwl/salad#documentation",
    "double": "http://www.w3.org/2001/XMLSchema#double",
    "enum": "https://w3id.org/cwl/salad#enum",
    "float": "http://www.w3.org/2001/XMLSchema#float",
    "int": "http://www.w3.org/2001/XMLSchema#int",
    "long": "http://www.w3.org/2001/XMLSchema#long",
    "null": "https://w3id.org/cwl/salad#null",
    "record": "https://w3id.org/cwl/salad#record",
    "string": "http://www.w3.org/2001/XMLSchema#string",
}
# Reverse mapping: absolute IRI -> short vocabulary term.
_rvocab = {
    "https://w3id.org/cwl/salad#Any": "Any",
    "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema",
    "https://w3id.org/cwl/salad#DocType": "DocType",
    "https://w3id.org/cwl/salad#Documentation": "Documentation",
    "https://w3id.org/cwl/salad#Documented": "Documented",
    "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema",
    "https://w3id.org/cwl/salad#JsonldPredicate": "JsonldPredicate",
    "https://w3id.org/cwl/salad#NamedType": "NamedType",
    "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType",
    "https://w3id.org/cwl/salad#RecordField": "RecordField",
    "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema",
    "https://w3id.org/cwl/salad#SaladEnumSchema": "SaladEnumSchema",
    "https://w3id.org/cwl/salad#SaladRecordField": "SaladRecordField",
    "https://w3id.org/cwl/salad#SaladRecordSchema": "SaladRecordSchema",
    "https://w3id.org/cwl/salad#SchemaDefinedType": "SchemaDefinedType",
    "https://w3id.org/cwl/salad#SpecializeDef": "SpecializeDef",
    "https://w3id.org/cwl/salad#array": "array",
    "http://www.w3.org/2001/XMLSchema#boolean": "boolean",
    "https://w3id.org/cwl/salad#documentation": "documentation",
    "http://www.w3.org/2001/XMLSchema#double": "double",
    "https://w3id.org/cwl/salad#enum": "enum",
    "http://www.w3.org/2001/XMLSchema#float": "float",
    "http://www.w3.org/2001/XMLSchema#int": "int",
    "http://www.w3.org/2001/XMLSchema#long": "long",
    "https://w3id.org/cwl/salad#null": "null",
    "https://w3id.org/cwl/salad#record": "record",
    "http://www.w3.org/2001/XMLSchema#string": "string",
}

# Primitive and record loader instances; later loaders are composed from
# earlier ones, so statement order matters.
strtype = _PrimitiveLoader((str, str))
inttype = _PrimitiveLoader(int)
floattype = _PrimitiveLoader(float)
booltype = _PrimitiveLoader(bool)
None_type = _PrimitiveLoader(type(None))
Any_type = _AnyLoader()
DocumentedLoader = _RecordLoader(Documented)
PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",))
AnyLoader = _EnumLoader(("Any",))
RecordFieldLoader = _RecordLoader(RecordField)
RecordSchemaLoader = _RecordLoader(RecordSchema)
EnumSchemaLoader = _RecordLoader(EnumSchema)
ArraySchemaLoader = _RecordLoader(ArraySchema)
JsonldPredicateLoader = _RecordLoader(JsonldPredicate)
SpecializeDefLoader = _RecordLoader(SpecializeDef)
NamedTypeLoader = _RecordLoader(NamedType)
DocTypeLoader = _RecordLoader(DocType)
SchemaDefinedTypeLoader = _RecordLoader(SchemaDefinedType)
SaladRecordFieldLoader = _RecordLoader(SaladRecordField)
SaladRecordSchemaLoader = _RecordLoader(SaladRecordSchema)
SaladEnumSchemaLoader = _RecordLoader(SaladEnumSchema)
DocumentationLoader = _RecordLoader(Documentation)
# Composite loaders (arrays, unions, URI/type-DSL/idmap wrappers) generated
# for each field type used by the metaschema classes above.
array_of_strtype = _ArrayLoader(strtype)
union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,))
uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,))
array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,))
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type')
enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",))
typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2)
uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",))
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2)
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2)
enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",))
typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2)
union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,))
uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None)
union_of_None_type_or_booltype = _UnionLoader((None_type, booltype,))
union_of_None_type_or_inttype = _UnionLoader((None_type, inttype,))
uri_strtype_False_False_1 = _URILoader(strtype, False, False, 1)
uri_union_of_None_type_or_strtype_False_False_None = _URILoader(union_of_None_type_or_strtype, False, False, None)
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, None)
union_of_None_type_or_strtype_or_JsonldPredicateLoader = _UnionLoader((None_type, strtype, JsonldPredicateLoader,))
union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,))
array_of_SaladRecordFieldLoader = _ArrayLoader(SaladRecordFieldLoader)
union_of_None_type_or_array_of_SaladRecordFieldLoader = _UnionLoader((None_type, array_of_SaladRecordFieldLoader,))
idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_SaladRecordFieldLoader, 'name', 'type')
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1)
array_of_SpecializeDefLoader = _ArrayLoader(SpecializeDefLoader)
union_of_None_type_or_array_of_SpecializeDefLoader = _UnionLoader((None_type, array_of_SpecializeDefLoader,))
idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader = _IdMapLoader(union_of_None_type_or_array_of_SpecializeDefLoader, 'specializeFrom', 'specializeTo')
enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader = _EnumLoader(("documentation",))
typedsl_enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader_2 = _TypeDSLLoader(enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader, 2)
union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader((SaladRecordSchemaLoader, SaladEnumSchemaLoader, DocumentationLoader,))
array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader)
union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader((SaladRecordSchemaLoader, SaladEnumSchemaLoader, DocumentationLoader,
array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader,)) + + +def load_document(doc, baseuri=None, loadingOptions=None): + # type: (Any, Optional[str], Optional[LoadingOptions]) -> Any + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions() + return _document_load(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader, doc, baseuri, loadingOptions) + + +def load_document_by_string(string, uri, loadingOptions=None): + # type: (Any, str, Optional[LoadingOptions]) -> Any + result = yaml.main.round_trip_load(string, preserve_quotes=True) + add_lc_filename(result, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + loadingOptions.idx[uri] = result + + return _document_load(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader, result, uri, loadingOptions) + + +def load_document_by_yaml(yaml, uri, loadingOptions=None): + # type: (Any, str, Optional[LoadingOptions]) -> Any + '''Shortcut to load via a YAML object. + yaml: must be from ruamel.yaml.main.round_trip_load with preserve_quotes=True + ''' + add_lc_filename(yaml, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + loadingOptions.idx[uri] = yaml + + return _document_load(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader, yaml, uri, loadingOptions)