diff env/lib/python3.7/site-packages/schema_salad/metaschema.py @ 0:26e78fe6e8c4 draft

"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
author shellac
date Sat, 02 May 2020 07:14:21 -0400
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/env/lib/python3.7/site-packages/schema_salad/metaschema.py	Sat May 02 07:14:21 2020 -0400
@@ -0,0 +1,2822 @@
+#
+# This file was autogenerated using schema-salad-tool --codegen=python
+# The code itself is released under the Apache 2.0 license and the help text is
+# subject to the license of the original schema.
+#
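+# A module like this is typically regenerated from the Schema Salad metaschema;
+# the invocation below is a sketch only (the schema path is not part of this
+# file):
+#
+#   schema-salad-tool --codegen=python metaschema.yml > metaschema.py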
+import copy
+import os
+import re
+import uuid  # pylint: disable=unused-import # noqa: F401
+from typing import (
+    Any,
+    Dict,
+    List,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)
+
+from six import iteritems, string_types, text_type
+from six.moves import StringIO, urllib
+from typing_extensions import Text  # pylint: disable=unused-import
+
+from ruamel import yaml
+from ruamel.yaml.comments import CommentedMap
+from schema_salad.ref_resolver import Fetcher
+from schema_salad.sourceline import SourceLine, add_lc_filename
+from schema_salad.exceptions import SchemaSaladException, ValidationException
+
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
+
+_vocab = {}  # type: Dict[Text, Text]
+_rvocab = {}  # type: Dict[Text, Text]
+
+
+class Savable(object):
+    @classmethod
+    def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Savable
+        pass
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Text]
+        pass
+
+
+class LoadingOptions(object):
+    def __init__(
+        self,
+        fetcher=None,  # type: Optional[Fetcher]
+        namespaces=None,  # type: Optional[Dict[Text, Text]]
+        fileuri=None,  # type: Optional[Text]
+        copyfrom=None,  # type: Optional[LoadingOptions]
+        schemas=None,  # type: Optional[List[Text]]
+        original_doc=None,  # type: Optional[Any]
+    ):  # type: (...) -> None
+        self.idx = {}  # type: Dict[Text, Text]
+        self.fileuri = fileuri  # type: Optional[Text]
+        self.namespaces = namespaces
+        self.schemas = schemas
+        self.original_doc = original_doc
+        if copyfrom is not None:
+            self.idx = copyfrom.idx
+            if fetcher is None:
+                fetcher = copyfrom.fetcher
+            if fileuri is None:
+                self.fileuri = copyfrom.fileuri
+            if namespaces is None:
+                self.namespaces = copyfrom.namespaces
+            if schemas is None:
+                self.schemas = copyfrom.schemas
+
+        if fetcher is None:
+            import requests
+            from cachecontrol.wrapper import CacheControl
+            from cachecontrol.caches import FileCache
+            from schema_salad.ref_resolver import DefaultFetcher
+
+            if "HOME" in os.environ:
+                session = CacheControl(
+                    requests.Session(),
+                    cache=FileCache(
+                        os.path.join(os.environ["HOME"], ".cache", "salad")
+                    ),
+                )
+            elif "TMPDIR" in os.environ:
+                session = CacheControl(
+                    requests.Session(),
+                    cache=FileCache(
+                        os.path.join(os.environ["TMPDIR"], ".cache", "salad")
+                    ),
+                )
+            else:
+                session = CacheControl(
+                    requests.Session(),
+                    cache=FileCache(os.path.join("/tmp", ".cache", "salad")),
+                )
+            self.fetcher = DefaultFetcher({}, session)  # type: Fetcher
+        else:
+            self.fetcher = fetcher
+
+        self.vocab = _vocab
+        self.rvocab = _rvocab
+
+        if namespaces is not None:
+            self.vocab = self.vocab.copy()
+            self.rvocab = self.rvocab.copy()
+            for k, v in iteritems(namespaces):
+                self.vocab[k] = v
+                self.rvocab[v] = k
+
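+# Illustrative sketch (values invented): supplying ``namespaces`` extends the
+# vocabulary used for term resolution in both directions, e.g.
+#
+#   opts = LoadingOptions(namespaces={"ex": "http://example.com/vocab#"})
+#   opts.vocab["ex"]                           # -> "http://example.com/vocab#"
+#   opts.rvocab["http://example.com/vocab#"]   # -> "ex"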
+
+def load_field(val, fieldtype, baseuri, loadingOptions):
+    # type: (Union[Text, Dict[Text, Text]], _Loader, Text, LoadingOptions) -> Any
+    if isinstance(val, MutableMapping):
+        if "$import" in val:
+            if loadingOptions.fileuri is None:
+                raise SchemaSaladException("Cannot load $import without fileuri")
+            return _document_load_by_url(
+                fieldtype,
+                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]),
+                loadingOptions,
+            )
+        elif "$include" in val:
+            if loadingOptions.fileuri is None:
+                raise SchemaSaladException("Cannot load $include without fileuri")
+            val = loadingOptions.fetcher.fetch_text(
+                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
+            )
+    return fieldtype.load(val, baseuri, loadingOptions)
+
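+# Descriptive note: a mapping value of the form {"$import": "other.yml"} is
+# resolved against ``loadingOptions.fileuri`` and loaded recursively, while
+# {"$include": "help.txt"} is replaced by the raw text of the referenced
+# resource before the field loader runs (file names here are invented).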
+
+save_type = Union[
+    Dict[Text, Text], List[Union[Dict[Text, Text], List[Any], None]], None
+]
+
+
+def save(
+    val,  # type: Optional[Union[Savable, MutableSequence[Savable]]]
+    top=True,  # type: bool
+    base_url="",  # type: Text
+    relative_uris=True,  # type: bool
+):  # type: (...) -> save_type
+
+    if isinstance(val, Savable):
+        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
+    if isinstance(val, MutableSequence):
+        return [
+            save(v, top=False, base_url=base_url, relative_uris=relative_uris)
+            for v in val
+        ]
+    if isinstance(val, MutableMapping):
+        newdict = {}
+        for key in val:
+            newdict[key] = save(
+                val[key], top=False, base_url=base_url, relative_uris=relative_uris
+            )
+        return newdict
+    return val
+
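+# Descriptive note: ``save`` is the inverse of loading -- Savable objects
+# delegate to their own ``save`` method, lists and mappings are converted
+# element by element, and plain scalars pass through unchanged.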
+
+def expand_url(
+    url,  # type: Union[str, Text]
+    base_url,  # type: Union[str, Text]
+    loadingOptions,  # type: LoadingOptions
+    scoped_id=False,  # type: bool
+    vocab_term=False,  # type: bool
+    scoped_ref=None,  # type: Optional[int]
+):
+    # type: (...) -> Text
+
+    if not isinstance(url, string_types):
+        return url
+
+    url = Text(url)
+
+    if url in (u"@id", u"@type"):
+        return url
+
+    if vocab_term and url in loadingOptions.vocab:
+        return url
+
+    if bool(loadingOptions.vocab) and u":" in url:
+        prefix = url.split(u":")[0]
+        if prefix in loadingOptions.vocab:
+            url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :]
+
+    split = urllib.parse.urlsplit(url)
+
+    if (
+        (bool(split.scheme) and split.scheme in [u"http", u"https", u"file"])
+        or url.startswith(u"$(")
+        or url.startswith(u"${")
+    ):
+        pass
+    elif scoped_id and not bool(split.fragment):
+        splitbase = urllib.parse.urlsplit(base_url)
+        frg = u""
+        if bool(splitbase.fragment):
+            frg = splitbase.fragment + u"/" + split.path
+        else:
+            frg = split.path
+        pt = splitbase.path if splitbase.path != "" else "/"
+        url = urllib.parse.urlunsplit(
+            (splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)
+        )
+    elif scoped_ref is not None and not bool(split.fragment):
+        splitbase = urllib.parse.urlsplit(base_url)
+        sp = splitbase.fragment.split(u"/")
+        n = scoped_ref
+        while n > 0 and len(sp) > 0:
+            sp.pop()
+            n -= 1
+        sp.append(url)
+        url = urllib.parse.urlunsplit(
+            (
+                splitbase.scheme,
+                splitbase.netloc,
+                splitbase.path,
+                splitbase.query,
+                u"/".join(sp),
+            )
+        )
+    else:
+        url = loadingOptions.fetcher.urljoin(base_url, url)
+
+    if vocab_term:
+        split = urllib.parse.urlsplit(url)
+        if bool(split.scheme):
+            if url in loadingOptions.rvocab:
+                return loadingOptions.rvocab[url]
+        else:
+            raise ValidationException("Term '{}' not in vocabulary".format(url))
+
+    return url
+
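+# Illustrative sketch (URIs invented): with ``scoped_id=True`` an unqualified
+# identifier that is not a vocabulary term is placed inside the fragment of
+# the base document,
+#
+#   expand_url("field1", "file:///tmp/schema.yml#Rec", loadingOptions,
+#              scoped_id=True)
+#   # -> "file:///tmp/schema.yml#Rec/field1"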
+
+class _Loader(object):
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        pass
+
+
+class _AnyLoader(_Loader):
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if doc is not None:
+            return doc
+        raise ValidationException("Expected non-null")
+
+
+class _PrimitiveLoader(_Loader):
+    def __init__(self, tp):
+        # type: (Union[type, Tuple[Type[Text], Type[Text]]]) -> None
+        self.tp = tp
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if not isinstance(doc, self.tp):
+            raise ValidationException(
+                "Expected a {} but got {}".format(
+                    self.tp, doc.__class__.__name__
+                )
+            )
+        return doc
+
+    def __repr__(self):  # type: () -> str
+        return str(self.tp)
+
+
+class _ArrayLoader(_Loader):
+    def __init__(self, items):
+        # type: (_Loader) -> None
+        self.items = items
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if not isinstance(doc, MutableSequence):
+            raise ValidationException("Expected a list")
+        r = []  # type: List[Any]
+        errors = []  # type: List[SchemaSaladException]
+        for i in range(0, len(doc)):
+            try:
+                lf = load_field(
+                    doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions
+                )
+                if isinstance(lf, MutableSequence):
+                    r.extend(lf)
+                else:
+                    r.append(lf)
+            except ValidationException as e:
+                errors.append(e.with_sourceline(SourceLine(doc, i, str)))
+        if errors:
+            raise ValidationException("", None, errors)
+        return r
+
+    def __repr__(self):  # type: () -> str
+        return "array<{}>".format(self.items)
+
+
+class _EnumLoader(_Loader):
+    def __init__(self, symbols):
+        # type: (Sequence[Text]) -> None
+        self.symbols = symbols
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if doc in self.symbols:
+            return doc
+        else:
+            raise ValidationException("Expected one of {}".format(self.symbols))
+
+
+class _RecordLoader(_Loader):
+    def __init__(self, classtype):
+        # type: (Type[Savable]) -> None
+        self.classtype = classtype
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if not isinstance(doc, MutableMapping):
+            raise ValidationException("Expected a dict")
+        return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot)
+
+    def __repr__(self):  # type: () -> str
+        return str(self.classtype)
+
+
+class _UnionLoader(_Loader):
+    def __init__(self, alternates):
+        # type: (Sequence[_Loader]) -> None
+        self.alternates = alternates
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        errors = []
+        for t in self.alternates:
+            try:
+                return t.load(doc, baseuri, loadingOptions, docRoot=docRoot)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        u"tried {} but".format(t.__class__.__name__), None, [e]
+                    )
+                )
+        raise ValidationException("", None, errors, u"-")
+
+    def __repr__(self):  # type: () -> str
+        return " | ".join(str(a) for a in self.alternates)
+
+
+class _URILoader(_Loader):
+    def __init__(self, inner, scoped_id, vocab_term, scoped_ref):
+        # type: (_Loader, bool, bool, Union[int, None]) -> None
+        self.inner = inner
+        self.scoped_id = scoped_id
+        self.vocab_term = vocab_term
+        self.scoped_ref = scoped_ref
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if isinstance(doc, MutableSequence):
+            doc = [
+                expand_url(
+                    i,
+                    baseuri,
+                    loadingOptions,
+                    self.scoped_id,
+                    self.vocab_term,
+                    self.scoped_ref,
+                )
+                for i in doc
+            ]
+        if isinstance(doc, string_types):
+            doc = expand_url(
+                doc,
+                baseuri,
+                loadingOptions,
+                self.scoped_id,
+                self.vocab_term,
+                self.scoped_ref,
+            )
+        return self.inner.load(doc, baseuri, loadingOptions)
+
+
+class _TypeDSLLoader(_Loader):
+    typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$")
+
+    def __init__(self, inner, refScope):
+        # type: (_Loader, Union[int, None]) -> None
+        self.inner = inner
+        self.refScope = refScope
+
+    def resolve(self, doc, baseuri, loadingOptions):
+        # type: (Any, Text, LoadingOptions) -> Any
+        m = self.typeDSLregex.match(doc)
+        if m:
+            first = expand_url(
+                m.group(1), baseuri, loadingOptions, False, True, self.refScope
+            )
+            second = third = None
+            if bool(m.group(2)):
+                second = {"type": "array", "items": first}
+                # second = CommentedMap((("type", "array"),
+                #                       ("items", first)))
+                # second.lc.add_kv_line_col("type", lc)
+                # second.lc.add_kv_line_col("items", lc)
+                # second.lc.filename = filename
+            if bool(m.group(3)):
+                third = [u"null", second or first]
+                # third = CommentedSeq([u"null", second or first])
+                # third.lc.add_kv_line_col(0, lc)
+                # third.lc.add_kv_line_col(1, lc)
+                # third.lc.filename = filename
+            doc = third or second or first
+        return doc
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if isinstance(doc, MutableSequence):
+            r = []  # type: List[Any]
+            for d in doc:
+                if isinstance(d, string_types):
+                    resolved = self.resolve(d, baseuri, loadingOptions)
+                    if isinstance(resolved, MutableSequence):
+                        for i in resolved:
+                            if i not in r:
+                                r.append(i)
+                    else:
+                        if resolved not in r:
+                            r.append(resolved)
+                else:
+                    r.append(d)
+            doc = r
+        elif isinstance(doc, string_types):
+            doc = self.resolve(doc, baseuri, loadingOptions)
+
+        return self.inner.load(doc, baseuri, loadingOptions)
+
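+# Illustrative sketch: the type DSL shorthand is expanded before the inner
+# loader runs; assuming the base name resolves to itself (as vocabulary terms
+# do),
+#
+#   "string[]"  -> {"type": "array", "items": "string"}
+#   "string?"   -> ["null", "string"]
+#   "string[]?" -> ["null", {"type": "array", "items": "string"}]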
+
+class _IdMapLoader(_Loader):
+    def __init__(self, inner, mapSubject, mapPredicate):
+        # type: (_Loader, Text, Union[Text, None]) -> None
+        self.inner = inner
+        self.mapSubject = mapSubject
+        self.mapPredicate = mapPredicate
+
+    def load(self, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any
+        if isinstance(doc, MutableMapping):
+            r = []  # type: List[Any]
+            for k in sorted(doc.keys()):
+                val = doc[k]
+                if isinstance(val, CommentedMap):
+                    v = copy.copy(val)
+                    v.lc.data = val.lc.data
+                    v.lc.filename = val.lc.filename
+                    v[self.mapSubject] = k
+                    r.append(v)
+                elif isinstance(val, MutableMapping):
+                    v2 = copy.copy(val)
+                    v2[self.mapSubject] = k
+                    r.append(v2)
+                else:
+                    if self.mapPredicate:
+                        v3 = {self.mapPredicate: val}
+                        v3[self.mapSubject] = k
+                        r.append(v3)
+                    else:
+                        raise ValidationException("No mapPredicate")
+            doc = r
+        return self.inner.load(doc, baseuri, loadingOptions)
+
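+# Illustrative sketch (subject/predicate values are examples only): with
+# mapSubject="name" and mapPredicate="type", an identifier map such as
+#
+#   {"fieldA": "string", "fieldB": {"type": "int"}}
+#
+# is rewritten, before the inner loader runs, to
+#
+#   [{"name": "fieldA", "type": "string"}, {"type": "int", "name": "fieldB"}]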
+
+def _document_load(loader, doc, baseuri, loadingOptions):
+    # type: (_Loader, Any, Text, LoadingOptions) -> Any
+    if isinstance(doc, string_types):
+        return _document_load_by_url(
+            loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions
+        )
+
+    if isinstance(doc, MutableMapping):
+        if "$namespaces" in doc:
+            loadingOptions = LoadingOptions(
+                copyfrom=loadingOptions, namespaces=doc["$namespaces"]
+            )
+            doc = {k: v for k, v in doc.items() if k != "$namespaces"}
+
+        if "$schemas" in doc:
+            loadingOptions = LoadingOptions(
+                copyfrom=loadingOptions, schemas=doc["$schemas"]
+            )
+            doc = {k: v for k, v in doc.items() if k != "$schemas"}
+
+        if "$base" in doc:
+            baseuri = doc["$base"]
+
+        if "$graph" in doc:
+            return loader.load(doc["$graph"], baseuri, loadingOptions)
+        else:
+            return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri)
+
+    if isinstance(doc, MutableSequence):
+        return loader.load(doc, baseuri, loadingOptions)
+
+    raise ValidationException(
+        "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc)
+    )
+
+
+def _document_load_by_url(loader, url, loadingOptions):
+    # type: (_Loader, Text, LoadingOptions) -> Any
+    if url in loadingOptions.idx:
+        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)
+
+    text = loadingOptions.fetcher.fetch_text(url)
+    if isinstance(text, bytes):
+        textIO = StringIO(text.decode("utf-8"))
+    else:
+        textIO = StringIO(text)
+    textIO.name = str(url)
+    result = yaml.round_trip_load(textIO, preserve_quotes=True)
+    add_lc_filename(result, url)
+
+    loadingOptions.idx[url] = result
+
+    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)
+
+    return _document_load(loader, result, url, loadingOptions)
+
+
+def file_uri(path, split_frag=False):  # type: (str, bool) -> str
+    if path.startswith("file://"):
+        return path
+    if split_frag:
+        pathsp = path.split("#", 1)
+        frag = "#" + urllib.parse.quote(str(pathsp[1])) if len(pathsp) == 2 else ""
+        urlpath = urllib.request.pathname2url(str(pathsp[0]))
+    else:
+        urlpath = urllib.request.pathname2url(path)
+        frag = ""
+    if urlpath.startswith("//"):
+        return "file:{}{}".format(urlpath, frag)
+    else:
+        return "file://{}{}".format(urlpath, frag)
+
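+# Illustrative sketch (paths invented):
+#
+#   file_uri("/tmp/schema.yml")                       # -> "file:///tmp/schema.yml"
+#   file_uri("/tmp/schema.yml#Rec", split_frag=True)  # -> "file:///tmp/schema.yml#Rec"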
+
+def prefix_url(url, namespaces):  # type: (Text, Dict[Text, Text]) -> Text
+    for k, v in namespaces.items():
+        if url.startswith(v):
+            return k + ":" + url[len(v) :]
+    return url
+
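+# Illustrative sketch (namespace invented): a URL under a registered namespace
+# is shortened back to its prefixed form,
+#
+#   prefix_url("http://example.com/vocab#Foo", {"ex": "http://example.com/vocab#"})
+#   # -> "ex:Foo"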
+
+def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris):
+    # type: (Text, Text, bool, Optional[int], bool) -> Union[Text, List[Text]]
+    if not relative_uris:
+        return uri
+    if isinstance(uri, MutableSequence):
+        return [
+            save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris)
+            for u in uri
+        ]
+    elif isinstance(uri, text_type):
+        urisplit = urllib.parse.urlsplit(uri)
+        basesplit = urllib.parse.urlsplit(base_url)
+        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
+            if urisplit.path != basesplit.path:
+                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
+                if urisplit.fragment:
+                    p = p + "#" + urisplit.fragment
+                return p
+
+            basefrag = basesplit.fragment + "/"
+            if ref_scope:
+                sp = basefrag.split("/")
+                i = 0
+                while i < ref_scope:
+                    sp.pop()
+                    i += 1
+                basefrag = "/".join(sp)
+
+            if urisplit.fragment.startswith(basefrag):
+                return urisplit.fragment[len(basefrag) :]
+            else:
+                return urisplit.fragment
+        return uri
+    else:
+        return save(uri, top=False, base_url=base_url)
+
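+# Illustrative sketch (URIs invented): a URI sharing scheme, authority and path
+# with ``base_url`` is reduced to the remainder of its fragment,
+#
+#   save_relative_uri("file:///tmp/schema.yml#Rec/field1",
+#                     "file:///tmp/schema.yml#Rec", True, None, True)
+#   # -> "field1"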
+
+class Documented(Savable):
+    pass
+
+
+class RecordField(Documented):
+    """
+A field of a record.
+    """
+    def __init__(
+        self,
+        doc,  # type: Any
+        name,  # type: Any
+        type,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.doc = doc
+        self.name = name
+        self.type = type
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordField
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'name' in _doc:
+            try:
+                name = load_field(_doc.get(
+                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `name` field is not valid because:",
+                        SourceLine(_doc, 'name', str),
+                        [e]
+                    )
+                )
+        else:
+            name = None
+
+        if name is None:
+            if docRoot is not None:
+                name = docRoot
+            else:
+                raise ValidationException("Missing name")
+        baseuri = name
+        if 'doc' in _doc:
+            try:
+                doc = load_field(_doc.get(
+                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `doc` field is not valid because:",
+                        SourceLine(_doc, 'doc', str),
+                        [e]
+                    )
+                )
+        else:
+            doc = None
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `doc`, `name`, `type`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'RecordField'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(doc, name, type, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.name is not None:
+            u = save_relative_uri(
+                self.name,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['name'] = u
+
+        if self.doc is not None:
+            r['doc'] = save(
+                self.doc,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['doc', 'name', 'type'])
+
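+# Illustrative sketch (field values invented): a RecordField can be built
+# directly and serialised back to a plain mapping,
+#
+#   rf = RecordField(doc=None, name="http://example.com/acme#label", type="string")
+#   rf.save()   # -> ordered mapping {"name": "http://example.com/acme#label",
+#               #                     "type": "string"}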
+
+class RecordSchema(Savable):
+    def __init__(
+        self,
+        fields,  # type: Any
+        type,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.fields = fields
+        self.type = type
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordSchema
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'fields' in _doc:
+            try:
+                fields = load_field(_doc.get(
+                    'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `fields` field is not valid because:",
+                        SourceLine(_doc, 'fields', str),
+                        [e]
+                    )
+                )
+        else:
+            fields = None
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `fields`, `type`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'RecordSchema'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(fields, type, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.fields is not None:
+            r['fields'] = save(
+                self.fields,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['fields', 'type'])
+
+
+class EnumSchema(Savable):
+    """
+Define an enumerated type.
+
+    """
+    def __init__(
+        self,
+        symbols,  # type: Any
+        type,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.symbols = symbols
+        self.type = type
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> EnumSchema
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        try:
+            symbols = load_field(_doc.get(
+                'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `symbols` field is not valid because:",
+                    SourceLine(_doc, 'symbols', str),
+                    [e]
+                )
+            )
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `symbols`, `type`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'EnumSchema'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(symbols, type, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.symbols is not None:
+            u = save_relative_uri(
+                self.symbols,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['symbols'] = u
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['symbols', 'type'])
+
+
+class ArraySchema(Savable):
+    def __init__(
+        self,
+        items,  # type: Any
+        type,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.items = items
+        self.type = type
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> ArraySchema
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        try:
+            items = load_field(_doc.get(
+                'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `items` field is not valid because:",
+                    SourceLine(_doc, 'items', str),
+                    [e]
+                )
+            )
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `items`, `type`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'ArraySchema'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(items, type, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.items is not None:
+            u = save_relative_uri(
+                self.items,
+                base_url,
+                False,
+                2,
+                relative_uris)
+            if u:
+                r['items'] = u
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['items', 'type'])
+
+
+class JsonldPredicate(Savable):
+    """
+Attached to a record field to define how the parent record field is handled for
+URI resolution and JSON-LD context generation.
+
+    """
+    def __init__(
+        self,
+        _id,  # type: Any
+        _type,  # type: Any
+        _container,  # type: Any
+        identity,  # type: Any
+        noLinkCheck,  # type: Any
+        mapSubject,  # type: Any
+        mapPredicate,  # type: Any
+        refScope,  # type: Any
+        typeDSL,  # type: Any
+        secondaryFilesDSL,  # type: Any
+        subscope,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self._id = _id
+        self._type = _type
+        self._container = _container
+        self.identity = identity
+        self.noLinkCheck = noLinkCheck
+        self.mapSubject = mapSubject
+        self.mapPredicate = mapPredicate
+        self.refScope = refScope
+        self.typeDSL = typeDSL
+        self.secondaryFilesDSL = secondaryFilesDSL
+        self.subscope = subscope
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> JsonldPredicate
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if '_id' in _doc:
+            try:
+                _id = load_field(_doc.get(
+                    '_id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `_id` field is not valid because:",
+                        SourceLine(_doc, '_id', str),
+                        [e]
+                    )
+                )
+        else:
+            _id = None
+        if '_type' in _doc:
+            try:
+                _type = load_field(_doc.get(
+                    '_type'), union_of_None_type_or_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `_type` field is not valid because:",
+                        SourceLine(_doc, '_type', str),
+                        [e]
+                    )
+                )
+        else:
+            _type = None
+        if '_container' in _doc:
+            try:
+                _container = load_field(_doc.get(
+                    '_container'), union_of_None_type_or_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `_container` field is not valid because:",
+                        SourceLine(_doc, '_container', str),
+                        [e]
+                    )
+                )
+        else:
+            _container = None
+        if 'identity' in _doc:
+            try:
+                identity = load_field(_doc.get(
+                    'identity'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `identity` field is not valid because:",
+                        SourceLine(_doc, 'identity', str),
+                        [e]
+                    )
+                )
+        else:
+            identity = None
+        if 'noLinkCheck' in _doc:
+            try:
+                noLinkCheck = load_field(_doc.get(
+                    'noLinkCheck'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `noLinkCheck` field is not valid because:",
+                        SourceLine(_doc, 'noLinkCheck', str),
+                        [e]
+                    )
+                )
+        else:
+            noLinkCheck = None
+        if 'mapSubject' in _doc:
+            try:
+                mapSubject = load_field(_doc.get(
+                    'mapSubject'), union_of_None_type_or_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `mapSubject` field is not valid because:",
+                        SourceLine(_doc, 'mapSubject', str),
+                        [e]
+                    )
+                )
+        else:
+            mapSubject = None
+        if 'mapPredicate' in _doc:
+            try:
+                mapPredicate = load_field(_doc.get(
+                    'mapPredicate'), union_of_None_type_or_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `mapPredicate` field is not valid because:",
+                        SourceLine(_doc, 'mapPredicate', str),
+                        [e]
+                    )
+                )
+        else:
+            mapPredicate = None
+        if 'refScope' in _doc:
+            try:
+                refScope = load_field(_doc.get(
+                    'refScope'), union_of_None_type_or_inttype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `refScope` field is not valid because:",
+                        SourceLine(_doc, 'refScope', str),
+                        [e]
+                    )
+                )
+        else:
+            refScope = None
+        if 'typeDSL' in _doc:
+            try:
+                typeDSL = load_field(_doc.get(
+                    'typeDSL'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `typeDSL` field is not valid because:",
+                        SourceLine(_doc, 'typeDSL', str),
+                        [e]
+                    )
+                )
+        else:
+            typeDSL = None
+        if 'secondaryFilesDSL' in _doc:
+            try:
+                secondaryFilesDSL = load_field(_doc.get(
+                    'secondaryFilesDSL'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `secondaryFilesDSL` field is not valid because:",
+                        SourceLine(_doc, 'secondaryFilesDSL', str),
+                        [e]
+                    )
+                )
+        else:
+            secondaryFilesDSL = None
+        if 'subscope' in _doc:
+            try:
+                subscope = load_field(_doc.get(
+                    'subscope'), union_of_None_type_or_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `subscope` field is not valid because:",
+                        SourceLine(_doc, 'subscope', str),
+                        [e]
+                    )
+                )
+        else:
+            subscope = None
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'JsonldPredicate'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(_id, _type, _container, identity, noLinkCheck, mapSubject, mapPredicate, refScope, typeDSL, secondaryFilesDSL, subscope, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self._id is not None:
+            u = save_relative_uri(
+                self._id,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['_id'] = u
+
+        if self._type is not None:
+            r['_type'] = save(
+                self._type,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self._container is not None:
+            r['_container'] = save(
+                self._container,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.identity is not None:
+            r['identity'] = save(
+                self.identity,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.noLinkCheck is not None:
+            r['noLinkCheck'] = save(
+                self.noLinkCheck,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.mapSubject is not None:
+            r['mapSubject'] = save(
+                self.mapSubject,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.mapPredicate is not None:
+            r['mapPredicate'] = save(
+                self.mapPredicate,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.refScope is not None:
+            r['refScope'] = save(
+                self.refScope,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.typeDSL is not None:
+            r['typeDSL'] = save(
+                self.typeDSL,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.secondaryFilesDSL is not None:
+            r['secondaryFilesDSL'] = save(
+                self.secondaryFilesDSL,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if self.subscope is not None:
+            r['subscope'] = save(
+                self.subscope,
+                top=False,
+                base_url=base_url,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['_id', '_type', '_container', 'identity', 'noLinkCheck', 'mapSubject', 'mapPredicate', 'refScope', 'typeDSL', 'secondaryFilesDSL', 'subscope'])
+
+
+class SpecializeDef(Savable):
+    def __init__(
+        self,
+        specializeFrom,  # type: Any
+        specializeTo,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.specializeFrom = specializeFrom
+        self.specializeTo = specializeTo
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> SpecializeDef
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        try:
+            specializeFrom = load_field(_doc.get(
+                'specializeFrom'), uri_strtype_False_False_1, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `specializeFrom` field is not valid because:",
+                    SourceLine(_doc, 'specializeFrom', str),
+                    [e]
+                )
+            )
+        try:
+            specializeTo = load_field(_doc.get(
+                'specializeTo'), uri_strtype_False_False_1, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `specializeTo` field is not valid because:",
+                    SourceLine(_doc, 'specializeTo', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `specializeFrom`, `specializeTo`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'SpecializeDef'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(specializeFrom, specializeTo, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.specializeFrom is not None:
+            u = save_relative_uri(
+                self.specializeFrom,
+                base_url,
+                False,
+                1,
+                relative_uris)
+            if u:
+                r['specializeFrom'] = u
+
+        if self.specializeTo is not None:
+            u = save_relative_uri(
+                self.specializeTo,
+                base_url,
+                False,
+                1,
+                relative_uris)
+            if u:
+                r['specializeTo'] = u
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['specializeFrom', 'specializeTo'])
+
+
+class NamedType(Savable):
+    pass
+
+
+class DocType(Documented):
+    pass
+
+
+class SchemaDefinedType(DocType):
+    """
+Abstract base for schema-defined types.
+
+    """
+    pass
+
+
+class SaladRecordField(RecordField):
+    """
+A field of a record.
+    """
+    def __init__(
+        self,
+        doc,  # type: Any
+        name,  # type: Any
+        type,  # type: Any
+        jsonldPredicate,  # type: Any
+        default,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.doc = doc
+        self.name = name
+        self.type = type
+        self.jsonldPredicate = jsonldPredicate
+        self.default = default
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> SaladRecordField
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'name' in _doc:
+            try:
+                name = load_field(_doc.get(
+                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `name` field is not valid because:",
+                        SourceLine(_doc, 'name', str),
+                        [e]
+                    )
+                )
+                name = None  # keep `name` bound so the fallback below does not raise a NameError
+        else:
+            name = None
+
+        if name is None:
+            if docRoot is not None:
+                name = docRoot
+            else:
+                raise ValidationException("Missing name")
+        baseuri = name
+        if 'doc' in _doc:
+            try:
+                doc = load_field(_doc.get(
+                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `doc` field is not valid because:",
+                        SourceLine(_doc, 'doc', str),
+                        [e]
+                    )
+                )
+        else:
+            doc = None
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+        if 'jsonldPredicate' in _doc:
+            try:
+                jsonldPredicate = load_field(_doc.get(
+                    'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `jsonldPredicate` field is not valid because:",
+                        SourceLine(_doc, 'jsonldPredicate', str),
+                        [e]
+                    )
+                )
+        else:
+            jsonldPredicate = None
+        if 'default' in _doc:
+            try:
+                default = load_field(_doc.get(
+                    'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `default` field is not valid because:",
+                        SourceLine(_doc, 'default', str),
+                        [e]
+                    )
+                )
+        else:
+            default = None
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'SaladRecordField'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(doc, name, type, jsonldPredicate, default, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.name is not None:
+            u = save_relative_uri(
+                self.name,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['name'] = u
+
+        if self.doc is not None:
+            r['doc'] = save(
+                self.doc,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.jsonldPredicate is not None:
+            r['jsonldPredicate'] = save(
+                self.jsonldPredicate,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.default is not None:
+            r['default'] = save(
+                self.default,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['doc', 'name', 'type', 'jsonldPredicate', 'default'])
+
+
+class SaladRecordSchema(NamedType, RecordSchema, SchemaDefinedType):
+    def __init__(
+        self,
+        name,  # type: Any
+        inVocab,  # type: Any
+        fields,  # type: Any
+        type,  # type: Any
+        doc,  # type: Any
+        docParent,  # type: Any
+        docChild,  # type: Any
+        docAfter,  # type: Any
+        jsonldPredicate,  # type: Any
+        documentRoot,  # type: Any
+        abstract,  # type: Any
+        extends,  # type: Any
+        specialize,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.name = name
+        self.inVocab = inVocab
+        self.fields = fields
+        self.type = type
+        self.doc = doc
+        self.docParent = docParent
+        self.docChild = docChild
+        self.docAfter = docAfter
+        self.jsonldPredicate = jsonldPredicate
+        self.documentRoot = documentRoot
+        self.abstract = abstract
+        self.extends = extends
+        self.specialize = specialize
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> SaladRecordSchema
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'name' in _doc:
+            try:
+                name = load_field(_doc.get(
+                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `name` field is not valid because:",
+                        SourceLine(_doc, 'name', str),
+                        [e]
+                    )
+                )
+                name = None  # keep `name` bound so the fallback below does not raise a NameError
+        else:
+            name = None
+
+        if name is None:
+            if docRoot is not None:
+                name = docRoot
+            else:
+                raise ValidationException("Missing name")
+        baseuri = name
+        if 'inVocab' in _doc:
+            try:
+                inVocab = load_field(_doc.get(
+                    'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `inVocab` field is not valid because:",
+                        SourceLine(_doc, 'inVocab', str),
+                        [e]
+                    )
+                )
+        else:
+            inVocab = None
+        if 'fields' in _doc:
+            try:
+                fields = load_field(_doc.get(
+                    'fields'), idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `fields` field is not valid because:",
+                        SourceLine(_doc, 'fields', str),
+                        [e]
+                    )
+                )
+        else:
+            fields = None
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+        if 'doc' in _doc:
+            try:
+                doc = load_field(_doc.get(
+                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `doc` field is not valid because:",
+                        SourceLine(_doc, 'doc', str),
+                        [e]
+                    )
+                )
+        else:
+            doc = None
+        if 'docParent' in _doc:
+            try:
+                docParent = load_field(_doc.get(
+                    'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docParent` field is not valid because:",
+                        SourceLine(_doc, 'docParent', str),
+                        [e]
+                    )
+                )
+        else:
+            docParent = None
+        if 'docChild' in _doc:
+            try:
+                docChild = load_field(_doc.get(
+                    'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docChild` field is not valid because:",
+                        SourceLine(_doc, 'docChild', str),
+                        [e]
+                    )
+                )
+        else:
+            docChild = None
+        if 'docAfter' in _doc:
+            try:
+                docAfter = load_field(_doc.get(
+                    'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docAfter` field is not valid because:",
+                        SourceLine(_doc, 'docAfter', str),
+                        [e]
+                    )
+                )
+        else:
+            docAfter = None
+        if 'jsonldPredicate' in _doc:
+            try:
+                jsonldPredicate = load_field(_doc.get(
+                    'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `jsonldPredicate` field is not valid because:",
+                        SourceLine(_doc, 'jsonldPredicate', str),
+                        [e]
+                    )
+                )
+        else:
+            jsonldPredicate = None
+        if 'documentRoot' in _doc:
+            try:
+                documentRoot = load_field(_doc.get(
+                    'documentRoot'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `documentRoot` field is not valid because:",
+                        SourceLine(_doc, 'documentRoot', str),
+                        [e]
+                    )
+                )
+        else:
+            documentRoot = None
+        if 'abstract' in _doc:
+            try:
+                abstract = load_field(_doc.get(
+                    'abstract'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `abstract` field is not valid because:",
+                        SourceLine(_doc, 'abstract', str),
+                        [e]
+                    )
+                )
+        else:
+            abstract = None
+        if 'extends' in _doc:
+            try:
+                extends = load_field(_doc.get(
+                    'extends'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `extends` field is not valid because:",
+                        SourceLine(_doc, 'extends', str),
+                        [e]
+                    )
+                )
+        else:
+            extends = None
+        if 'specialize' in _doc:
+            try:
+                specialize = load_field(_doc.get(
+                    'specialize'), idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `specialize` field is not valid because:",
+                        SourceLine(_doc, 'specialize', str),
+                        [e]
+                    )
+                )
+        else:
+            specialize = None
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'SaladRecordSchema'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(name, inVocab, fields, type, doc, docParent, docChild, docAfter, jsonldPredicate, documentRoot, abstract, extends, specialize, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.name is not None:
+            u = save_relative_uri(
+                self.name,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['name'] = u
+
+        if self.inVocab is not None:
+            r['inVocab'] = save(
+                self.inVocab,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.fields is not None:
+            r['fields'] = save(
+                self.fields,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.doc is not None:
+            r['doc'] = save(
+                self.doc,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.docParent is not None:
+            u = save_relative_uri(
+                self.docParent,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docParent'] = u
+
+        if self.docChild is not None:
+            u = save_relative_uri(
+                self.docChild,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docChild'] = u
+
+        if self.docAfter is not None:
+            u = save_relative_uri(
+                self.docAfter,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docAfter'] = u
+
+        if self.jsonldPredicate is not None:
+            r['jsonldPredicate'] = save(
+                self.jsonldPredicate,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.documentRoot is not None:
+            r['documentRoot'] = save(
+                self.documentRoot,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.abstract is not None:
+            r['abstract'] = save(
+                self.abstract,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.extends is not None:
+            u = save_relative_uri(
+                self.extends,
+                self.name,
+                False,
+                1,
+                relative_uris)
+            if u:
+                r['extends'] = u
+
+        if self.specialize is not None:
+            r['specialize'] = save(
+                self.specialize,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['name', 'inVocab', 'fields', 'type', 'doc', 'docParent', 'docChild', 'docAfter', 'jsonldPredicate', 'documentRoot', 'abstract', 'extends', 'specialize'])
+
+
+class SaladEnumSchema(NamedType, EnumSchema, SchemaDefinedType):
+    """
+Define an enumerated type.
+
+    """
+    def __init__(
+        self,
+        name,  # type: Any
+        inVocab,  # type: Any
+        symbols,  # type: Any
+        type,  # type: Any
+        doc,  # type: Any
+        docParent,  # type: Any
+        docChild,  # type: Any
+        docAfter,  # type: Any
+        jsonldPredicate,  # type: Any
+        documentRoot,  # type: Any
+        extends,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.name = name
+        self.inVocab = inVocab
+        self.symbols = symbols
+        self.type = type
+        self.doc = doc
+        self.docParent = docParent
+        self.docChild = docChild
+        self.docAfter = docAfter
+        self.jsonldPredicate = jsonldPredicate
+        self.documentRoot = documentRoot
+        self.extends = extends
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> SaladEnumSchema
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'name' in _doc:
+            try:
+                name = load_field(_doc.get(
+                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `name` field is not valid because:",
+                        SourceLine(_doc, 'name', str),
+                        [e]
+                    )
+                )
+                name = None  # keep `name` bound so the fallback below does not raise a NameError
+        else:
+            name = None
+
+        if name is None:
+            if docRoot is not None:
+                name = docRoot
+            else:
+                raise ValidationException("Missing name")
+        baseuri = name
+        if 'inVocab' in _doc:
+            try:
+                inVocab = load_field(_doc.get(
+                    'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `inVocab` field is not valid because:",
+                        SourceLine(_doc, 'inVocab', str),
+                        [e]
+                    )
+                )
+        else:
+            inVocab = None
+        try:
+            symbols = load_field(_doc.get(
+                'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `symbols` field is not valid because:",
+                    SourceLine(_doc, 'symbols', str),
+                    [e]
+                )
+            )
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+        if 'doc' in _doc:
+            try:
+                doc = load_field(_doc.get(
+                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `doc` field is not valid because:",
+                        SourceLine(_doc, 'doc', str),
+                        [e]
+                    )
+                )
+        else:
+            doc = None
+        if 'docParent' in _doc:
+            try:
+                docParent = load_field(_doc.get(
+                    'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docParent` field is not valid because:",
+                        SourceLine(_doc, 'docParent', str),
+                        [e]
+                    )
+                )
+        else:
+            docParent = None
+        if 'docChild' in _doc:
+            try:
+                docChild = load_field(_doc.get(
+                    'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docChild` field is not valid because:",
+                        SourceLine(_doc, 'docChild', str),
+                        [e]
+                    )
+                )
+        else:
+            docChild = None
+        if 'docAfter' in _doc:
+            try:
+                docAfter = load_field(_doc.get(
+                    'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docAfter` field is not valid because:",
+                        SourceLine(_doc, 'docAfter', str),
+                        [e]
+                    )
+                )
+        else:
+            docAfter = None
+        if 'jsonldPredicate' in _doc:
+            try:
+                jsonldPredicate = load_field(_doc.get(
+                    'jsonldPredicate'), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `jsonldPredicate` field is not valid because:",
+                        SourceLine(_doc, 'jsonldPredicate', str),
+                        [e]
+                    )
+                )
+        else:
+            jsonldPredicate = None
+        if 'documentRoot' in _doc:
+            try:
+                documentRoot = load_field(_doc.get(
+                    'documentRoot'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `documentRoot` field is not valid because:",
+                        SourceLine(_doc, 'documentRoot', str),
+                        [e]
+                    )
+                )
+        else:
+            documentRoot = None
+        if 'extends' in _doc:
+            try:
+                extends = load_field(_doc.get(
+                    'extends'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `extends` field is not valid because:",
+                        SourceLine(_doc, 'extends', str),
+                        [e]
+                    )
+                )
+        else:
+            extends = None
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'SaladEnumSchema'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(name, inVocab, symbols, type, doc, docParent, docChild, docAfter, jsonldPredicate, documentRoot, extends, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.name is not None:
+            u = save_relative_uri(
+                self.name,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['name'] = u
+
+        if self.inVocab is not None:
+            r['inVocab'] = save(
+                self.inVocab,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.symbols is not None:
+            u = save_relative_uri(
+                self.symbols,
+                self.name,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['symbols'] = u
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.doc is not None:
+            r['doc'] = save(
+                self.doc,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.docParent is not None:
+            u = save_relative_uri(
+                self.docParent,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docParent'] = u
+
+        if self.docChild is not None:
+            u = save_relative_uri(
+                self.docChild,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docChild'] = u
+
+        if self.docAfter is not None:
+            u = save_relative_uri(
+                self.docAfter,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docAfter'] = u
+
+        if self.jsonldPredicate is not None:
+            r['jsonldPredicate'] = save(
+                self.jsonldPredicate,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.documentRoot is not None:
+            r['documentRoot'] = save(
+                self.documentRoot,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.extends is not None:
+            u = save_relative_uri(
+                self.extends,
+                self.name,
+                False,
+                1,
+                relative_uris)
+            if u:
+                r['extends'] = u
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['name', 'inVocab', 'symbols', 'type', 'doc', 'docParent', 'docChild', 'docAfter', 'jsonldPredicate', 'documentRoot', 'extends'])
+
+
+class Documentation(NamedType, DocType):
+    """
+A documentation section.  This type exists to facilitate self-documenting
+schemas but has no role in formal validation.
+
+    """
+    def __init__(
+        self,
+        name,  # type: Any
+        inVocab,  # type: Any
+        doc,  # type: Any
+        docParent,  # type: Any
+        docChild,  # type: Any
+        docAfter,  # type: Any
+        type,  # type: Any
+        extension_fields=None,  # type: Optional[Dict[Text, Any]]
+        loadingOptions=None  # type: Optional[LoadingOptions]
+    ):  # type: (...) -> None
+
+        if extension_fields:
+            self.extension_fields = extension_fields
+        else:
+            self.extension_fields = yaml.comments.CommentedMap()
+        if loadingOptions:
+            self.loadingOptions = loadingOptions
+        else:
+            self.loadingOptions = LoadingOptions()
+        self.name = name
+        self.inVocab = inVocab
+        self.doc = doc
+        self.docParent = docParent
+        self.docChild = docChild
+        self.docAfter = docAfter
+        self.type = type
+
+    @classmethod
+    def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Documentation
+
+        _doc = copy.copy(doc)
+        if hasattr(doc, 'lc'):
+            _doc.lc.data = doc.lc.data
+            _doc.lc.filename = doc.lc.filename
+        errors = []
+        if 'name' in _doc:
+            try:
+                name = load_field(_doc.get(
+                    'name'), uri_strtype_True_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `name` field is not valid because:",
+                        SourceLine(_doc, 'name', str),
+                        [e]
+                    )
+                )
+                name = None  # keep `name` bound so the fallback below does not raise a NameError
+        else:
+            name = None
+
+        if name is None:
+            if docRoot is not None:
+                name = docRoot
+            else:
+                raise ValidationException("Missing name")
+        baseuri = name
+        if 'inVocab' in _doc:
+            try:
+                inVocab = load_field(_doc.get(
+                    'inVocab'), union_of_None_type_or_booltype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `inVocab` field is not valid because:",
+                        SourceLine(_doc, 'inVocab', str),
+                        [e]
+                    )
+                )
+        else:
+            inVocab = None
+        if 'doc' in _doc:
+            try:
+                doc = load_field(_doc.get(
+                    'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `doc` field is not valid because:",
+                        SourceLine(_doc, 'doc', str),
+                        [e]
+                    )
+                )
+        else:
+            doc = None
+        if 'docParent' in _doc:
+            try:
+                docParent = load_field(_doc.get(
+                    'docParent'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docParent` field is not valid because:",
+                        SourceLine(_doc, 'docParent', str),
+                        [e]
+                    )
+                )
+        else:
+            docParent = None
+        if 'docChild' in _doc:
+            try:
+                docChild = load_field(_doc.get(
+                    'docChild'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docChild` field is not valid because:",
+                        SourceLine(_doc, 'docChild', str),
+                        [e]
+                    )
+                )
+        else:
+            docChild = None
+        if 'docAfter' in _doc:
+            try:
+                docAfter = load_field(_doc.get(
+                    'docAfter'), uri_union_of_None_type_or_strtype_False_False_None, baseuri, loadingOptions)
+            except ValidationException as e:
+                errors.append(
+                    ValidationException(
+                        "the `docAfter` field is not valid because:",
+                        SourceLine(_doc, 'docAfter', str),
+                        [e]
+                    )
+                )
+        else:
+            docAfter = None
+        try:
+            type = load_field(_doc.get(
+                'type'), typedsl_enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader_2, baseuri, loadingOptions)
+        except ValidationException as e:
+            errors.append(
+                ValidationException(
+                    "the `type` field is not valid because:",
+                    SourceLine(_doc, 'type', str),
+                    [e]
+                )
+            )
+
+        extension_fields = yaml.comments.CommentedMap()
+        for k in _doc.keys():
+            if k not in cls.attrs:
+                if ":" in k:
+                    ex = expand_url(k,
+                                    u"",
+                                    loadingOptions,
+                                    scoped_id=False,
+                                    vocab_term=False)
+                    extension_fields[ex] = _doc[k]
+                else:
+                    errors.append(
+                        ValidationException(
+                            "invalid field `%s`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`" % (k),
+                            SourceLine(_doc, k, str)
+                        )
+                    )
+                    break
+
+        if errors:
+            raise ValidationException("Trying 'Documentation'", None, errors)
+        loadingOptions = copy.deepcopy(loadingOptions)
+        loadingOptions.original_doc = _doc
+        return cls(name, inVocab, doc, docParent, docChild, docAfter, type, extension_fields=extension_fields, loadingOptions=loadingOptions)
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Any]
+        r = yaml.comments.CommentedMap()  # type: Dict[Text, Any]
+        for ef in self.extension_fields:
+            r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+
+        if self.name is not None:
+            u = save_relative_uri(
+                self.name,
+                base_url,
+                True,
+                None,
+                relative_uris)
+            if u:
+                r['name'] = u
+
+        if self.inVocab is not None:
+            r['inVocab'] = save(
+                self.inVocab,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.doc is not None:
+            r['doc'] = save(
+                self.doc,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if self.docParent is not None:
+            u = save_relative_uri(
+                self.docParent,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docParent'] = u
+
+        if self.docChild is not None:
+            u = save_relative_uri(
+                self.docChild,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docChild'] = u
+
+        if self.docAfter is not None:
+            u = save_relative_uri(
+                self.docAfter,
+                self.name,
+                False,
+                None,
+                relative_uris)
+            if u:
+                r['docAfter'] = u
+
+        if self.type is not None:
+            r['type'] = save(
+                self.type,
+                top=False,
+                base_url=self.name,
+                relative_uris=relative_uris)
+
+        if top and self.loadingOptions.namespaces:
+            r["$namespaces"] = self.loadingOptions.namespaces
+
+        return r
+
+    attrs = frozenset(['name', 'inVocab', 'doc', 'docParent', 'docChild', 'docAfter', 'type'])
+
+
+_vocab = {
+    "Any": "https://w3id.org/cwl/salad#Any",
+    "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema",
+    "DocType": "https://w3id.org/cwl/salad#DocType",
+    "Documentation": "https://w3id.org/cwl/salad#Documentation",
+    "Documented": "https://w3id.org/cwl/salad#Documented",
+    "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema",
+    "JsonldPredicate": "https://w3id.org/cwl/salad#JsonldPredicate",
+    "NamedType": "https://w3id.org/cwl/salad#NamedType",
+    "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType",
+    "RecordField": "https://w3id.org/cwl/salad#RecordField",
+    "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema",
+    "SaladEnumSchema": "https://w3id.org/cwl/salad#SaladEnumSchema",
+    "SaladRecordField": "https://w3id.org/cwl/salad#SaladRecordField",
+    "SaladRecordSchema": "https://w3id.org/cwl/salad#SaladRecordSchema",
+    "SchemaDefinedType": "https://w3id.org/cwl/salad#SchemaDefinedType",
+    "SpecializeDef": "https://w3id.org/cwl/salad#SpecializeDef",
+    "array": "https://w3id.org/cwl/salad#array",
+    "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
+    "documentation": "https://w3id.org/cwl/salad#documentation",
+    "double": "http://www.w3.org/2001/XMLSchema#double",
+    "enum": "https://w3id.org/cwl/salad#enum",
+    "float": "http://www.w3.org/2001/XMLSchema#float",
+    "int": "http://www.w3.org/2001/XMLSchema#int",
+    "long": "http://www.w3.org/2001/XMLSchema#long",
+    "null": "https://w3id.org/cwl/salad#null",
+    "record": "https://w3id.org/cwl/salad#record",
+    "string": "http://www.w3.org/2001/XMLSchema#string",
+}
+_rvocab = {
+    "https://w3id.org/cwl/salad#Any": "Any",
+    "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema",
+    "https://w3id.org/cwl/salad#DocType": "DocType",
+    "https://w3id.org/cwl/salad#Documentation": "Documentation",
+    "https://w3id.org/cwl/salad#Documented": "Documented",
+    "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema",
+    "https://w3id.org/cwl/salad#JsonldPredicate": "JsonldPredicate",
+    "https://w3id.org/cwl/salad#NamedType": "NamedType",
+    "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType",
+    "https://w3id.org/cwl/salad#RecordField": "RecordField",
+    "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema",
+    "https://w3id.org/cwl/salad#SaladEnumSchema": "SaladEnumSchema",
+    "https://w3id.org/cwl/salad#SaladRecordField": "SaladRecordField",
+    "https://w3id.org/cwl/salad#SaladRecordSchema": "SaladRecordSchema",
+    "https://w3id.org/cwl/salad#SchemaDefinedType": "SchemaDefinedType",
+    "https://w3id.org/cwl/salad#SpecializeDef": "SpecializeDef",
+    "https://w3id.org/cwl/salad#array": "array",
+    "http://www.w3.org/2001/XMLSchema#boolean": "boolean",
+    "https://w3id.org/cwl/salad#documentation": "documentation",
+    "http://www.w3.org/2001/XMLSchema#double": "double",
+    "https://w3id.org/cwl/salad#enum": "enum",
+    "http://www.w3.org/2001/XMLSchema#float": "float",
+    "http://www.w3.org/2001/XMLSchema#int": "int",
+    "http://www.w3.org/2001/XMLSchema#long": "long",
+    "https://w3id.org/cwl/salad#null": "null",
+    "https://w3id.org/cwl/salad#record": "record",
+    "http://www.w3.org/2001/XMLSchema#string": "string",
+}
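+
+# _vocab maps the metaschema's short vocabulary terms above to their full IRIs;
+# _rvocab is the reverse mapping, used when abbreviating IRIs back to terms
+# while loading and saving documents.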
+
+strtype = _PrimitiveLoader((str, text_type))
+inttype = _PrimitiveLoader(int)
+floattype = _PrimitiveLoader(float)
+booltype = _PrimitiveLoader(bool)
+None_type = _PrimitiveLoader(type(None))
+Any_type = _AnyLoader()
+DocumentedLoader = _RecordLoader(Documented)
+PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",))
+AnyLoader = _EnumLoader(("Any",))
+RecordFieldLoader = _RecordLoader(RecordField)
+RecordSchemaLoader = _RecordLoader(RecordSchema)
+EnumSchemaLoader = _RecordLoader(EnumSchema)
+ArraySchemaLoader = _RecordLoader(ArraySchema)
+JsonldPredicateLoader = _RecordLoader(JsonldPredicate)
+SpecializeDefLoader = _RecordLoader(SpecializeDef)
+NamedTypeLoader = _RecordLoader(NamedType)
+DocTypeLoader = _RecordLoader(DocType)
+SchemaDefinedTypeLoader = _RecordLoader(SchemaDefinedType)
+SaladRecordFieldLoader = _RecordLoader(SaladRecordField)
+SaladRecordSchemaLoader = _RecordLoader(SaladRecordSchema)
+SaladEnumSchemaLoader = _RecordLoader(SaladEnumSchema)
+DocumentationLoader = _RecordLoader(Documentation)
+array_of_strtype = _ArrayLoader(strtype)
+union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,))
+uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
+union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,))
+array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype)
+union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,))
+typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2)
+array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
+union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,))
+idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type')
+enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",))
+typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2)
+uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
+enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",))
+typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2)
+uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2)
+enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",))
+typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2)
+union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,))
+uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None)
+union_of_None_type_or_booltype = _UnionLoader((None_type, booltype,))
+union_of_None_type_or_inttype = _UnionLoader((None_type, inttype,))
+uri_strtype_False_False_1 = _URILoader(strtype, False, False, 1)
+uri_union_of_None_type_or_strtype_False_False_None = _URILoader(union_of_None_type_or_strtype, False, False, None)
+uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, None)
+union_of_None_type_or_strtype_or_JsonldPredicateLoader = _UnionLoader((None_type, strtype, JsonldPredicateLoader,))
+union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,))
+array_of_SaladRecordFieldLoader = _ArrayLoader(SaladRecordFieldLoader)
+union_of_None_type_or_array_of_SaladRecordFieldLoader = _UnionLoader((None_type, array_of_SaladRecordFieldLoader,))
+idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_SaladRecordFieldLoader, 'name', 'type')
+uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1)
+array_of_SpecializeDefLoader = _ArrayLoader(SpecializeDefLoader)
+union_of_None_type_or_array_of_SpecializeDefLoader = _UnionLoader((None_type, array_of_SpecializeDefLoader,))
+idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader = _IdMapLoader(union_of_None_type_or_array_of_SpecializeDefLoader, 'specializeFrom', 'specializeTo')
+enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader = _EnumLoader(("documentation",))
+typedsl_enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader_2 = _TypeDSLLoader(enum_056429f0e9355680bd9b2411dc96a69c7ff2e76bLoader, 2)
+union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader((SaladRecordSchemaLoader, SaladEnumSchemaLoader, DocumentationLoader,))
+array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _ArrayLoader(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader)
+union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader = _UnionLoader((SaladRecordSchemaLoader, SaladEnumSchemaLoader, DocumentationLoader, array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader,))
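+
+# The generated loader names above describe their composition: `array_of_X`
+# accepts a list of X values, `union_of_X_or_Y` accepts a value matching any
+# member loader, `uri_*` and `typedsl_*` wrappers add URI resolution and
+# type-DSL expansion, and `idmap_*` loaders accept the map form of a field,
+# keyed on the identifier named in their arguments (e.g. 'name' or
+# 'specializeFrom').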
+
+
+def load_document(doc, baseuri=None, loadingOptions=None):
+    # type: (Any, Optional[Text], Optional[LoadingOptions]) -> Any
+    if baseuri is None:
+        baseuri = file_uri(os.getcwd()) + "/"
+    if loadingOptions is None:
+        loadingOptions = LoadingOptions()
+    return _document_load(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader, doc, baseuri, loadingOptions)
+
+
+def load_document_by_string(string, uri, loadingOptions=None):
+    # type: (Any, Text, Optional[LoadingOptions]) -> Any
+    result = yaml.round_trip_load(string, preserve_quotes=True)
+    add_lc_filename(result, uri)
+
+    if loadingOptions is None:
+        loadingOptions = LoadingOptions(fileuri=uri)
+    loadingOptions.idx[uri] = result
+
+    return _document_load(union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_DocumentationLoader, result, uri, loadingOptions)
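+
+
+# Example usage of the entry points above (an illustrative sketch only; the
+# schema file name and URI below are hypothetical):
+#
+#     import schema_salad.metaschema as metaschema
+#
+#     with open("myschema.yml") as handle:
+#         loaded = metaschema.load_document_by_string(
+#             handle.read(), "file:///tmp/myschema.yml")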