import base64
import datetime
import json
import logging
import os
import re
import socket
from typing import TYPE_CHECKING, overload
from urllib.error import HTTPError
from urllib.request import urlopen
import yaml
from json2xml.json2xml import Json2xml
from pyramid.httpexceptions import HTTPNotFound, HTTPOk
from pyramid_storage.extensions import resolve_extensions
from pywps.inout.formats import FORMATS, Format
from requests.exceptions import ConnectionError
from weaver.base import Constants, classproperty
if TYPE_CHECKING:
from typing import Any, AnyStr, Dict, List, Optional, Tuple, TypeVar, Union
from typing_extensions import Literal
from weaver.base import PropertyDataTypeT
from weaver.typedefs import AnyRequestType, JSON
if TYPE_CHECKING:
    # type-only aliases: 'Literal' and 'TypeVar' are imported under the TYPE_CHECKING
    # guard above, so these aliases must also be guarded to avoid a NameError at import
    FileModeSteamType = Literal["r", "w", "a", "r+", "w+"]
    FileModeEncoding = Literal["r", "w", "a", "rb", "wb", "ab", "r+", "w+", "a+", "r+b", "w+b", "a+b"]
    DataStrT = TypeVar("DataStrT")

LOGGER = logging.getLogger(__name__)
class AcceptLanguage(Constants):
    """
    Supported languages.
    """
    # NOTE(review): language constant members (e.g.: 'en') appear to have been dropped by
    #   extraction — 'values()' relies on them being defined on this class; confirm upstream

    @classmethod
    def offers(cls):
        # type: () -> List[str]
        """
        Languages offered by the application.
        """
        languages = AcceptLanguage.values()
        # also offer generic variants (e.g.: 'en' from 'en-US'), deduplicated via the set
        languages += list({lang.split("-")[0] for lang in languages})
        return languages
class ContentType(Constants):
    """
    Supported ``Content-Type`` values.

    Media-Type nomenclature::

        <type> "/" [x- | <tree> "."] <subtype> ["+" suffix] *[";" parameter=value]
    """
    # NOTE(review): 'ANY' restored — it is referenced below by '_CONTENT_TYPE_EXTENSION_OVERRIDES'
    #   as the glob-any entry; confirm value upstream
    ANY = "*/*"
    APP_DIR = "application/directory"
    APP_CWL = "application/cwl"
    APP_CWL_JSON = "application/cwl+json"
    APP_CWL_YAML = "application/cwl+yaml"
    APP_CWL_X = "application/x-cwl"  # backward compatible format, others are official
    APP_FORM = "application/x-www-form-urlencoded"
    APP_GEOJSON = "application/geo+json"
    APP_GZIP = "application/gzip"
    APP_HDF5 = "application/x-hdf5"
    APP_JSON = "application/json"
    APP_OAS_JSON = "application/vnd.oai.openapi+json; version=3.0"
    APP_OGC_PKG_JSON = "application/ogcapppkg+json"
    APP_OGC_PKG_YAML = "application/ogcapppkg+yaml"
    APP_NETCDF = "application/x-netcdf"
    APP_OCTET_STREAM = "application/octet-stream"
    APP_PDF = "application/pdf"
    APP_TAR = "application/x-tar"  # map to existing gzip for CWL
    APP_TAR_GZ = "application/tar+gzip"  # map to existing gzip for CWL
    APP_VDN_GEOJSON = "application/vnd.geo+json"
    APP_XML = "application/xml"
    APP_YAML = "application/x-yaml"
    APP_ZIP = "application/zip"
    IMAGE_GEOTIFF = "image/tiff; subtype=geotiff"
    IMAGE_OGC_GEOTIFF = "image/tiff; application=geotiff"
    IMAGE_JPEG = "image/jpeg"
    IMAGE_GIF = "image/gif"
    IMAGE_PNG = "image/png"
    IMAGE_TIFF = "image/tiff"
    MULTI_PART_FORM = "multipart/form-data"
    TEXT_ENRICHED = "text/enriched"
    TEXT_HTML = "text/html"
    TEXT_PLAIN = "text/plain"
    TEXT_RICHTEXT = "text/richtext"
    TEXT_XML = "text/xml"
    VIDEO_MPEG = "video/mpeg"

    # special handling
    ANY_JSON = {
        APP_JSON, APP_YAML,
        APP_GEOJSON, APP_VDN_GEOJSON,
        APP_CWL, APP_CWL_JSON, APP_CWL_X, APP_CWL_YAML,
        APP_OAS_JSON,
        APP_OGC_PKG_JSON, APP_OGC_PKG_YAML,
    }
    ANY_CWL = {APP_CWL, APP_CWL_JSON, APP_CWL_YAML, APP_CWL_X}
    ANY_XML = {APP_XML, TEXT_XML}
class ContentEncoding(Constants):
    """
    Supported ``Content-Encoding`` values.

    .. note::
        Value ``binary`` is kept for convenience and backward compatibility with older definitions.
        It will default to the same encoding strategy as if ``base64`` was specified explicitly.
        Value ``binary`` is not part of :rfc:`4648`, but remains a common occurrence that dates from
        when ``format: binary`` was the approach employed to represent binary (JSON-schema Draft-04 and prior)
        instead of what is now recommended using ``contentEncoding: base64`` (JSON-schema Draft-07).

    .. seealso::
        - https://github.com/json-schema-org/json-schema-spec/issues/803
        - https://github.com/json-schema-org/json-schema-spec/pull/862
    """
    # NOTE(review): encoding constants restored — the method defaults below reference them;
    #   'UTF-8' is grounded by the 'open_parameters' return annotation, the base-N names
    #   follow :rfc:`4648` nomenclature — confirm exact values upstream
    UTF_8 = "UTF-8"
    BASE16 = "base16"
    BASE32 = "base32"
    BASE64 = "base64"
    BINARY = "binary"  # handled identically to 'base64' (see class docstring)

    @staticmethod
    def is_text(encoding):
        # type: (Any) -> bool
        """
        Indicates if the ``Content-Encoding`` value can be categorized as textual data.
        """
        # unknown/unspecified encodings ('get' resolves to None) are considered textual by default
        return ContentEncoding.get(encoding) in [ContentEncoding.UTF_8, None]

    @staticmethod
    def is_binary(encoding):
        # type: (Any) -> bool
        """
        Indicates if the ``Content-Encoding`` value can be categorized as binary data.
        """
        return not ContentEncoding.is_text(encoding)

    @staticmethod
    def open_parameters(encoding, mode="r"):
        # type: (Any, FileModeSteamType) -> Tuple[FileModeEncoding, Literal["UTF-8", None]]
        """
        Obtains relevant ``mode`` and ``encoding`` parameters for :func:`open` using the specified ``Content-Encoding``.
        """
        is_text = ContentEncoding.is_text(encoding)
        # binary data must use a 'b' mode without text encoding
        return (mode, ContentEncoding.UTF_8) if is_text else (f"{mode}b", None)

    @staticmethod
    @overload
    def encode(data, encoding=BASE64, binary=True):
        # type: (AnyStr, ContentEncoding, Literal[True]) -> bytes
        ...

    @staticmethod
    @overload
    def encode(data, encoding=BASE64, binary=False):
        # type: (AnyStr, ContentEncoding, Literal[False]) -> str
        ...

    @staticmethod
    @overload
    def encode(data, encoding=BASE64, binary=None):
        # type: (DataStrT, ContentEncoding, Literal[None]) -> DataStrT
        ...

    @staticmethod
    def encode(data, encoding=BASE64, binary=None):
        # type: (AnyStr, ContentEncoding, Optional[bool]) -> AnyStr
        """
        Encodes the data to the requested encoding and convert it to the string-like data type representation.

        :param data: Data to encode.
        :param encoding: Target encoding method.
        :param binary:
            If unspecified, the string-like type will be the same as the input data.
            Otherwise, convert the encoded data to :class:`str` or :class:`bytes` accordingly.
        :return: Encoded and converted data.
        """
        data_type = type(data)
        out_type = data_type if binary is None else (bytes if binary else str)
        enc_type = ContentEncoding.get(encoding, default=ContentEncoding.UTF_8)
        if enc_type == ContentEncoding.BINARY:
            enc_type = ContentEncoding.BASE64  # 'binary' behaves as 'base64' (see class docstring)
        # dispatch on (input type, output type, encoding) to apply both the encoding and the str/bytes conversion
        enc_func = {
            (str, str, ContentEncoding.UTF_8): lambda _: _,
            (str, bytes, ContentEncoding.UTF_8): lambda s: s.encode(),
            (bytes, bytes, ContentEncoding.UTF_8): lambda _: _,
            (bytes, str, ContentEncoding.UTF_8): lambda s: s.decode(),
            (str, str, ContentEncoding.BASE16): lambda s: base64.b16encode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE16): lambda s: base64.b16encode(s.encode()),
            (bytes, str, ContentEncoding.BASE16): lambda s: base64.b16encode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE16): lambda s: base64.b16encode(s),
            (str, str, ContentEncoding.BASE32): lambda s: base64.b32encode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE32): lambda s: base64.b32encode(s.encode()),
            (bytes, str, ContentEncoding.BASE32): lambda s: base64.b32encode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE32): lambda s: base64.b32encode(s),
            (str, str, ContentEncoding.BASE64): lambda s: base64.b64encode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE64): lambda s: base64.b64encode(s.encode()),
            (bytes, str, ContentEncoding.BASE64): lambda s: base64.b64encode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE64): lambda s: base64.b64encode(s),
        }
        return enc_func[(data_type, out_type, enc_type)](data)

    @staticmethod
    @overload
    def decode(data, encoding=BASE64, binary=True):
        # type: (AnyStr, ContentEncoding, Literal[True]) -> bytes
        ...

    @staticmethod
    @overload
    def decode(data, encoding=BASE64, binary=False):
        # type: (AnyStr, ContentEncoding, Literal[False]) -> str
        ...

    @staticmethod
    @overload
    def decode(data, encoding=BASE64, binary=None):
        # type: (DataStrT, ContentEncoding, Literal[None]) -> DataStrT
        ...

    @staticmethod
    def decode(data, encoding=BASE64, binary=None):
        # type: (AnyStr, ContentEncoding, Optional[bool]) -> AnyStr
        """
        Decodes the data from the specified encoding and convert it to the string-like data type representation.

        :param data: Data to decode.
        :param encoding: Expected source encoding.
        :param binary:
            If unspecified, the string-like type will be the same as the input data.
            Otherwise, convert the decoded data to :class:`str` or :class:`bytes` accordingly.
        :return: Decoded and converted data.
        """
        data_type = type(data)
        out_type = data_type if binary is None else (bytes if binary else str)
        enc_type = ContentEncoding.get(encoding, default=ContentEncoding.UTF_8)
        if enc_type == ContentEncoding.BINARY:
            enc_type = ContentEncoding.BASE64  # 'binary' behaves as 'base64' (see class docstring)
        # dispatch on (input type, output type, encoding) to apply both the decoding and the str/bytes conversion
        dec_func = {
            (str, str, ContentEncoding.UTF_8): lambda _: _,
            (str, bytes, ContentEncoding.UTF_8): lambda s: s.encode(),
            (bytes, bytes, ContentEncoding.UTF_8): lambda _: _,
            (bytes, str, ContentEncoding.UTF_8): lambda s: s.decode(),
            (str, str, ContentEncoding.BASE16): lambda s: base64.b16decode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE16): lambda s: base64.b16decode(s.encode()),
            (bytes, str, ContentEncoding.BASE16): lambda s: base64.b16decode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE16): lambda s: base64.b16decode(s),
            (str, str, ContentEncoding.BASE32): lambda s: base64.b32decode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE32): lambda s: base64.b32decode(s.encode()),
            (bytes, str, ContentEncoding.BASE32): lambda s: base64.b32decode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE32): lambda s: base64.b32decode(s),
            (str, str, ContentEncoding.BASE64): lambda s: base64.b64decode(s.encode()).decode(),
            (str, bytes, ContentEncoding.BASE64): lambda s: base64.b64decode(s.encode()),
            (bytes, str, ContentEncoding.BASE64): lambda s: base64.b64decode(s).decode(),
            (bytes, bytes, ContentEncoding.BASE64): lambda s: base64.b64decode(s),
        }
        return dec_func[(data_type, out_type, enc_type)](data)
class SchemaRole(Constants):
    """
    Supported schema-role references for format annotations.
    """
    JSON_SCHEMA = "https://www.w3.org/2019/wot/json-schema"
# explicit media-type to extension when not literally written in item after '/' (excluding 'x-' prefix)
_CONTENT_TYPE_EXTENSION_OVERRIDES = {
    ContentType.APP_VDN_GEOJSON: ".geojson",  # pywps 4.4 default extension without vdn prefix
    ContentType.APP_NETCDF: ".nc",
    ContentType.APP_GZIP: ".gz",
    ContentType.APP_TAR_GZ: ".tar.gz",
    ContentType.APP_YAML: ".yml",
    ContentType.IMAGE_TIFF: ".tif",  # common alternate to .tiff
    ContentType.ANY: ".*",  # any for glob
    ContentType.APP_DIR: "/",  # force href to finish with explicit '/' to mark directory
    ContentType.APP_OCTET_STREAM: "",
    ContentType.APP_FORM: "",
    ContentType.MULTI_PART_FORM: "",
}
# media-types that should never resolve to any specific file extension
_CONTENT_TYPE_EXCLUDE = [
    ContentType.APP_OCTET_STREAM,
    ContentType.APP_FORM,
    ContentType.MULTI_PART_FORM,
]
_EXTENSION_CONTENT_TYPES_OVERRIDES = {
    ".text": ContentType.TEXT_PLAIN,  # common alias to .txt, especially when using format query
    ".tiff": ContentType.IMAGE_TIFF,  # avoid defaulting to subtype geotiff
    ".yaml": ContentType.APP_YAML,  # common alternative to .yml
}

_CONTENT_TYPE_EXTENSION_MAPPING = {}  # type: Dict[str, str]
_CONTENT_TYPE_EXTENSION_MAPPING.update(_CONTENT_TYPE_EXTENSION_OVERRIDES)
# extend with all known pywps formats
_CONTENT_TYPE_FORMAT_MAPPING = {
    # content-types here are fully defined with extra parameters (e.g.: geotiff as subtype of tiff)
    fmt.mime_type: fmt
    for _, fmt in FORMATS._asdict().items()  # noqa: W0212
    if fmt.mime_type not in _CONTENT_TYPE_EXCLUDE
}  # type: Dict[str, Format]
# back-propagate changes from new formats
_CONTENT_TYPE_EXTENSION_MAPPING.update({
    ctype: fmt.extension
    for ctype, fmt in _CONTENT_TYPE_FORMAT_MAPPING.items()  # noqa: W0212
    if ctype not in _CONTENT_TYPE_EXTENSION_MAPPING
})
# apply any remaining local types not explicitly or indirectly added by FORMATS
_CONTENT_TYPE_EXT_PATTERN = re.compile(r"^[a-z]+/(x-)?(?P<ext>([a-z]+)).*$")
# NOTE(review): module 'locals()' keys are plain names (e.g.: 'ContentType'), never the dotted
#   'ContentType.*' form tested below — this filter appears to always yield an empty list;
#   confirm the intended attribute source upstream
_CONTENT_TYPE_LOCALS_MISSING = [
    (ctype, _CONTENT_TYPE_EXT_PATTERN.match(ctype))
    for name, ctype in locals().items()
    if name.startswith("ContentType.")
    and isinstance(ctype, str)
    and ctype not in _CONTENT_TYPE_EXCLUDE
    and ctype not in _CONTENT_TYPE_FORMAT_MAPPING
    and ctype not in _CONTENT_TYPE_EXTENSION_MAPPING
]
_CONTENT_TYPE_LOCALS_MISSING = sorted(
    [
        (ctype, f".{re_ext['ext']}")
        for ctype, re_ext in _CONTENT_TYPE_LOCALS_MISSING if re_ext
    ],
    key=lambda typ: typ[0]
)
# update and back-propagate generated local types
_CONTENT_TYPE_EXTENSION_MAPPING.update(_CONTENT_TYPE_LOCALS_MISSING)
# extend additional types
# FIXME: disabled for security reasons
# _CONTENT_TYPE_EXTENSION_MAPPING.update({
#     ctype: ext
#     for ext, ctype in mimetypes.types_map.items()
#     if ctype not in _CONTENT_TYPE_EXCLUDE
#     and ctype not in _CONTENT_TYPE_EXTENSION_MAPPING
# })
_CONTENT_TYPE_FORMAT_MAPPING.update({
    ctype: Format(ctype, extension=ext)
    for ctype, ext in _CONTENT_TYPE_LOCALS_MISSING
    if ctype not in _CONTENT_TYPE_EXCLUDE
})
_CONTENT_TYPE_FORMAT_MAPPING.update({
    ctype: Format(ctype, extension=ext)
    for ctype, ext in _CONTENT_TYPE_EXTENSION_MAPPING.items()
    if ctype not in _CONTENT_TYPE_EXCLUDE
    and ctype not in _CONTENT_TYPE_FORMAT_MAPPING
})
_EXTENSION_CONTENT_TYPES_MAPPING = {
    # because the same extension can represent multiple distinct Content-Types,
    # derive the simplest (shortest) one by default for guessing generic Content-Type
    # (descending-length iteration lets the shortest entry win the overwrite)
    ext: ctype for ctype, ext in reversed(sorted(
        _CONTENT_TYPE_EXTENSION_MAPPING.items(),
        key=lambda typ_ext: len(typ_ext[0])
    ))
}
_EXTENSION_CONTENT_TYPES_MAPPING.update(_EXTENSION_CONTENT_TYPES_OVERRIDES)
# file types that can contain textual characters
_CONTENT_TYPE_CHAR_TYPES = [
    "application",
    "multipart",
    "text",
]
# redirect type resolution semantically equivalent CWL validators
# should only be used to map CWL 'format' field if they are not already resolved through existing IANA/EDAM reference
_CONTENT_TYPE_SYNONYM_MAPPING = {
    ContentType.APP_TAR: ContentType.APP_GZIP,
    ContentType.APP_TAR_GZ: ContentType.APP_GZIP,
}
# Mappings for "CWL->File->Format"
# - IANA contains most standard media-types, but some special/vendor-specific types are missing
#   (application/x-hdf5, application/x-netcdf, etc.).
# - EDAM contains many field-specific schemas, but don't have an implicit URL definition (uses 'format_<id>' instead).
# - OpenGIS contains many OGC/Geospatial Media-Types and glossary of related terms, but since it includes many items
#   that are not necessarily Media-Types, URI resolutions are not attempted at random to avoid invalid references.
# search:
# - IANA: https://www.iana.org/assignments/media-types/media-types.xhtml
# - EDAM-classes: http://bioportal.bioontology.org/ontologies/EDAM/?p=classes (section 'Format')
# - EDAM-browser: https://ifb-elixirfr.github.io/edam-browser/
# - OpenGIS vocabulary: https://defs.opengis.net/vocprez/object?uri=http://www.opengis.net/def/glossary
IANA_NAMESPACE = "iana"
IANA_NAMESPACE_URL = "https://www.iana.org/assignments/media-types/"
IANA_NAMESPACE_DEFINITION = {IANA_NAMESPACE: IANA_NAMESPACE_URL}
# Generic entries in IANA Media-Type namespace registry that don't have an explicit endpoint,
# but are defined regardless. Avoid unnecessary HTTP NotFound toward those missing endpoints.
# (see items that don't have a link in 'Template' column in lists under 'IANA_NAMESPACE_URL')
# types to enforce to IANA in case another equivalent is known in other following mappings
# duplicates in other mappings are left defined in case they are employed by a user to ensure their detection
# but prefer the IANA resolution which is the primary reference for Media-Types
IANA_MAPPING = {
    ContentType.APP_JSON: ContentType.APP_JSON,
    # CWL now has an official IANA definition:
    # https://www.iana.org/assignments/media-types/application/cwl
    ContentType.APP_CWL: ContentType.APP_CWL,
    ContentType.APP_CWL_JSON: ContentType.APP_CWL,
    ContentType.APP_CWL_YAML: ContentType.APP_CWL,
    ContentType.APP_CWL_X: ContentType.APP_CWL,
}
EDAM_NAMESPACE = "edam"
EDAM_NAMESPACE_URL = "http://edamontology.org/"
EDAM_NAMESPACE_DEFINITION = {EDAM_NAMESPACE: EDAM_NAMESPACE_URL}
EDAM_SCHEMA = "http://edamontology.org/EDAM_1.24.owl"
EDAM_MAPPING = {
    # preserve CWL EDAM definitions for backward compatibility in case they were used in deployed processes
    ContentType.APP_CWL: "format_3857",
    ContentType.APP_CWL_JSON: "format_3857",
    ContentType.APP_CWL_YAML: "format_3857",
    ContentType.APP_CWL_X: "format_3857",
    ContentType.IMAGE_GIF: "format_3467",
    ContentType.IMAGE_JPEG: "format_3579",
    ContentType.APP_HDF5: "format_3590",
    ContentType.APP_JSON: "format_3464",
    ContentType.APP_YAML: "format_3750",
    ContentType.TEXT_PLAIN: "format_1964",
}
# Official links to be employed in definitions must be formed as:
#   http://www.opengis.net/def/...
# But they should be redirected to full definitions as:
#   https://defs.opengis.net/vocprez/object?uri=http://www.opengis.net/def/...
# See common locations:
#   https://www.opengis.net/def/media-type
OPENGIS_NAMESPACE = "opengis"
OPENGIS_NAMESPACE_URL = "http://www.opengis.net/"
OPENGIS_NAMESPACE_DEFINITION = {OPENGIS_NAMESPACE: OPENGIS_NAMESPACE_URL}
# shorthand notation directly scoped under OGC Media-Types to allow: 'ogc:<media-type-id>'
# NOTE(review): 'OGC_NAMESPACE' restored — it is referenced by the definitions below but was
#   missing from the visible source; confirm value upstream
OGC_NAMESPACE = "ogc"
OGC_NAMESPACE_URL = f"{OPENGIS_NAMESPACE_URL}def/media-type/ogc/1.0/"
OGC_NAMESPACE_DEFINITION = {OGC_NAMESPACE: OGC_NAMESPACE_URL}
OGC_MAPPING = {
    ContentType.IMAGE_GEOTIFF: "geotiff",
    ContentType.IMAGE_OGC_GEOTIFF: "geotiff",
    ContentType.APP_NETCDF: "netcdf",
}
def get_allowed_extensions():
    # type: () -> List[str]
    """
    Obtain the complete list of extensions that are permitted for processing by the application.

    .. note::
        This is employed for security reasons. Files can still be specified with another allowed extension, but
        it will not automatically inherit properties applicable to scripts and executables.
        If a specific file type is refused due to its extension, a PR can be submitted to add it explicitly.
    """
    groups = [
        "archives",
        "audio",
        "data",
        "documents",
        # "executables",  # deliberately excluded for security (see note in docstring)
        "images",
        # "scripts",  # deliberately excluded for security (see note in docstring)
        "text",
        "video",
    ]
    base = set(resolve_extensions("+".join(groups)))
    # add locally-known extensions (without their leading dot), skipping empty and glob entries
    extra = {ext[1:] for ext in _EXTENSION_CONTENT_TYPES_MAPPING if ext and "*" not in ext}
    return list(base | extra)
def get_extension(media_type, dot=True):
    # type: (str, bool) -> str
    """
    Retrieves the extension corresponding to :paramref:`media_type` if explicitly defined, or by parsing it.

    :param media_type: Media-Type for which to find a corresponding file extension.
    :param dot: Request that the returned extension includes (or omits) the leading dot.
    """
    def _handle_dot(_ext):
        # type: (str) -> str
        if dot and not _ext.startswith(".") and _ext:  # don't add for empty extension
            return f".{_ext}"
        if not dot and _ext.startswith("."):
            return _ext[1:]
        return _ext

    fmt = _CONTENT_TYPE_FORMAT_MAPPING.get(media_type)
    if fmt:
        if not fmt.extension.startswith("."):
            # non-dotted extensions (e.g.: '/' for directories, empty strings) are returned
            # as-is — presumably to avoid prefixing them with a dot; confirm intent upstream
            return fmt.extension
        return _handle_dot(fmt.extension)
    ext = _CONTENT_TYPE_EXTENSION_MAPPING.get(media_type)
    if ext:
        return _handle_dot(ext)
    # fallback: derive the extension from the sub-type portion of the sanitized Media-Type
    ctype = clean_media_type_format(media_type, strip_parameters=True)
    if not ctype:
        return ""
    ext_default = f".{ctype.split('/')[-1].replace('x-', '')}"
    ext = _CONTENT_TYPE_EXTENSION_MAPPING.get(ctype, ext_default)
    return _handle_dot(ext)
def get_content_type(extension, charset=None, default=None):
    # type: (str, Optional[str], Optional[str]) -> Optional[str]
    """
    Retrieves the Content-Type corresponding to the specified extension if it can be matched.

    :param extension: Extension for which to attempt finding a known Content-Type.
    :param charset: Charset to apply to the Content-Type as needed if extension was matched.
    :param default: Default Content-Type to return if no extension is matched.
    :return: Matched or default Content-Type.
    """
    ctype = None
    if not extension:
        return default
    if not extension.startswith("."):
        ctype = _EXTENSION_CONTENT_TYPES_MAPPING.get(extension)
        if not ctype:
            # retry with a leading dot in case a bare extension was provided (e.g.: 'json')
            extension = f".{extension}"
    if not ctype:
        ctype = _EXTENSION_CONTENT_TYPES_MAPPING.get(extension)
    if not ctype:
        return default
    return add_content_type_charset(ctype, charset)
def add_content_type_charset(content_type, charset):
    # type: (Union[str, ContentType], Optional[str]) -> str
    """
    Apply the specific charset to the content-type with some validation in case of conflicting definitions.

    :param content_type: Desired Content-Type.
    :param charset: Desired charset parameter.
    :return: updated content-type with charset.
    """
    # no parameters in Media-Type, but explicit Content-Type with charset could exist as needed
    if charset and "charset=" in content_type:
        # replace the existing charset parameter rather than appending a conflicting one
        return re.sub(r"charset\=[A-Za-z0-9\_\-]+", f"charset={charset}", content_type)
    # make sure to never include by mistake if the represented type cannot be characters
    if charset and any(content_type.startswith(f"{_type}/") for _type in _CONTENT_TYPE_CHAR_TYPES):
        return f"{content_type}; charset={charset}"
    return content_type
def json_default_handler(obj):
    # type: (Any) -> Union[JSON, str, None]
    """
    Serialization handler for object types not natively supported by :mod:`json`.

    :raises TypeError: If the object cannot be serialized.
    """
    # 'datetime.datetime' is a subclass of 'datetime.date'; both serialize to ISO-8601
    if isinstance(obj, (datetime.date, datetime.datetime)):
        return obj.isoformat()
    raise TypeError(f"Type {type(obj)} not serializable.")
def repr_json(data, force_string=True, ensure_ascii=False, indent=2, **kwargs):
    # type: (Any, bool, bool, Optional[int], **Any) -> Union[JSON, str, None]
    """
    Ensure that the input data can be serialized as JSON to return it formatted representation as such.

    If formatting as JSON fails, returns the data as string representation or ``None`` accordingly.

    :param data: Data to represent as JSON.
    :param force_string: Return the serialized JSON string rather than the original data.
    :param ensure_ascii: Escape non-ASCII characters in the serialized output.
    :param indent: Indentation level of the serialized output.
    :param kwargs: Additional parameters passed down to :func:`json.dumps`.
    """
    if data is None:
        return None
    default = kwargs.pop("default", None)
    if default is None:
        default = json_default_handler
    try:
        if isinstance(data, str):
            return data  # avoid adding additional quotes
        # serialize even when returning 'data' itself, to validate it is JSON-serializable
        data_str = json.dumps(data, indent=indent, ensure_ascii=ensure_ascii, default=default, **kwargs)
        return data_str if force_string else data
    except Exception:  # noqa: W0703 # nosec: B110
        return str(data)
if TYPE_CHECKING:
from weaver.typedefs import Literal