Module geoengine.datasets

Module for working with datasets and source definitions

Expand source code
'''
Module for working with datasets and source definitions
'''

from __future__ import annotations
from abc import abstractmethod
from datetime import datetime
from typing import Dict, List, NamedTuple, Optional, Tuple, Union, Generic, TypeVar

from enum import Enum
from uuid import UUID

import json
from typing_extensions import Literal, TypedDict
from attr import dataclass
import numpy as np
import geopandas as gpd
import requests as req

from geoengine.error import GeoEngineException, InputException
from geoengine.auth import get_session
from geoengine.types import GdalDatasetParameters, RasterResultDescriptor, TimeStep, TimeStepGranularity, VectorDataType


# Constrained TypeVar for the value type of duration-spec dicts: plain `str`
# values for zero/infinite durations, or a mix of str/int/TimeStepGranularity
# for fixed-value durations (note: "Org" appears to be a typo for "Ogr").
_OrgSourceDurationDictT = TypeVar('_OrgSourceDurationDictT', str, Union[str, int, TimeStepGranularity])


class OgrSourceTimeFormat:
    '''Base class for OGR time formats.

    Use the factory classmethods to obtain a concrete format.
    '''

    @abstractmethod
    def to_dict(self) -> Dict[str, str]:
        pass

    @classmethod
    def seconds(cls) -> SecondsOgrSourceTimeFormat:
        '''A format counting seconds since the UNIX epoch'''
        return SecondsOgrSourceTimeFormat()

    @classmethod
    def auto(cls) -> AutoOgrSourceTimeFormat:
        '''A format that is detected automatically'''
        return AutoOgrSourceTimeFormat()

    @classmethod
    def custom(cls, format_string: str) -> CustomOgrSourceTimeFormat:
        '''A custom format described by `format_string`'''
        return CustomOgrSourceTimeFormat(format_string)


@dataclass
class SecondsOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''An OGR time format counting seconds since the UNIX epoch'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize this format for the Geo Engine API'''
        return dict(format="seconds")


@dataclass
class AutoOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''An OGR time format that is detected automatically'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize this format for the Geo Engine API'''
        return dict(format="auto")


@dataclass
class CustomOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''A custom OGR time format given as a format string'''

    # the user-supplied format string, passed through verbatim
    custom_format: str

    def to_dict(self) -> Dict[str, str]:
        '''Serialize this format for the Geo Engine API'''
        serialized = {"format": "custom"}
        serialized["customFormat"] = self.custom_format
        return serialized


class OgrSourceDuration(Generic[_OrgSourceDurationDictT]):
    '''Base class for the duration part of a OGR time format.

    Use the factory classmethods to obtain a concrete duration spec.
    '''

    @abstractmethod
    def to_dict(self) -> Dict[str, _OrgSourceDurationDictT]:
        pass

    @classmethod
    def zero(cls) -> ZeroOgrSourceDurationSpec:
        '''A zero-length duration, i.e. an instant'''
        return ZeroOgrSourceDurationSpec()

    @classmethod
    def infinite(cls) -> InfiniteOgrSourceDurationSpec:
        '''An unbounded, open-ended duration'''
        return InfiniteOgrSourceDurationSpec()

    @classmethod
    def value(
            cls,
            value: int,
            granularity: TimeStepGranularity = TimeStepGranularity.SECONDS) -> ValueOgrSourceDurationSpec:
        '''A fixed duration of `value` steps of the given `granularity`'''
        return ValueOgrSourceDurationSpec(TimeStep(value, granularity))


@dataclass
class ValueOgrSourceDurationSpec(OgrSourceDuration):
    '''A fixed value for a source duration'''

    # the duration, expressed as a number of steps plus a granularity
    step: TimeStep

    def to_dict(self) -> Dict[str, Union[str, int, TimeStepGranularity]]:
        '''Serialize this duration for the Geo Engine API'''
        serialized: Dict[str, Union[str, int, TimeStepGranularity]] = {"type": "value"}
        serialized["step"] = self.step.step
        serialized["granularity"] = self.step.granularity.value
        return serialized


@dataclass
class ZeroOgrSourceDurationSpec(OgrSourceDuration):
    '''A zero-length duration, i.e. an instant'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize this duration for the Geo Engine API'''
        return dict(type="zero")


@dataclass
class InfiniteOgrSourceDurationSpec(OgrSourceDuration):
    '''An unbounded, open-ended time duration'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize this duration for the Geo Engine API'''
        return dict(type="infinite")


class OgrSourceDatasetTimeType:
    '''A time type specification for OGR dataset definitions.

    Use the factory classmethods to obtain a concrete time type.
    '''

    @abstractmethod
    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        pass

    @classmethod
    def none(cls) -> NoneOgrSourceDatasetTimeType:
        '''The dataset carries no time information'''
        return NoneOgrSourceDatasetTimeType()

    @classmethod
    def start(cls,
              start_field: str,
              start_format: OgrSourceTimeFormat,
              duration: OgrSourceDuration) -> StartOgrSourceDatasetTimeType:
        '''Specify a start column and a fixed duration'''
        return StartOgrSourceDatasetTimeType(start_field, start_format, duration)

    @classmethod
    def start_end(cls,
                  start_field: str,
                  start_format: OgrSourceTimeFormat,
                  end_field: str,
                  end_format: OgrSourceTimeFormat) -> StartEndOgrSourceDatasetTimeType:
        '''The dataset contains a start and an end column'''
        return StartEndOgrSourceDatasetTimeType(start_field, start_format, end_field, end_format)

    @classmethod
    def start_duration(cls,
                       start_field: str,
                       start_format: OgrSourceTimeFormat,
                       duration_field: str) -> StartDurationOgrSourceDatasetTimeType:
        '''The dataset contains a start and a duration column'''
        return StartDurationOgrSourceDatasetTimeType(start_field, start_format, duration_field)


@dataclass
class NoneOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''A dataset without any time information'''

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize this time type for the Geo Engine API'''
        return dict(type="none")


@dataclass
class StartOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''Time information from a start column plus a fixed duration'''

    start_field: str  # column holding the start instant
    start_format: OgrSourceTimeFormat  # how to parse the start column
    duration: OgrSourceDuration  # fixed validity duration per feature

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize this time type for the Geo Engine API'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {"type": "start"}
        serialized["startField"] = self.start_field
        serialized["startFormat"] = self.start_format.to_dict()
        serialized["duration"] = self.duration.to_dict()
        return serialized


@dataclass
class StartEndOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''Time information from a start column and an end column'''

    start_field: str  # column holding the start instant
    start_format: OgrSourceTimeFormat  # how to parse the start column
    end_field: str  # column holding the end instant
    end_format: OgrSourceTimeFormat  # how to parse the end column

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize this time type for the Geo Engine API'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {"type": "start+end"}
        serialized["startField"] = self.start_field
        serialized["startFormat"] = self.start_format.to_dict()
        serialized["endField"] = self.end_field
        serialized["endFormat"] = self.end_format.to_dict()
        return serialized


@dataclass
class StartDurationOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''Time information from a start column and a duration column'''

    start_field: str  # column holding the start instant
    start_format: OgrSourceTimeFormat  # how to parse the start column
    duration_field: str  # column holding the per-feature duration

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize this time type for the Geo Engine API'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {"type": "start+duration"}
        serialized["startField"] = self.start_field
        serialized["startFormat"] = self.start_format.to_dict()
        serialized["durationField"] = self.duration_field
        return serialized


class OgrOnError(Enum):
    '''How the OGR source reacts to an invalid feature in the input data'''
    IGNORE = "ignore"  # skip the offending feature and continue
    ABORT = "abort"  # abort the whole operation


class DatasetId:
    '''A typed wrapper around the UUID of a dataset'''

    __dataset_id: UUID

    def __init__(self, dataset_id: UUID) -> None:
        self.__dataset_id = dataset_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> DatasetId:
        '''Parse a http response to a `DatasetId`

        Raises a `GeoEngineException` if the response contains no 'id' field.
        '''
        if 'id' not in response:
            raise GeoEngineException(response)

        return DatasetId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__dataset_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two dataset ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__dataset_id == other.__dataset_id  # pylint: disable=protected-access

    def __hash__(self) -> int:
        # defining `__eq__` alone sets `__hash__` to None; restore hashing
        # so ids can be used as dict keys and set members
        return hash(self.__dataset_id)


# Marker base for all metadata definition TypedDicts; concrete subclasses
# add the fields the Geo Engine API expects for their `type` discriminator.
class MetaDataDefinition(TypedDict):  # pylint: disable=too-few-public-methods
    '''Super class for all metadata definitions'''


class GdalMetaDataStatic(MetaDataDefinition):
    '''Static metadata for GDAL datasets'''

    type: Literal["GdalStatic"]  # API discriminator tag
    # optional validity interval; presumably None means unbounded — TODO confirm
    time: Optional[Tuple[datetime, datetime]]
    params: GdalDatasetParameters
    resultDescriptor: RasterResultDescriptor


class DateTimeParseFormat(TypedDict):
    '''A format for parsing date time strings'''

    fmt: str  # the format string itself
    hasTz: bool  # whether the format contains a time zone
    hasTime: bool  # whether the format contains a time-of-day part


class TimeReference(Enum):
    '''Which end of the time interval a placeholder refers to'''

    START = "Start"  # the interval's start instant
    END = "End"  # the interval's end instant


class GdalSourceTimePlaceholder(TypedDict):
    '''A placeholder for a time value in a file name'''
    format: DateTimeParseFormat  # how the time is rendered into the name
    reference: TimeReference  # whether start or end time is substituted


class GdalMetaDataRegular(MetaDataDefinition):
    '''Metadata for GDAL datasets with regular time steps'''

    type: Literal["GdalMetaDataRegular"]  # API discriminator tag
    resultDescriptor: RasterResultDescriptor
    params: GdalDatasetParameters
    # maps placeholder keys in the file-name template to their time formats
    timePlaceholders: Dict[str, GdalSourceTimePlaceholder]
    dataTime: Tuple[datetime, datetime]  # overall validity interval
    step: TimeStep  # temporal distance between consecutive slices


class GdalMetadataNetCdfCf(MetaDataDefinition):
    '''Metadata for NetCDF CF datasets'''

    type: Literal["GdalMetadataNetCdfCf"]  # API discriminator tag
    resultDescriptor: RasterResultDescriptor
    params: GdalDatasetParameters
    start: datetime  # first time instant covered by the file
    end: datetime  # last time instant covered by the file
    step: TimeStep  # temporal distance between consecutive slices
    bandOffset: int  # index of the first band to read


class UploadId:
    '''A typed wrapper around the UUID of an upload'''

    __upload_id: UUID

    def __init__(self, upload_id: UUID) -> None:
        self.__upload_id = upload_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> UploadId:
        '''Parse a http response to an `UploadId`

        Raises a `GeoEngineException` if the response contains no 'id' field.
        '''
        if 'id' not in response:
            raise GeoEngineException(response)

        return UploadId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__upload_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two upload ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__upload_id == other.__upload_id  # pylint: disable=protected-access

    def __hash__(self) -> int:
        # defining `__eq__` alone sets `__hash__` to None; restore hashing
        # so ids can be used as dict keys and set members
        return hash(self.__upload_id)


class VolumeId:
    '''A typed wrapper around the UUID of a volume'''

    __volume_id: UUID

    def __init__(self, volume_id: UUID) -> None:
        self.__volume_id = volume_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> VolumeId:
        '''Parse a http response to a `VolumeId`

        Raises a `GeoEngineException` if the response contains no 'id' field.
        '''
        if 'id' not in response:
            raise GeoEngineException(response)

        # bug fix: this previously constructed (and was annotated to return)
        # an `UploadId` — an apparent copy-paste error
        return VolumeId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__volume_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two volume ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__volume_id == other.__volume_id  # pylint: disable=protected-access

    def __hash__(self) -> int:
        # defining `__eq__` alone sets `__hash__` to None; restore hashing
        # so ids can be used as dict keys and set members
        return hash(self.__volume_id)


def pandas_dtype_to_column_type(dtype: np.dtype) -> str:
    '''Convert a pandas `dtype` to a Geo Engine column type name.

    Integer dtypes map to 'int', floating dtypes to 'float' and generic
    `object` columns to 'text'; any other dtype raises an `InputException`.
    '''

    for numpy_parent, column_type in ((np.integer, 'int'), (np.floating, 'float')):
        if np.issubdtype(dtype, numpy_parent):
            return column_type

    if str(dtype) == 'object':
        return 'text'

    raise InputException(
        f'pandas dtype {dtype} has no corresponding column type')


def upload_dataframe(
        df: gpd.GeoDataFrame,
        name: str = "Upload from Python",
        time: OgrSourceDatasetTimeType = OgrSourceDatasetTimeType.none(),
        on_error: OgrOnError = OgrOnError.ABORT,
        timeout: int = 3600) -> DatasetId:
    '''
    Uploads a given dataframe to Geo Engine and returns the id of the created dataset.

    The dataframe is serialized to GeoJSON, uploaded, and registered as an
    OGR dataset with one column definition per non-geometry column.

    Raises an `InputException` for an empty dataframe or a missing crs and a
    `GeoEngineException` when the server rejects the upload or the creation.
    '''

    if len(df) == 0:
        raise InputException("Cannot upload empty dataframe")

    if df.crs is None:
        raise InputException("Dataframe must have a specified crs")

    session = get_session()

    df_json = df.to_json()

    response = req.post(f'{session.server_url}/upload',
                        files={"geo.json": df_json},
                        headers=session.auth_header,
                        timeout=timeout).json()

    if 'error' in response:
        raise GeoEngineException(response)

    upload_id = UploadId.from_response(response)

    # positional access: the previous label-based `df.geom_type[0]` raised a
    # KeyError for dataframes whose index does not contain the label 0
    vector_type = VectorDataType.from_geopandas_type_name(df.geom_type.iloc[0])

    # map every non-geometry column to its Geo Engine data type
    columns = {key: {'dataType': pandas_dtype_to_column_type(value), 'measurement': {'type': 'unitless'}}
               for (key, value) in df.dtypes.items()
               if str(value) != 'geometry'}

    # the OGR loading info wants the column names grouped by type
    floats = [key for (key, value) in columns.items() if value['dataType'] == 'float']
    ints = [key for (key, value) in columns.items() if value['dataType'] == 'int']
    texts = [key for (key, value) in columns.items() if value['dataType'] == 'text']

    create = {
        "dataPath": {
            "upload": str(upload_id)
        },
        "definition": {
            "properties": {
                "name": name,
                "description": "",
                "sourceOperator": "OgrSource"
            },
            "metaData": {
                "type": "OgrMetaData",
                "loadingInfo": {
                    "fileName": "geo.json",
                    "layerName": "geo",
                    "dataType": vector_type.value,
                    "time": time.to_dict(),
                    "columns": {
                        "x": "",
                        "float": floats,
                        "int": ints,
                        "text": texts
                    },
                    "onError": on_error.value
                },
                "resultDescriptor": {
                    "type": "vector",
                    "dataType": vector_type.value,
                    "columns": columns,
                    "spatialReference": df.crs.to_string()
                }
            }
        }
    }

    response = req.post(f'{session.server_url}/dataset',
                        json=create, headers=session.auth_header,
                        timeout=timeout
                        ).json()

    if 'error' in response:
        raise GeoEngineException(response)

    return DatasetId(response["id"])


class StoredDataset(NamedTuple):
    '''The result of a store dataset request is a combination of `upload_id` and `dataset_id`'''

    dataset_id: DatasetId
    upload_id: UploadId

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> StoredDataset:
        '''Parse a http response to a `StoredDataset`

        Raises a `GeoEngineException` if either field is missing.
        '''
        # bug fix: the original check used `and`, so a response missing only
        # one of the two keys slipped through and crashed with a KeyError
        if 'dataset' not in response or 'upload' not in response:
            raise GeoEngineException(response)

        return StoredDataset(
            dataset_id=DatasetId(UUID(response['dataset'])),
            upload_id=UploadId(UUID(response['upload']))
        )


@dataclass
class Volume:
    '''A named storage volume on the Geo Engine server'''

    name: str  # human-readable volume name
    path: str  # server-side path of the volume

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> Volume:
        '''Parse a http response into a `Volume`'''
        return Volume(response['name'], response['path'])


def volumes(timeout: int = 60) -> List[Volume]:
    '''Fetch the list of all volumes known to the Geo Engine'''

    session = get_session()

    url = f'{session.server_url}/dataset/volumes'
    volume_list = req.get(url,
                          headers=session.admin_auth_header,
                          timeout=timeout).json()

    return [Volume.from_response(entry) for entry in volume_list]


def add_public_raster_dataset(volume_id: VolumeId, name: str, meta_data: MetaDataDefinition,
                              timeout: int = 60) -> DatasetId:
    '''Adds a public raster dataset to the Geo Engine.

    The dataset is created on the given volume with `meta_data` as its
    GDAL source definition. Raises a `GeoEngineException` on failure.
    '''

    create = {
        "dataPath": {
            "volume": str(volume_id)
        },
        "definition": {
            "properties": {
                "name": name,
                "description": "",
                "sourceOperator": "GdalSource"
            },
            "metaData": meta_data
        }
    }

    # `default=dict` serializes the TypedDict-based metadata values
    data = json.dumps(create, default=dict)

    session = get_session()

    # copy before modifying: mutating `session.admin_auth_header` in place
    # would leak the 'Content-Type' header into every subsequent request
    headers = dict(session.admin_auth_header)
    headers['Content-Type'] = 'application/json'

    response = req.post(f'{session.server_url}/dataset',
                        data=data, headers=headers,
                        timeout=timeout
                        ).json()

    if 'error' in response:
        raise GeoEngineException(response)

    return DatasetId(response["id"])


def delete_dataset(dataset_id: DatasetId, timeout: int = 60) -> None:
    '''Delete a dataset. The dataset must be owned by the caller.'''

    session = get_session()

    url = f'{session.server_url}/dataset/{dataset_id}'
    response = req.delete(url,
                          headers=session.admin_or_normal_auth_header,
                          timeout=timeout)

    if response.status_code != 200:
        raise GeoEngineException(response.json())

Functions

def add_public_raster_dataset(volume_id: VolumeId, name: str, meta_data: MetaDataDefinition, timeout: int = 60) ‑> DatasetId

Adds a public raster dataset to the Geo Engine

Expand source code
def add_public_raster_dataset(volume_id: VolumeId, name: str, meta_data: MetaDataDefinition,
                              timeout: int = 60) -> DatasetId:
    '''Adds a public raster dataset to the Geo Engine'''

    create = {
        "dataPath": {
            "volume": str(volume_id)
        },
        "definition": {
            "properties": {
                "name": name,
                "description": "",
                "sourceOperator": "GdalSource"
            },
            "metaData": meta_data
        }
    }

    data = json.dumps(create, default=dict)

    session = get_session()

    headers = session.admin_auth_header
    headers['Content-Type'] = 'application/json'

    response = req.post(f'{session.server_url}/dataset',
                        data=data, headers=headers,
                        timeout=timeout
                        ).json()

    if 'error' in response:
        raise GeoEngineException(response)

    return DatasetId(response["id"])
def delete_dataset(dataset_id: DatasetId, timeout: int = 60) ‑> None

Delete a dataset. The dataset must be owned by the caller.

Expand source code
def delete_dataset(dataset_id: DatasetId, timeout: int = 60) -> None:
    '''Delete a dataset. The dataset must be owned by the caller.'''

    session = get_session()

    response = req.delete(f'{session.server_url}/dataset/{dataset_id}',
                          headers=session.admin_or_normal_auth_header,
                          timeout=timeout)

    if response.status_code != 200:
        error_json = response.json()
        raise GeoEngineException(error_json)
def pandas_dtype_to_column_type(dtype: np.dtype) ‑> str

Convert a pandas dtype to a column type

Expand source code
def pandas_dtype_to_column_type(dtype: np.dtype) -> str:
    '''Convert a pandas `dtype` to a column type'''

    if np.issubdtype(dtype, np.integer):
        return 'int'

    if np.issubdtype(dtype, np.floating):
        return 'float'

    if str(dtype) == 'object':
        return 'text'

    raise InputException(
        f'pandas dtype {dtype} has no corresponding column type')
def upload_dataframe(df: gpd.GeoDataFrame, name: str = 'Upload from Python', time: OgrSourceDatasetTimeType = NoneOgrSourceDatasetTimeType(), on_error: OgrOnError = OgrOnError.ABORT, timeout: int = 3600) ‑> DatasetId

Uploads a given dataframe to Geo Engine and returns the id of the created dataset

Expand source code
def upload_dataframe(
        df: gpd.GeoDataFrame,
        name: str = "Upload from Python",
        time: OgrSourceDatasetTimeType = OgrSourceDatasetTimeType.none(),
        on_error: OgrOnError = OgrOnError.ABORT,
        timeout: int = 3600) -> DatasetId:
    '''
    Uploads a given dataframe to Geo Engine and returns the id of the created dataset
    '''

    if len(df) == 0:
        raise InputException("Cannot upload empty dataframe")

    if df.crs is None:
        raise InputException("Dataframe must have a specified crs")

    session = get_session()

    df_json = df.to_json()

    response = req.post(f'{session.server_url}/upload',
                        files={"geo.json": df_json},
                        headers=session.auth_header,
                        timeout=timeout).json()

    if 'error' in response:
        raise GeoEngineException(response)

    upload_id = UploadId.from_response(response)

    vector_type = VectorDataType.from_geopandas_type_name(df.geom_type[0])

    columns = {key: {'dataType': pandas_dtype_to_column_type(value), 'measurement': {'type': 'unitless'}}
               for (key, value) in df.dtypes.items()
               if str(value) != 'geometry'}

    floats = [key for (key, value) in columns.items() if value['dataType'] == 'float']
    ints = [key for (key, value) in columns.items() if value['dataType'] == 'int']
    texts = [key for (key, value) in columns.items() if value['dataType'] == 'text']

    create = {
        "dataPath": {
            "upload": str(upload_id)
        },
        "definition": {
            "properties": {
                "name": name,
                "description": "",
                "sourceOperator": "OgrSource"
            },
            "metaData": {
                "type": "OgrMetaData",
                "loadingInfo": {
                    "fileName": "geo.json",
                    "layerName": "geo",
                    "dataType": vector_type.value,
                    "time": time.to_dict(),
                    "columns": {
                        "x": "",
                        "float": floats,
                        "int": ints,
                        "text": texts
                    },
                    "onError": on_error.value
                },
                "resultDescriptor": {
                    "type": "vector",
                    "dataType": vector_type.value,
                    "columns": columns,
                    "spatialReference": df.crs.to_string()
                }
            }
        }
    }

    response = req.post(f'{session.server_url}/dataset',
                        json=create, headers=session.auth_header,
                        timeout=timeout
                        ).json()

    if 'error' in response:
        raise GeoEngineException(response)

    return DatasetId(response["id"])
def volumes(timeout: int = 60) ‑> List[Volume]

Returns a list of all volumes

Expand source code
def volumes(timeout: int = 60) -> List[Volume]:
    '''Returns a list of all volumes'''

    session = get_session()

    response = req.get(f'{session.server_url}/dataset/volumes',
                       headers=session.admin_auth_header,
                       timeout=timeout
                       ).json()

    return [Volume.from_response(v) for v in response]

Classes

class AutoOgrSourceTimeFormat

An auto detection OGR time format

Method generated by attrs for class AutoOgrSourceTimeFormat.

Expand source code
@dataclass
class AutoOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''An auto detection OGR time format'''

    def to_dict(self) -> Dict[str, str]:
        return {
            "format": "auto"
        }

Ancestors

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
def to_dict(self) -> Dict[str, str]:
    return {
        "format": "auto"
    }
class CustomOgrSourceTimeFormat (custom_format: str)

A custom OGR time format

Method generated by attrs for class CustomOgrSourceTimeFormat.

Expand source code
@dataclass
class CustomOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''A custom OGR time format'''

    custom_format: str

    def to_dict(self) -> Dict[str, str]:
        return {
            "format": "custom",
            "customFormat": self.custom_format
        }

Ancestors

Class variables

var custom_format : str

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
def to_dict(self) -> Dict[str, str]:
    return {
        "format": "custom",
        "customFormat": self.custom_format
    }
class DatasetId (dataset_id: UUID)

A wrapper for a dataset id

Expand source code
class DatasetId:
    '''A wrapper for a dataset id'''

    __dataset_id: UUID

    def __init__(self, dataset_id: UUID) -> None:
        self.__dataset_id = dataset_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> DatasetId:
        '''Parse a http response to an `DatasetId`'''
        if 'id' not in response:
            raise GeoEngineException(response)

        return DatasetId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__dataset_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two dataset ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__dataset_id == other.__dataset_id  # pylint: disable=protected-access

Static methods

def from_response(response: Dict[str, str]) ‑> DatasetId

Parse a http response to an DatasetId

Expand source code
@classmethod
def from_response(cls, response: Dict[str, str]) -> DatasetId:
    '''Parse a http response to an `DatasetId`'''
    if 'id' not in response:
        raise GeoEngineException(response)

    return DatasetId(UUID(response['id']))
class DateTimeParseFormat (*args, **kwargs)

A format for parsing date time strings

Expand source code
class DateTimeParseFormat(TypedDict):
    '''A format for parsing date time strings'''

    fmt: str
    hasTz: bool
    hasTime: bool

Ancestors

  • builtins.dict

Class variables

var fmt : str
var hasTime : bool
var hasTz : bool
class GdalMetaDataRegular (*args, **kwargs)

Metadata for regular GDAL datasets

Expand source code
class GdalMetaDataRegular(MetaDataDefinition):
    '''Metadata for regular GDAL datasets'''

    type: Literal["GdalMetaDataRegular"]
    resultDescriptor: RasterResultDescriptor
    params: GdalDatasetParameters
    timePlaceholders: Dict[str, GdalSourceTimePlaceholder]
    dataTime: Tuple[datetime, datetime]
    step: TimeStep

Ancestors

  • builtins.dict

Class variables

var dataTime : Tuple[datetime.datetime, datetime.datetime]
var params : GdalDatasetParameters
var resultDescriptor : RasterResultDescriptor
var step : TimeStep
var timePlaceholders : Dict[str, GdalSourceTimePlaceholder]
var type : Literal['GdalMetaDataRegular']
class GdalMetaDataStatic (*args, **kwargs)

Static metadata for GDAL datasets

Expand source code
class GdalMetaDataStatic(MetaDataDefinition):
    '''Static metadata for GDAL datasets'''

    type: Literal["GdalStatic"]
    time: Optional[Tuple[datetime, datetime]]
    params: GdalDatasetParameters
    resultDescriptor: RasterResultDescriptor

Ancestors

  • builtins.dict

Class variables

var params : GdalDatasetParameters
var resultDescriptor : RasterResultDescriptor
var time : Optional[Tuple[datetime.datetime, datetime.datetime]]
var type : Literal['GdalStatic']
class GdalMetadataNetCdfCf (*args, **kwargs)

Metadata for NetCDF CF datasets

Expand source code
class GdalMetadataNetCdfCf(MetaDataDefinition):
    '''Metadata for NetCDF CF datasets'''

    type: Literal["GdalMetadataNetCdfCf"]
    resultDescriptor: RasterResultDescriptor
    params: GdalDatasetParameters
    start: datetime
    end: datetime
    step: TimeStep
    bandOffset: int

Ancestors

  • builtins.dict

Class variables

var bandOffset : int
var end : datetime.datetime
var params : GdalDatasetParameters
var resultDescriptor : RasterResultDescriptor
var start : datetime.datetime
var step : TimeStep
var type : Literal['GdalMetadataNetCdfCf']
class GdalSourceTimePlaceholder (*args, **kwargs)

A placeholder for a time value in a file name

Expand source code
class GdalSourceTimePlaceholder(TypedDict):
    '''A placeholder for a time value in a file name'''
    format: DateTimeParseFormat
    reference: TimeReference

Ancestors

  • builtins.dict

Class variables

var format : DateTimeParseFormat
var reference : TimeReference
class InfiniteOgrSourceDurationSpec

An open-ended time duration

Method generated by attrs for class InfiniteOgrSourceDurationSpec.

Expand source code
@dataclass
class InfiniteOgrSourceDurationSpec(OgrSourceDuration):
    '''An open-ended time duration'''

    def to_dict(self) -> Dict[str, str]:
        return {
            "type": "infinite",
        }

Ancestors

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
def to_dict(self) -> Dict[str, str]:
    return {
        "type": "infinite",
    }

Inherited members

class MetaDataDefinition (*args, **kwargs)

Super class for all metadata definitions

Expand source code
class MetaDataDefinition(TypedDict):  # pylint: disable=too-few-public-methods
    '''Super class for all metadata definitions'''

Ancestors

  • builtins.dict
class NoneOgrSourceDatasetTimeType

Specify no time information

Method generated by attrs for class NoneOgrSourceDatasetTimeType.

Expand source code
@dataclass
class NoneOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''Specify no time information'''

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        return {
            "type": "none",
        }

Ancestors

Methods

def to_dict(self) ‑> Dict[str, Union[str, Dict[str, str]]]
Expand source code
def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
    return {
        "type": "none",
    }

Inherited members

class OgrOnError (value, names=None, *, module=None, qualname=None, type=None, start=1)

How the OGR source reacts to an invalid feature in the input data.

Expand source code
class OgrOnError(Enum):
    IGNORE = "ignore"
    ABORT = "abort"

Ancestors

  • enum.Enum

Class variables

var ABORT
var IGNORE
class OgrSourceDatasetTimeType

A time type specification for OGR dataset definitions

Expand source code
class OgrSourceDatasetTimeType:
    '''A time type specification for OGR dataset definitions'''

    @abstractmethod
    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        pass

    @classmethod
    def none(cls) -> NoneOgrSourceDatasetTimeType:
        return NoneOgrSourceDatasetTimeType()

    @classmethod
    def start(cls,
              start_field: str,
              start_format: OgrSourceTimeFormat,
              duration: OgrSourceDuration) -> StartOgrSourceDatasetTimeType:
        '''Specify a start column and a fixed duration'''
        return StartOgrSourceDatasetTimeType(start_field, start_format, duration)

    @classmethod
    def start_end(cls,
                  start_field: str,
                  start_format: OgrSourceTimeFormat,
                  end_field: str,
                  end_format: OgrSourceTimeFormat) -> StartEndOgrSourceDatasetTimeType:
        '''The dataset contains start and end column'''
        return StartEndOgrSourceDatasetTimeType(start_field, start_format, end_field, end_format)

    @classmethod
    def start_duration(cls,
                       start_field: str,
                       start_format: OgrSourceTimeFormat,
                       duration_field: str) -> StartDurationOgrSourceDatasetTimeType:
        '''The dataset contains start and a duration column'''
        return StartDurationOgrSourceDatasetTimeType(start_field, start_format, duration_field)

Subclasses

Static methods

def none() ‑> NoneOgrSourceDatasetTimeType
Expand source code
@classmethod
def none(cls) -> NoneOgrSourceDatasetTimeType:
    return NoneOgrSourceDatasetTimeType()
def start(start_field: str, start_format: OgrSourceTimeFormat, duration: OgrSourceDuration) ‑> StartOgrSourceDatasetTimeType

Specify a start column and a fixed duration

Expand source code
@classmethod
def start(cls,
          start_field: str,
          start_format: OgrSourceTimeFormat,
          duration: OgrSourceDuration) -> StartOgrSourceDatasetTimeType:
    '''Specify a start column and a fixed duration'''
    return StartOgrSourceDatasetTimeType(start_field, start_format, duration)
def start_duration(start_field: str, start_format: OgrSourceTimeFormat, duration_field: str) ‑> StartDurationOgrSourceDatasetTimeType

The dataset contains start and a duration column

Expand source code
@classmethod
def start_duration(cls,
                   start_field: str,
                   start_format: OgrSourceTimeFormat,
                   duration_field: str) -> StartDurationOgrSourceDatasetTimeType:
    '''Build a time type from a start column plus a per-feature duration column.'''
    time_type = StartDurationOgrSourceDatasetTimeType(start_field, start_format, duration_field)
    return time_type
def start_end(start_field: str, start_format: OgrSourceTimeFormat, end_field: str, end_format: OgrSourceTimeFormat) ‑> StartEndOgrSourceDatasetTimeType

The dataset contains start and end column

Expand source code
@classmethod
def start_end(cls,
              start_field: str,
              start_format: OgrSourceTimeFormat,
              end_field: str,
              end_format: OgrSourceTimeFormat) -> StartEndOgrSourceDatasetTimeType:
    '''Build a time type from separate start and end columns.'''
    time_type = StartEndOgrSourceDatasetTimeType(start_field, start_format, end_field, end_format)
    return time_type

Methods

def to_dict(self) ‑> Dict[str, Union[str, Dict[str, str]]]
Expand source code
@abstractmethod
def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
    '''Serialize this time type into its JSON-ready dictionary form.'''
class OgrSourceDuration

Base class for the duration part of a OGR time format

Expand source code
class OgrSourceDuration(Generic[_OrgSourceDurationDictT]):
    '''Base class for the duration part of a OGR time format'''

    @abstractmethod
    def to_dict(self) -> Dict[str, _OrgSourceDurationDictT]:
        pass

    @classmethod
    def zero(cls) -> ZeroOgrSourceDurationSpec:
        '''Create a zero-length (instant) duration spec.'''
        return ZeroOgrSourceDurationSpec()

    @classmethod
    def infinite(cls) -> InfiniteOgrSourceDurationSpec:
        '''Create an unbounded duration spec.'''
        return InfiniteOgrSourceDurationSpec()

    @classmethod
    def value(
            cls,
            value: int,
            granularity: TimeStepGranularity = TimeStepGranularity.SECONDS) -> ValueOgrSourceDurationSpec:
        '''Returns the value of the duration'''
        step = TimeStep(value, granularity)
        return ValueOgrSourceDurationSpec(step)

Ancestors

  • typing.Generic

Subclasses

Static methods

def infinite() ‑> InfiniteOgrSourceDurationSpec
Expand source code
@classmethod
def infinite(cls) -> InfiniteOgrSourceDurationSpec:
    '''Create an unbounded duration spec.'''
    spec = InfiniteOgrSourceDurationSpec()
    return spec
def value(value: int, granularity: TimeStepGranularity = TimeStepGranularity.SECONDS) ‑> ValueOgrSourceDurationSpec

Returns the value of the duration

Expand source code
@classmethod
def value(
        cls,
        value: int,
        granularity: TimeStepGranularity = TimeStepGranularity.SECONDS) -> ValueOgrSourceDurationSpec:
    '''Returns the value of the duration'''
    step = TimeStep(value, granularity)
    return ValueOgrSourceDurationSpec(step)
def zero() ‑> ZeroOgrSourceDurationSpec
Expand source code
@classmethod
def zero(cls) -> ZeroOgrSourceDurationSpec:
    '''Create a zero-length (instant) duration spec.'''
    spec = ZeroOgrSourceDurationSpec()
    return spec

Methods

def to_dict(self) ‑> Dict[str, ~_OrgSourceDurationDictT]
Expand source code
@abstractmethod
def to_dict(self) -> Dict[str, _OrgSourceDurationDictT]:
    '''Serialize this duration into its JSON-ready dictionary form.'''
class OgrSourceTimeFormat

Base class for OGR time formats

Expand source code
class OgrSourceTimeFormat:
    '''Base class for OGR time formats'''

    @abstractmethod
    def to_dict(self) -> Dict[str, str]:
        pass

    @classmethod
    def seconds(cls) -> SecondsOgrSourceTimeFormat:
        '''Create a UNIX-seconds time format.'''
        return SecondsOgrSourceTimeFormat()

    @classmethod
    def auto(cls) -> AutoOgrSourceTimeFormat:
        '''Create an auto-detecting time format.'''
        return AutoOgrSourceTimeFormat()

    @classmethod
    def custom(cls, format_string: str) -> CustomOgrSourceTimeFormat:
        '''Create a custom time format from a format string.'''
        return CustomOgrSourceTimeFormat(format_string)

Subclasses

Static methods

def auto() ‑> AutoOgrSourceTimeFormat
Expand source code
@classmethod
def auto(cls) -> AutoOgrSourceTimeFormat:
    '''Create an auto-detecting time format.'''
    fmt = AutoOgrSourceTimeFormat()
    return fmt
def custom(format_string: str) ‑> CustomOgrSourceTimeFormat
Expand source code
@classmethod
def custom(cls, format_string: str) -> CustomOgrSourceTimeFormat:
    '''Create a custom time format from a format string.'''
    fmt = CustomOgrSourceTimeFormat(format_string)
    return fmt
def seconds() ‑> SecondsOgrSourceTimeFormat
Expand source code
@classmethod
def seconds(cls) -> SecondsOgrSourceTimeFormat:
    '''Create a UNIX-seconds time format.'''
    fmt = SecondsOgrSourceTimeFormat()
    return fmt

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
@abstractmethod
def to_dict(self) -> Dict[str, str]:
    '''Serialize this time format into its JSON-ready dictionary form.'''
class SecondsOgrSourceTimeFormat

An OGR time format specified in seconds (UNIX time)

Method generated by attrs for class SecondsOgrSourceTimeFormat.

Expand source code
@dataclass
class SecondsOgrSourceTimeFormat(OgrSourceTimeFormat):
    '''An OGR time format specified in seconds (UNIX time)'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize to the API's format dictionary.'''
        return dict(format="seconds")

Ancestors

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
def to_dict(self) -> Dict[str, str]:
    '''Serialize to the API's format dictionary.'''
    return dict(format="seconds")
class StartDurationOgrSourceDatasetTimeType (start_field: str, start_format: OgrSourceTimeFormat, duration_field: str)

The dataset contains start and a duration column

Method generated by attrs for class StartDurationOgrSourceDatasetTimeType.

Expand source code
@dataclass
class StartDurationOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''The dataset contains start and a duration column'''

    start_field: str
    start_format: OgrSourceTimeFormat
    duration_field: str

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize to the API's time-type dictionary.'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {
            "type": "start+duration",
            "startField": self.start_field,
            "startFormat": self.start_format.to_dict(),
            "durationField": self.duration_field,
        }
        return serialized

Ancestors

Class variables

var duration_field : str
var start_field : str
var start_format : OgrSourceTimeFormat

Methods

def to_dict(self) ‑> Dict[str, Union[str, Dict[str, str]]]
Expand source code
def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
    '''Serialize to the API's time-type dictionary.'''
    serialized: Dict[str, Union[str, Dict[str, str]]] = {}
    serialized["type"] = "start+duration"
    serialized["startField"] = self.start_field
    serialized["startFormat"] = self.start_format.to_dict()
    serialized["durationField"] = self.duration_field
    return serialized

Inherited members

class StartEndOgrSourceDatasetTimeType (start_field: str, start_format: OgrSourceTimeFormat, end_field: str, end_format: OgrSourceTimeFormat)

The dataset contains start and end column

Method generated by attrs for class StartEndOgrSourceDatasetTimeType.

Expand source code
@dataclass
class StartEndOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''The dataset contains start and end column'''

    start_field: str
    start_format: OgrSourceTimeFormat
    end_field: str
    end_format: OgrSourceTimeFormat

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize to the API's time-type dictionary.'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {
            "type": "start+end",
            "startField": self.start_field,
            "startFormat": self.start_format.to_dict(),
            "endField": self.end_field,
            "endFormat": self.end_format.to_dict(),
        }
        return serialized

Ancestors

Class variables

var end_field : str
var end_format : OgrSourceTimeFormat
var start_field : str
var start_format : OgrSourceTimeFormat

Methods

def to_dict(self) ‑> Dict[str, Union[str, Dict[str, str]]]
Expand source code
def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
    '''Serialize to the API's time-type dictionary.'''
    serialized: Dict[str, Union[str, Dict[str, str]]] = {}
    serialized["type"] = "start+end"
    serialized["startField"] = self.start_field
    serialized["startFormat"] = self.start_format.to_dict()
    serialized["endField"] = self.end_field
    serialized["endFormat"] = self.end_format.to_dict()
    return serialized

Inherited members

class StartOgrSourceDatasetTimeType (start_field: str, start_format: OgrSourceTimeFormat, duration: OgrSourceDuration)

Specify a start column and a fixed duration

Method generated by attrs for class StartOgrSourceDatasetTimeType.

Expand source code
@dataclass
class StartOgrSourceDatasetTimeType(OgrSourceDatasetTimeType):
    '''Specify a start column and a fixed duration'''

    start_field: str
    start_format: OgrSourceTimeFormat
    duration: OgrSourceDuration

    def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
        '''Serialize to the API's time-type dictionary.'''
        serialized: Dict[str, Union[str, Dict[str, str]]] = {
            "type": "start",
            "startField": self.start_field,
            "startFormat": self.start_format.to_dict(),
            "duration": self.duration.to_dict(),
        }
        return serialized

Ancestors

Class variables

var duration : OgrSourceDuration
var start_field : str
var start_format : OgrSourceTimeFormat

Methods

def to_dict(self) ‑> Dict[str, Union[str, Dict[str, str]]]
Expand source code
def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]:
    '''Serialize to the API's time-type dictionary.'''
    serialized: Dict[str, Union[str, Dict[str, str]]] = {}
    serialized["type"] = "start"
    serialized["startField"] = self.start_field
    serialized["startFormat"] = self.start_format.to_dict()
    serialized["duration"] = self.duration.to_dict()
    return serialized

Inherited members

class StoredDataset (dataset_id: DatasetId, upload_id: UploadId)

The result of a store dataset request is a combination of upload_id and dataset_id

Expand source code
class StoredDataset(NamedTuple):
    '''The result of a store dataset request is a combination of `upload_id` and `dataset_id`'''

    dataset_id: DatasetId
    upload_id: UploadId

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> StoredDataset:
        '''Parse a http response to a `StoredDataset`

        Raises a `GeoEngineException` when either the `dataset` or
        the `upload` key is missing from the response.
        '''
        # Bug fix: the original used `and`, which only raised when BOTH keys
        # were missing; a response missing just one key slipped past the guard
        # and crashed below with a bare KeyError instead of GeoEngineException.
        if 'dataset' not in response or 'upload' not in response:
            raise GeoEngineException(response)

        return StoredDataset(
            dataset_id=DatasetId(UUID(response['dataset'])),
            upload_id=UploadId(UUID(response['upload']))
        )

Ancestors

  • builtins.tuple

Static methods

def from_response(response: Dict[str, str]) ‑> StoredDataset

Parse a http response to a StoredDataset

Expand source code
@classmethod
def from_response(cls, response: Dict[str, str]) -> StoredDataset:
    '''Parse a http response to a `StoredDataset`

    Raises a `GeoEngineException` when either the `dataset` or
    the `upload` key is missing from the response.
    '''
    # Bug fix: `and` only raised when BOTH keys were missing; a response
    # missing just one key then crashed with a bare KeyError below.
    if 'dataset' not in response or 'upload' not in response:
        raise GeoEngineException(response)

    return StoredDataset(
        dataset_id=DatasetId(UUID(response['dataset'])),
        upload_id=UploadId(UUID(response['upload']))
    )

Instance variables

var dataset_id : DatasetId

Alias for field number 0

var upload_id : UploadId

Alias for field number 1

class TimeReference (value, names=None, *, module=None, qualname=None, type=None, start=1)

The reference for a time placeholder

Expand source code
class TimeReference(Enum):
    '''The reference for a time placeholder'''

    # serialized member values are the strings the API expects
    START = "Start"
    END = "End"

Ancestors

  • enum.Enum

Class variables

var END
var START
class UploadId (upload_id: UUID)

A wrapper for an upload id

Expand source code
class UploadId:
    '''A wrapper for an upload id'''

    # the wrapped UUID; name-mangled to keep it private
    __upload_id: UUID

    def __init__(self, upload_id: UUID) -> None:
        self.__upload_id = upload_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> UploadId:
        '''Parse a http response to an `UploadId`

        Raises a `GeoEngineException` when the `id` key is absent.
        '''
        if 'id' not in response:
            raise GeoEngineException(response)

        return UploadId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__upload_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two upload ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__upload_id == other.__upload_id  # pylint: disable=protected-access

    def __hash__(self) -> int:
        # Defining __eq__ alone implicitly sets __hash__ to None, making ids
        # unusable as dict keys / set members; hash the wrapped UUID instead.
        return hash(self.__upload_id)

Static methods

def from_response(response: Dict[str, str]) ‑> UploadId

Parse a http response to an UploadId

Expand source code
@classmethod
def from_response(cls, response: Dict[str, str]) -> UploadId:
    '''Parse a http response to an `UploadId`

    Raises a `GeoEngineException` when the `id` key is absent.
    '''
    # fixed non-PEP8 `@ classmethod` decorator spacing
    if 'id' not in response:
        raise GeoEngineException(response)

    return UploadId(UUID(response['id']))
class ValueOgrSourceDurationSpec (step: TimeStep)

A fixed value for a source duration

Method generated by attrs for class ValueOgrSourceDurationSpec.

Expand source code
@dataclass
class ValueOgrSourceDurationSpec(OgrSourceDuration):
    '''A fixed value for a source duration'''

    step: TimeStep

    def to_dict(self) -> Dict[str, Union[str, int, TimeStepGranularity]]:
        '''Serialize to the API's duration dictionary.'''
        serialized: Dict[str, Union[str, int, TimeStepGranularity]] = {
            "type": "value",
            "step": self.step.step,
            "granularity": self.step.granularity.value,
        }
        return serialized

Ancestors

Class variables

var step : TimeStep

Methods

def to_dict(self) ‑> Dict[str, Union[str, int, TimeStepGranularity]]
Expand source code
def to_dict(self) -> Dict[str, Union[str, int, TimeStepGranularity]]:
    '''Serialize the fixed duration to its API dictionary.'''
    step_value = self.step.step
    granularity_value = self.step.granularity.value
    return {
        "type": "value",
        "step": step_value,
        "granularity": granularity_value
    }

Inherited members

class Volume (name: str, path: str)

A volume

Method generated by attrs for class Volume.

Expand source code
@dataclass
class Volume:
    '''A volume'''

    name: str
    path: str

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> Volume:
        '''Construct a `Volume` from an HTTP response dictionary.'''
        name, path = response['name'], response['path']
        return Volume(name, path)

Class variables

var name : str
var path : str

Static methods

def from_response(response: Dict[str, str]) ‑> Volume

Parse a http response to a Volume

Expand source code
@classmethod
def from_response(cls, response: Dict[str, str]) -> Volume:
    '''Construct a `Volume` from an HTTP response dictionary.'''
    name, path = response['name'], response['path']
    return Volume(name, path)
class VolumeId (volume_id: UUID)

A wrapper for a volume id

Expand source code
class VolumeId:
    '''A wrapper for a volume id'''

    # the wrapped UUID; name-mangled to keep it private
    __volume_id: UUID

    def __init__(self, volume_id: UUID) -> None:
        self.__volume_id = volume_id

    @classmethod
    def from_response(cls, response: Dict[str, str]) -> VolumeId:
        '''Parse a http response to a `VolumeId`

        Raises a `GeoEngineException` when the `id` key is absent.
        '''
        if 'id' not in response:
            raise GeoEngineException(response)

        # Bug fix: the original returned an `UploadId` here (copy-paste
        # error, also visible in the old "ColumeId" docstring typo); a
        # volume response must yield a `VolumeId`.
        return VolumeId(UUID(response['id']))

    def __str__(self) -> str:
        return str(self.__volume_id)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        '''Checks if two volume ids are equal'''
        if not isinstance(other, self.__class__):
            return False

        return self.__volume_id == other.__volume_id  # pylint: disable=protected-access

    def __hash__(self) -> int:
        # Defining __eq__ alone implicitly sets __hash__ to None; hash the
        # wrapped UUID so ids stay usable as dict keys / set members.
        return hash(self.__volume_id)

Static methods

def from_response(response: Dict[str, str]) ‑> UploadId

Parse a http response to a VolumeId

Expand source code
@classmethod
def from_response(cls, response: Dict[str, str]) -> VolumeId:
    '''Parse a http response to a `VolumeId`

    Raises a `GeoEngineException` when the `id` key is absent.
    '''
    if 'id' not in response:
        raise GeoEngineException(response)

    # Bug fix: originally returned an `UploadId` (copy-paste error).
    return VolumeId(UUID(response['id']))
class ZeroOgrSourceDurationSpec

An instant, i.e. no duration

Method generated by attrs for class ZeroOgrSourceDurationSpec.

Expand source code
@dataclass
class ZeroOgrSourceDurationSpec(OgrSourceDuration):
    '''An instant, i.e. no duration'''

    def to_dict(self) -> Dict[str, str]:
        '''Serialize to the API's duration dictionary.'''
        return {"type": "zero"}

Ancestors

Methods

def to_dict(self) ‑> Dict[str, str]
Expand source code
def to_dict(self) -> Dict[str, str]:
    '''Serialize the zero duration to its API dictionary.'''
    return dict(type="zero")

Inherited members