# coding: utf-8
"""
SDI - Semantic Data Interconnect APIs
The Semantic Data Interconnect (SDI) is a collection of APIs that allows the user to unlock the potential of disparate big data by connecting external data. The SDI can infer the schemas of data based on schema-on-read, allow creating a semantic model and perform big data semantic queries. It seamlessly connects to MindSphere's Integrated Data Lake (IDL), but it can work independently as well. There are two mechanisms that can be used to upload files so that SDI can generate schemas and make data ready for query. The SDI operations are divided into the following groups: **Data Registration for SDI** This set of APIs is used to organize the incoming data. When configuring a Data Registry, you have the option to update your data based on a replace or append strategy. If you consider a use case where schema may change and incoming data files are completely different every time then replace is a good strategy. The replace strategy will replace the existing schema and data during each data ingest operation whereas the append strategy will update the existing schema and data during each data ingest operation. **Custom Data Type for SDI** The SDI by default identifies basic data types for each property, such as String, Integer, Float, Date, etc. The user can use this set of APIs to create their own custom data type. The SDI also provides an API under this category to suggest data type based on user-provided sample test values. **Data Lake for SDI** The SDI provides endpoints to manage the customer's data lake registration based on tenant id, cloud provider and data lake type. The set of REST endpoints allows creating, updating and retrieving the base path for the customer's data lake. The IDL customer needs to create an SDI folder that is under the root folder. Any file uploaded in this folder is automatically picked up by SDI to process via IDL notification. 
**Data Ingest for SDI** This set of APIs allows user to upload files, start an ingest job for uploaded files, find job status for ingested jobs or retrieve all job statuses. **Schema Registry for SDI** The SDI provides a way to find the generated schema in this category. Users can find an SDI generated schema for uploaded files based on source name, data tag or schema name. **Data Query for SDI** allows querying based on the extracted schemas. Important supported APIs are: * Query interface for querying semantically correlated and transformed data. * Stores and executes data queries. * Uses a semantic model to translate model-based query to physical queries. **Semantic Model for SDI** allows user to create semantic model ontologies based on the extracted one or more schemas. The important functionalities achieved with APIs are: * Contextual correlation of data from different systems. * Infers & Recommends mappings between different schemas. * Import and store Semantic model. # noqa: E501
"""
from __future__ import absolute_import
from mindsphere_core.mindsphere_core import logger
from mindsphere_core import mindsphere_core, exceptions, token_service
from mindsphere_core.token_service import init_credentials
class OntologiesOperationsClient:
    """Client for the SDI Ontologies operations (Semantic Model for SDI).

    Wraps the ontology REST endpoints exposed under ``/api/sdi/v4``.
    """

    # Common base path prepended to every endpoint of this client.
    __base_path__ = '/api/sdi/v4'
    # Root package name; the core invoker uses it to resolve response model classes.
    __model_package__ = __name__.split('.')[0]

    def __init__(self, rest_client_config=None, mindsphere_credentials=None):
        """Create a client instance.

        :param rest_client_config: REST transport configuration passed through
            to the core URL builder and service invoker.
        :param mindsphere_credentials: credentials object; normalized by
            ``init_credentials`` (presumably it also resolves defaults when
            ``None`` — confirm against mindsphere_core.token_service).
        """
        self.rest_client_config = rest_client_config
        self.mindsphere_credentials = init_credentials(mindsphere_credentials)
[docs] def get_ontology_jobs_id(self, id):
"""Retrieve status of ontology creation/updation job
:param OntologyJobsIdGetRequest request_object: It contains the below parameters --> |br| ( id* - Ontology job ID as obtained on Ontology creation/updation )
:return: JobStatus
"""
logger.info('OntologiesOperationsClient.get_ontology_jobs_id() invoked.')
if id is None:
raise exceptions.MindsphereClientError('`id` is not passed when calling `get_ontology_jobs_id`')
end_point_url = '/ontologyJobs/{id}'
end_point_url = end_point_url.format(id=id)
token = token_service.fetch_token(self.rest_client_config, self.mindsphere_credentials)
api_url = mindsphere_core.build_url(self.__base_path__, end_point_url, self.rest_client_config)
headers = {'Authorization': 'Bearer ' + str(token)}
query_params = {}
form_params, local_var_files, body_params = {}, {}, None
logger.info('OntologiesOperationsClient.get_ontology_jobs_id() --> Proceeding for API Invoker.')
return mindsphere_core.invoke_service(self.rest_client_config, api_url, headers, 'GET', query_params, form_params, body_params, local_var_files, 'JobStatus', self.__model_package__)
[docs] def create_ontology_jobs(self, request_object):
"""Upload file and submit job for create/update ontology
Upload file and submit job for create/update ontology
:param OntologyJobsPostRequest request_object: It contains the below parameters --> |br| ( ontologyName* - - Ontology name should be unique. ), |br| ( ontologyDescription - - Ontology description. ), |br| ( ontologyId - - Provide OntologyId for updating existing ontology. If empty then will create new ontology. ), |br| ( keyMappingType - - Define keyMappingType for ontology. ), |br| ( file - - We support JSON and OWL file pre-defined format. )
:return: OntologyJob
"""
logger.info('OntologiesOperationsClient.create_ontology_jobs() invoked.')
if request_object is None:
raise exceptions.MindsphereClientError('`request_object` is not passed when calling `create_ontology_jobs`')
if request_object.ontology_name is None:
raise exceptions.MindsphereClientError('The required parameter `ontologyName` is missing from `request_object`, when calling `create_ontology_jobs`')
end_point_url = '/ontologyJobs'
end_point_url = end_point_url.format()
token = token_service.fetch_token(self.rest_client_config, self.mindsphere_credentials)
api_url = mindsphere_core.build_url(self.__base_path__, end_point_url, self.rest_client_config)
headers = {'Accept': 'application/json', 'Authorization': 'Bearer ' + str(token)}
query_params = {}
form_params, local_var_files, body_params = {}, {}, None
if request_object.ontology_description is not None:
form_params['ontologyDescription'] = request_object.ontology_description
if request_object.id is not None:
form_params['ontologyId'] = request_object.id
if request_object.ontology_name is not None:
form_params['ontologyName'] = request_object.ontology_name
if request_object.key_mapping_type is not None:
form_params['keyMappingType'] = request_object.key_mapping_type
if request_object.file is not None:
local_var_files['file'] = request_object.file
logger.info('OntologiesOperationsClient.create_ontology_jobs() --> Proceeding for API Invoker.')
return mindsphere_core.invoke_service(self.rest_client_config, api_url, headers, 'POST', query_params, form_params, body_params, local_var_files, 'OntologyJob', self.__model_package__)