# Source code for iottsbulk.clients.bulk_import_operations_client

# coding: utf-8

"""
    IoT Time Series Bulk API

    This API allows to bulk import IoT time series data based on files uploaded via IoT File Service. Data import for simulation assets (entities) is supported with up to nano second precision and for performance assets (entities) with up to milli second precision. A bulk import is modeled as asynchronous job whose status can be retrieved after creation. Successfully imported time series data can be retrieved using the read operation.   # noqa: E501
"""


from __future__ import absolute_import

from mindsphere_core.mindsphere_core import logger
from mindsphere_core import mindsphere_core, exceptions, token_service
from mindsphere_core.token_service import init_credentials


class BulkImportOperationsClient:
    """Client for the IoT Time Series Bulk API import-job operations.

    Wraps creation and status retrieval of asynchronous bulk import jobs
    (``/api/iottsbulk/v3/importJobs``) that ingest IoT time series data from
    files previously uploaded via the IoT File Service.
    """

    # Base path of the IoT TS Bulk REST API; joined with per-operation
    # endpoint URLs by mindsphere_core.build_url().
    __base_path__ = '/api/iottsbulk/v3'
    # Top-level package name; used by invoke_service() to resolve response
    # model classes (e.g. JobStatus).
    __model_package__ = __name__.split('.')[0]

    def __init__(self, rest_client_config=None, mindsphere_credentials=None):
        """Initialize the client.

        :param rest_client_config: REST client configuration (proxy, host,
            timeouts); passed through to the core service invoker.
        :param mindsphere_credentials: Credentials used for token fetching;
            normalized via ``init_credentials`` (may be ``None`` to fall back
            to environment-provided credentials).
        """
        self.rest_client_config = rest_client_config
        self.mindsphere_credentials = init_credentials(mindsphere_credentials)

    def create_import_job(self, request_object):
        """Create a bulk import job for importing time series data.

        Creates an import job resource that asynchronously imports IoT time
        series data from files uploaded through the IoT File Service. After
        successful creation, the provided file contents are validated and
        imported in the background; the job status can be polled with the
        returned job ID via :meth:`retrieve_import_job`. If validation errors
        occur during or after job creation, no time series data is imported
        from any of the provided files.

        Restrictions (per API specification): only one asset-aspect
        (entity-property set) combination per job; performance-asset data must
        be older than 30 minutes; simulation data must lie within one UTC
        hour (max 350 MB/hour), performance data within one UTC day
        (max 1 GB/day); at most 100 files per request.

        :param CreateImportJobRequest request_object: Request wrapper whose
            required ``bulk_import_input`` attribute describes the files and
            the target asset-aspect combination.
        :return: JobStatus -- status resource of the newly created job.
        :raises MindsphereClientError: If ``request_object`` or its
            ``bulk_import_input`` is missing.
        """
        logger.info('BulkImportOperationsClient.create_import_job() invoked.')
        # Validate required inputs before doing any network work.
        if request_object is None:
            raise exceptions.MindsphereClientError(
                '`request_object` is not passed when calling `create_import_job`'
            )
        if request_object.bulk_import_input is None:
            raise exceptions.MindsphereClientError(
                'The required parameter `bulkImportInput` is missing from `request_object`, when calling `create_import_job`'
            )

        end_point_url = '/importJobs'
        token = token_service.fetch_token(
            self.rest_client_config, self.mindsphere_credentials
        )
        api_url = mindsphere_core.build_url(
            self.__base_path__, end_point_url, self.rest_client_config
        )
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer ' + str(token),
        }
        query_params = {}
        # POST body is the bulk import payload; no form fields or file parts.
        form_params, local_var_files, body_params = (
            {},
            {},
            request_object.bulk_import_input,
        )

        logger.info(
            'BulkImportOperationsClient.create_import_job() --> Proceeding for API Invoker.'
        )
        return mindsphere_core.invoke_service(
            self.rest_client_config,
            api_url,
            headers,
            'POST',
            query_params,
            form_params,
            body_params,
            local_var_files,
            'JobStatus',
            self.__model_package__,
        )

    def retrieve_import_job(self, request_object):
        """Retrieve the status of a bulk import job.

        :param RetrieveImportJobRequest request_object: Request wrapper whose
            required ``id`` attribute is the job ID obtained on job creation.
        :return: JobStatus -- current status of the import job.
        :raises MindsphereClientError: If ``request_object`` or its ``id``
            is missing.
        """
        logger.info('BulkImportOperationsClient.retrieve_import_job() invoked.')
        # Validate required inputs before doing any network work.
        if request_object is None:
            raise exceptions.MindsphereClientError(
                '`request_object` is not passed when calling `retrieve_import_job`'
            )
        if request_object.id is None:
            raise exceptions.MindsphereClientError(
                'The required parameter `id` is missing from `request_object`, when calling `retrieve_import_job`'
            )

        end_point_url = '/importJobs/{id}'
        # Substitute the job ID into the endpoint path.
        end_point_url = end_point_url.format(id=request_object.id)
        token = token_service.fetch_token(
            self.rest_client_config, self.mindsphere_credentials
        )
        api_url = mindsphere_core.build_url(
            self.__base_path__, end_point_url, self.rest_client_config
        )
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer ' + str(token),
        }
        query_params = {}
        # GET request: no form fields, files, or body.
        form_params, local_var_files, body_params = {}, {}, None

        logger.info(
            'BulkImportOperationsClient.retrieve_import_job() --> Proceeding for API Invoker.'
        )
        return mindsphere_core.invoke_service(
            self.rest_client_config,
            api_url,
            headers,
            'GET',
            query_params,
            form_params,
            body_params,
            local_var_files,
            'JobStatus',
            self.__model_package__,
        )