Event Analytics Client for Python

Introduction

The Event Analytics Service analyzes event data. Using statistical analysis, it identifies significant dependencies and helps you gain a better understanding of a system's internal processes.

Further usage of the IoT EventAnalytics SDK library is demonstrated in a sample project that you can download and test locally or on the Insights Hub application. Please refer to this repository: industrial-iot-python-sdk-examples

Event Operations Services

The event operations client analyzes event data statistically to identify the most frequent events.

Client name: EventOperationsClient

Find Top Events

This method finds the N most frequently occurring events and returns them sorted by the number of occurrences in descending order.

# Import the RestClientConfig and UserToken from mindsphere_core module
from mindsphere_core import RestClientConfig
from mindsphere_core import UserToken

# Import the MindsphereError from mindsphere_core.exceptions module
from mindsphere_core.exceptions import MindsphereError

# Import the EventOperationsClient from eventanalytics module
from eventanalytics import EventOperationsClient

# Import all required models from the eventanalytics module
from eventanalytics import Event, TopEventsInputDataModel, EventsInputModelEventsMetadata, TopEventsRequest

# Create the RestClientConfig and UserToken objects
config = RestClientConfig(proxy_host = "<proxy_host>", proxy_port = <proxy_port>)
credentials = UserToken(authorization = "<bearer_token>")

# Create the EventOperationsClient object using the RestClientConfig and UserToken objects
client = EventOperationsClient(rest_client_config = config, mindsphere_credentials = credentials)

try:
    # Create the EventsInputModelEventsMetadata object
    metadata = EventsInputModelEventsMetadata("<text>")

    # Create a list of events to be analyzed
    event1 = Event("<time_value_1>", "<event_text_1>", "<text_qc_1>")
    event2 = Event("<time_value_2>", "<event_text_2>", "<text_qc_2>")

    events_list = [event1, event2]

    # Create the TopEventsInputDataModel object to be analyzed
    data = TopEventsInputDataModel(
                events_metadata = metadata,
                events = events_list,
                number_of_top_positions_required = <top_positions_value>
            )

    # Create the request object
    request = TopEventsRequest(data = data)

    # Initiate the API call to find top events
    response = client.top_events(request_object = request)

except MindsphereError as err:
    # Exception Handling
    print(err)
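
Conceptually, the service counts how often each distinct event text occurs and returns the requested number of top positions. The following plain-Python sketch is only an illustration of that statistic on hypothetical sample data, not part of the SDK call above.

from collections import Counter

# Hypothetical sample event texts; in the SDK call above these arrive as Event objects.
event_texts = [
    "INTRODUCING FUEL",
    "INTRODUCING FUEL",
    "INTRODUCING FUEL",
    "Status@Flame On",
    "Status@Flame On",
    "Module STOP due to parameter assignment",
]

# Count occurrences per event text and keep the two most frequent entries,
# mirroring number_of_top_positions_required = 2.
for text, count in Counter(event_texts).most_common(2):
    print(f"{text}: {count}")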

Filter Events

This method applies custom filters to simplify the dataset based on the event text.

# Create the EventOperationsClient object as shown above

# Import the additional models used in this example from the eventanalytics module
from eventanalytics import EventSearchInputDataModel, FilterEventsRequest

try:
    # Create the EventsInputModelEventsMetadata object
    metadata = EventsInputModelEventsMetadata("text")

    # Create a list of events to be analyzed
    event1 = Event("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL", 0)
    event2 = Event("2017-10-01T12:02:01.001Z", "Status@Flame On", 0)

    events_list = [event1, event2]

    # Create a list of filter criteria
    filter_list = ["Introduction fuel"]

    # Create the EventSearchInputDataModel object to be analyzed
    data = EventSearchInputDataModel(
                events_metadata = metadata,
                events = events_list,
                filter_list = filter_list
            )

    # Create the request object
    request = FilterEventsRequest(data = data)

    # Initiate the API call to filter events
    response = client.filter_events(request_object = request)

except MindsphereError as err:
    # Exception Handling
    print(err)
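
As a rough illustration of the idea (independent of the SDK call above), filtering keeps only the events whose text matches the filter criteria. The sketch below uses simple case-insensitive substring matching on hypothetical data; the service applies its own matching rules to the filter_list.

# Hypothetical (timestamp, text) pairs standing in for Event objects.
events = [
    ("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL"),
    ("2017-10-01T12:02:01.001Z", "Status@Flame On"),
    ("2017-10-01T12:03:04.001Z", "Module STOP due to parameter assignment"),
]

# Hypothetical filter criteria.
filter_terms = ["fuel", "flame"]

# Keep only events whose text contains one of the filter terms (case-insensitive).
filtered = [
    (time, text)
    for time, text in events
    if any(term.lower() in text.lower() for term in filter_terms)
]

for time, text in filtered:
    print(time, text)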

Count Events

This method determines the number of events per time interval for a user-defined interval length.

# Create the EventOperationsClient object as shown above

# Import the additional models used in this example from the eventanalytics module
from eventanalytics import EventInputEventsMetadata, EventInput, CountEventsRequest

try:
    # Create the EventInputEventsMetadata object
    metadata = EventInputEventsMetadata(event_text_property_name = "text", split_interval = 5000)

    # Create a list of events to be analyzed
    event1 = Event("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL", 0)
    event2 = Event("2017-10-01T12:02:01.001Z", "Status@Flame On", 0)

    events_list = [event1, event2]

    # Create the EventInput object to be analyzed
    data = EventInput(events_metadata = metadata, events = events_list)

    # Create the request object
    request = CountEventsRequest(data = data)

    # Initiate Count Events API call
    response = client.count_events(request_object = request)

except MindsphereError as err:
    # Exception Handling
    print(err)
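
The split_interval value is the bucket width in milliseconds. The following plain-Python sketch illustrates, on hypothetical timestamps, how events could be grouped into such buckets; the actual aggregation is performed by the service.

from collections import Counter
from datetime import datetime, timezone

# Bucket width in milliseconds, mirroring split_interval = 5000 above.
split_interval_ms = 5000

# Hypothetical event timestamps (ISO 8601, UTC).
timestamps = [
    "2017-10-01T12:00:00.001Z",
    "2017-10-01T12:00:03.500Z",
    "2017-10-01T12:00:07.200Z",
]

def to_millis(ts):
    # Parse the ISO 8601 timestamp and convert it to epoch milliseconds.
    dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)

# Assign each event to a bucket and count the events per bucket.
counts = Counter(to_millis(ts) // split_interval_ms for ts in timestamps)

for bucket, count in sorted(counts.items()):
    print(f"interval starting at {bucket * split_interval_ms} ms: {count} event(s)")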

Remove Duplicate Events

This method detects duplicate events within a sliding window of user-defined width (for example, 5,000 ms) and reduces the data set by aggregating the duplicate events.

# Create the EventOperationsClient object as shown above

# Import the additional models used in this example from the eventanalytics module
from eventanalytics import EventInputEventsMetadata, EventInput, RemoveDuplicateEventsRequest

try:
    # Create the EventInputEventsMetadata object
    metadata = EventInputEventsMetadata("text", 5000)

    # Create a list of events to be analyzed
    event1 = Event("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL", 0)
    event2 = Event("2017-10-01T12:02:01.001Z", "Status@Flame On", 0)

    events_list = [event1, event2]

    # Create the EventInput object to be analyzed
    data = EventInput(metadata, events_list)

    # Create the request object
    request = RemoveDuplicateEventsRequest(data = data)

    # Initiate API call to remove events
    response = client.remove_duplicate_events(request_object = request)

except MindsphereError as err:
    # Exception Handling
    print(err)
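
Conceptually, an event counts as a duplicate if an event with the same text was already kept within the window width. The sketch below illustrates that idea on hypothetical data in plain Python; the aggregation itself is performed by the service.

from datetime import datetime, timezone

# Sliding-window width in milliseconds, mirroring the 5000 passed to the metadata above.
window_ms = 5000

def to_millis(ts):
    # Parse the ISO 8601 timestamp and convert it to epoch milliseconds.
    dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)

# Hypothetical events sorted by time: (timestamp, text) pairs.
events = [
    ("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL"),
    ("2017-10-01T12:00:02.001Z", "INTRODUCING FUEL"),   # duplicate within the window
    ("2017-10-01T12:00:08.001Z", "INTRODUCING FUEL"),   # outside the window, kept
]

deduplicated = []
last_kept = {}  # event text -> time (ms) of the last kept occurrence

for ts, text in events:
    millis = to_millis(ts)
    if text not in last_kept or millis - last_kept[text] > window_ms:
        deduplicated.append((ts, text))
        last_kept[text] = millis

for ts, text in deduplicated:
    print(ts, text)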

Pattern Operations Services

The Pattern Operations client analyzes the event data statistically to detect event patterns.

Client name: PatternOperationsClient

Find Event Patterns

This method searches for user-defined patterns in a list of events and detects all events matching the specified pattern(s).

# Import the RestClientConfig and UserToken from mindsphere_core module
from mindsphere_core import RestClientConfig
from mindsphere_core import UserToken

# Import the MindsphereError from mindsphere_core.exceptions module
from mindsphere_core.exceptions import MindsphereError

# Import the PatternOperationsClient from eventanalytics module
from eventanalytics.clients.pattern_operations_client import PatternOperationsClient

# Import all required models from eventanalytics.models
from eventanalytics.models import PatternMatchingInputDataModel, EventsInputModelEventsMetadata, EventInput, PatternDefinition, MatchingPattern, Event, MatchPatternsOverEventsRequest

# Create the RestClientConfig and UserToken objects
clientConfig = RestClientConfig(proxy_host = "<proxy_host>", proxy_port = <proxy_port>)
credentials = UserToken(authorization = "<bearer_token>")

# Create the PatternOperationsClient object using the RestClientConfig and UserToken objects
client = PatternOperationsClient(rest_client_config = clientConfig, mindsphere_credentials = credentials)

try:
    # Create the EventsInputModelEventsMetadata object
    metadata = EventsInputModelEventsMetadata("<text>")

    # Create a list of events to be analyzed
    event_1 = Event("2017-10-01T12:00:00.001Z", "INTRODUCING FUEL", 0)
    event_2 = Event("2017-10-01T12:02:01.001Z", "Status@Flame On", 0)

    events_list = [event_1, event_2]
    events_input = EventInput(metadata, events_list)

    # Create a list of non-events to be analyzed
    non_events = ["Error 2.. occurred", "STOPPING ENGINE"]

    # Create a list of patterns to be analyzed
    matching_pattern_1 = MatchingPattern("INTRODUCING FUEL", 1, 2)
    matching_pattern_2 = MatchingPattern("Status@Flame On", 0, 1)

    pattern_list = [matching_pattern_1, matching_pattern_2]

    pattern_def = PatternDefinition()
    pattern_def.pattern = pattern_list
    pattern_def_list = [pattern_def]

    max_pattern_interval = 200

    # Create the PatternMatchingInputDataModel object to be analyzed
    data = PatternMatchingInputDataModel(max_pattern_interval, pattern_def_list, non_events, events_input)

    # Create the request object
    request = MatchPatternsOverEventsRequest(data = data)

    # Initiate the API call to match patterns
    response = client.match_patterns_over_events(request)

except MindsphereError as err:
    # Exception Handling
    print(err)
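
Each MatchingPattern above pairs an event text with repetition bounds, so a pattern definition reads much like a regular expression over the sequence of event texts. The sketch below is a plain-Python illustration of that reading on hypothetical data; it does not model the maximum pattern interval or the non-events list, which the service handles itself.

import re

# Hypothetical pattern: "INTRODUCING FUEL" one to two times, followed by "Status@Flame On" once.
pattern = [("INTRODUCING FUEL", 1, 2), ("Status@Flame On", 1, 1)]

# Hypothetical sequence of event texts, ordered by time.
event_texts = ["INTRODUCING FUEL", "INTRODUCING FUEL", "Status@Flame On", "STOPPING ENGINE"]

def token(text):
    # Each event text becomes one newline-terminated token in the sequence string.
    return re.escape(text) + "\n"

# Translate the pattern into a regular expression with {min,max} repetitions per token.
regex = "".join(
    f"(?:{token(text)}){{{min_rep},{max_rep}}}" for text, min_rep, max_rep in pattern
)

sequence = "".join(token(text) for text in event_texts)

print("pattern found" if re.search(regex, sequence) else "pattern not found")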
