Hi,
When using the AYX Python SDK I'm trying to write different data that has been fetched from one of our microservices to 2 output anchors. Unfortunately, I always run into the following issue:
The first output anchor displays the output data correctly but any follow up anchor returns the error "Memory limit reached. Use a Browse tool to view full results". As suggested I tried using the browse tool but this results in that tool crashing with an unhandled exception.
First output anchor (this one always works):
Second output anchor (if I were to add more output anchors the exact same this would happen):
I tried to simplify the output data by removing the microservice calls and just passing along 2 simple RecordPackets but I am getting the same result. Perhaps it is in the way the Metadata and RecordPackets are built? I couldn't find any clear examples on how to do this in the docs.
Code used.
class TimDatasetManagement(Plugin):
"""A sample Plugin that filters numeric data for odd and even values."""
def __init__(self, provider: ProviderBase):
"""Construct the AyxRecordProcessor."""
self.provider = provider
self.tool_config = provider.tool_config
self.config_email = self.tool_config['email']
self.config_password = self.tool_config['password']
self.dataset_metadata_output_anchor = self.provider.get_output_anchor("Output1")
self.dataset_logs_output_anchor = self.provider.get_output_anchor("Output2")
def on_input_connection_opened(self, input_connection: InputConnectionBase) -> None:
"""Initialize the Input Connections of this plugin."""
if input_connection.metadata is None:
raise RuntimeError("Metadata must be set before setting containers.")
# Metadata open
id_field = Field(name="id", field_type=FieldType.string, size=256)
name_field = Field(name="name", field_type=FieldType.string, size=256)
dataset_metadata = Metadata(fields=[id_field, name_field])
self.dataset_metadata = dataset_metadata
if not self.dataset_metadata_output_anchor.is_open: self.dataset_metadata_output_anchor.open(dataset_metadata)
# Logs open
message_field = Field(name="message", field_type=FieldType.string, size=256)
message_type_field = Field(name="messageType", field_type=FieldType.string, size=256)
origin_field = Field(name="origin", field_type=FieldType.string, size=256)
created_at_field = Field(name="createdAt", field_type=FieldType.string, size=256)
dataset_logs = Metadata(fields=[created_at_field, origin_field, message_field, message_type_field])
self.dataset_logs = dataset_logs
if not self.dataset_logs_output_anchor.is_open: self.dataset_logs_output_anchor.open(dataset_logs)
def on_record_packet(self, input_connection: InputConnectionBase) -> None:
"""Handle the record packet received through the input connection."""
# Metadata output
dataset_metadata_df = pd.DataFrame.from_dict([{'id': '1234', 'name': 'test'}])
dataset_metadata_record_packet = RecordPacket.from_dataframe(self.dataset_metadata, dataset_metadata_df)
self.dataset_metadata_output_anchor.write(dataset_metadata_record_packet)
# Logs output
dataset_logs_df = pd.DataFrame.from_dict([{'message': 'test', 'messageType': 'test', 'origin': 'test', 'createdAt': '123234'}])
dataset_logs_record_packet = RecordPacket.from_dataframe(self.dataset_logs, dataset_logs_df)
self.dataset_logs_output_anchor.write(dataset_logs_record_packet)
def on_complete(self) -> None:
"""Handle for when the plugin is complete."""
self.provider.io.info("FilterLike tool done.")
AyxPlugin = register_plugin(TimDatasetManagement)
Output anchors defined in the ayx_workspace.json (generated by the CLI):
"output_anchors": {
"Output1": {
"label": "",
"allow_multiple": false,
"optional": false
},
"Output2": {
"label": "",
"allow_multiple": false,
"optional": false
}
}
Thanks for reading!
Hi, @MichielVanEetvelde ,
I just tested your code and there is no issues with output. What Designer version you are running?
Hi PetrT,
Sorry for the late response. We are running version 2021.3.1.47945.
We did find a (weird) solution to our problem. The only thing we had to do was change the order of opening and writing to the outputs from the last to the first. That did the trick!