Skip to content

Commit d77170f

Browse files
authored
[SN-145] removed buffered stream mentioning (#1662)
1 parent b59a474 commit d77170f

File tree

8 files changed

+17
-17
lines changed

8 files changed

+17
-17
lines changed

examples/annotation_import/pdf.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -282,7 +282,7 @@
282282
},
283283
{
284284
"metadata": {},
285-
"source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json_str)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)",
285+
"source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_buffered_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)",
286286
"cell_type": "code",
287287
"outputs": [],
288288
"execution_count": null

examples/basics/batches.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@
121121
},
122122
{
123123
"metadata": {},
124-
"source": "client.enable_experimental = True\n\nexport_task = dataset.export()\nexport_task.wait_till_done()\n\ndata_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
124+
"source": "client.enable_experimental = True\n\nexport_task = dataset.export()\nexport_task.wait_till_done()\n\ndata_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
125125
"cell_type": "code",
126126
"outputs": [],
127127
"execution_count": null
@@ -254,7 +254,7 @@
254254
},
255255
{
256256
"metadata": {},
257-
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
257+
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
258258
"cell_type": "code",
259259
"outputs": [],
260260
"execution_count": null

examples/basics/custom_embeddings.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@
102102
},
103103
{
104104
"metadata": {},
105-
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
105+
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
106106
"cell_type": "code",
107107
"outputs": [],
108108
"execution_count": null

examples/basics/data_rows.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@
113113
},
114114
{
115115
"metadata": {},
116-
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
116+
"source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)",
117117
"cell_type": "code",
118118
"outputs": [],
119119
"execution_count": null
@@ -236,7 +236,7 @@
236236
},
237237
{
238238
"metadata": {},
239-
"source": "# Fetch a data row from the first dataset example\nts = dataset.export()\nts.wait_till_done()\nDATA_ROW_ID = [json.loads(output.json_str) for output in ts.get_stream()\n ][0][\"data_row\"][\"id\"]\nGLOBAL_KEY = [json.loads(output.json_str) for output in ts.get_stream()\n ][0][\"data_row\"][\"global_key\"]\n\nprint(f\"Pick either a data row id : {DATA_ROW_ID} or global key: {GLOBAL_KEY}\")",
239+
"source": "# Fetch a data row from the first dataset example\nts = dataset.export()\nts.wait_till_done()\nDATA_ROW_ID = [json.loads(output.json) for output in ts.get_buffered_stream()\n ][0][\"data_row\"][\"id\"]\nGLOBAL_KEY = [json.loads(output.json) for output in ts.get_buffered_stream()\n ][0][\"data_row\"][\"global_key\"]\n\nprint(f\"Pick either a data row id : {DATA_ROW_ID} or global key: {GLOBAL_KEY}\")",
240240
"cell_type": "code",
241241
"outputs": [],
242242
"execution_count": null

examples/exports/composite_mask_export.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,7 @@
211211
},
212212
{
213213
"metadata": {},
214-
"source": "stream = export_task.get_stream()\n\nmask_tool_rgb_mapping = {}\n\nfor output in stream:\n # Parse the JSON string from the output\n output_json = json.loads(output.json_str)\n\n # Get the labels for the specified project ID or an empty list if the project ID is not found\n project_labels = (output_json[\"projects\"].get(PROJECT_ID,\n {}).get(\"labels\", []))\n\n # Iterate through each label\n for label in project_labels:\n # Get the list of annotations (objects) for the label\n annotations = label[\"annotations\"].get(\"objects\", [])\n\n # Iterate through each annotation\n for annotation in annotations:\n # Check if the annotation is of type \"ImageSegmentationMask\"\n if annotation.get(\"annotation_kind\") == \"ImageSegmentationMask\":\n # Add the color RGB information to the mapping dictionary\n mask_tool_rgb_mapping.setdefault(annotation[\"name\"], []).append(\n annotation[\"composite_mask\"][\"color_rgb\"])\n\nprint(mask_tool_rgb_mapping)",
214+
"source": "stream = export_task.get_buffered_stream()\n\nmask_tool_rgb_mapping = {}\n\nfor output in stream:\n # Parse the JSON string from the output\n output_json = json.loads(output.json)\n\n # Get the labels for the specified project ID or an empty list if the project ID is not found\n project_labels = (output_json[\"projects\"].get(PROJECT_ID,\n {}).get(\"labels\", []))\n\n # Iterate through each label\n for label in project_labels:\n # Get the list of annotations (objects) for the label\n annotations = label[\"annotations\"].get(\"objects\", [])\n\n # Iterate through each annotation\n for annotation in annotations:\n # Check if the annotation is of type \"ImageSegmentationMask\"\n if annotation.get(\"annotation_kind\") == \"ImageSegmentationMask\":\n # Add the color RGB information to the mapping dictionary\n mask_tool_rgb_mapping.setdefault(annotation[\"name\"], []).append(\n annotation[\"composite_mask\"][\"color_rgb\"])\n\nprint(mask_tool_rgb_mapping)",
215215
"cell_type": "code",
216216
"outputs": [],
217217
"execution_count": null
@@ -246,7 +246,7 @@
246246
},
247247
{
248248
"metadata": {},
249-
"source": "tools_frames_color = {}\nstream = export_task_video.get_stream()\n\n# Iterate over each output in the stream\nfor output in stream:\n output_json = json.loads(output.json_str)\n\n # Iterate over the labels in the specific project\n for dr in output_json[\"projects\"][VIDEO_PROJECT_ID][\"labels\"]:\n frames_data = dr[\"annotations\"][\"frames\"]\n\n # Iterate over each frame in the frames data\n for frame_key, frame_value in frames_data.items():\n\n # Iterate over each annotation in the frame\n for annotation_key, annotation_value in frame_value.items():\n if \"objects\" in annotation_key and annotation_value.values():\n\n # Iterate over each object in the annotation\n for object_key, object_value in annotation_value.items():\n if (object_value[\"annotation_kind\"] ==\n \"VideoSegmentationMask\"):\n # Update tools_frames_color with object information\n tools_frames_color.setdefault(\n object_value[\"name\"], []).append({\n frame_key:\n object_value[\"composite_mask\"]\n [\"color_rgb\"]\n })\n\nprint(tools_frames_color)",
249+
"source": "tools_frames_color = {}\nstream = export_task_video.get_buffered_stream()\n\n# Iterate over each output in the stream\nfor output in stream:\n output_json = json.loads(output.json)\n\n # Iterate over the labels in the specific project\n for dr in output_json[\"projects\"][VIDEO_PROJECT_ID][\"labels\"]:\n frames_data = dr[\"annotations\"][\"frames\"]\n\n # Iterate over each frame in the frames data\n for frame_key, frame_value in frames_data.items():\n\n # Iterate over each annotation in the frame\n for annotation_key, annotation_value in frame_value.items():\n if \"objects\" in annotation_key and annotation_value.values():\n\n # Iterate over each object in the annotation\n for object_key, object_value in annotation_value.items():\n if (object_value[\"annotation_kind\"] ==\n \"VideoSegmentationMask\"):\n # Update tools_frames_color with object information\n tools_frames_color.setdefault(\n object_value[\"name\"], []).append({\n frame_key:\n object_value[\"composite_mask\"]\n [\"color_rgb\"]\n })\n\nprint(tools_frames_color)",
250250
"cell_type": "code",
251251
"outputs": [],
252252
"execution_count": null

0 commit comments

Comments (0)