Skip to content

Commit 5db83e3

Browse files
[AL-5332] allow user to update datarow metadata by global keys (#1060)
1 parent 442eb1e commit 5db83e3

File tree

2 files changed

+24
-8
lines changed

2 files changed

+24
-8
lines changed

labelbox/schema/data_row_metadata.py

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,8 @@ class DataRowMetadataField(_CamelCaseMixin):
4848

4949

5050
class DataRowMetadata(_CamelCaseMixin):
51-
data_row_id: str
51+
global_key: Optional[str]
52+
data_row_id: Optional[str]
5253
fields: List[DataRowMetadataField]
5354

5455

@@ -58,7 +59,8 @@ class DeleteDataRowMetadata(_CamelCaseMixin):
5859

5960

6061
class DataRowMetadataBatchResponse(_CamelCaseMixin):
61-
data_row_id: str
62+
global_key: Optional[str]
63+
data_row_id: Optional[str]
6264
error: Optional[str] = None
6365
fields: List[Union[DataRowMetadataField, SchemaId]]
6466

@@ -75,7 +77,8 @@ class _UpsertDataRowMetadataInput(_CamelCaseMixin):
7577

7678
# Batch of upsert values for a datarow
7779
class _UpsertBatchDataRowMetadata(_CamelCaseMixin):
78-
data_row_id: str
80+
global_key: Optional[str]
81+
data_row_id: Optional[str]
7982
fields: List[_UpsertDataRowMetadataInput]
8083

8184

@@ -476,11 +479,12 @@ def parse_metadata_fields(
476479
def bulk_upsert(
477480
self, metadata: List[DataRowMetadata]
478481
) -> List[DataRowMetadataBatchResponse]:
479-
"""Upsert datarow metadata
480-
482+
"""Upsert metadata to a list of data rows
483+
484+
You may specify data row by either data_row_id or global_key
481485
482486
>>> metadata = DataRowMetadata(
483-
>>> data_row_id="datarow-id",
487+
>>> data_row_id="datarow-id", # Alternatively, set global_key="global-key"
484488
>>> fields=[
485489
>>> DataRowMetadataField(schema_id="schema-id", value="my-message"),
486490
>>> ...
@@ -504,6 +508,7 @@ def _batch_upsert(
504508
) -> List[DataRowMetadataBatchResponse]:
505509
query = """mutation UpsertDataRowMetadataBetaPyApi($metadata: [DataRowCustomMetadataBatchUpsertInput!]!) {
506510
upsertDataRowCustomMetadata(data: $metadata){
511+
globalKey
507512
dataRowId
508513
error
509514
fields {
@@ -515,7 +520,8 @@ def _batch_upsert(
515520
res = self._client.execute(
516521
query, {"metadata": upserts})['upsertDataRowCustomMetadata']
517522
return [
518-
DataRowMetadataBatchResponse(data_row_id=r['dataRowId'],
523+
DataRowMetadataBatchResponse(global_key=r['globalKey'],
524+
data_row_id=r['dataRowId'],
519525
error=r['error'],
520526
fields=self.parse_metadata(
521527
[r])[0].fields) for r in res
@@ -525,6 +531,7 @@ def _batch_upsert(
525531
for m in metadata:
526532
items.append(
527533
_UpsertBatchDataRowMetadata(
534+
global_key=m.global_key,
528535
data_row_id=m.data_row_id,
529536
fields=list(
530537
chain.from_iterable(

tests/integration/test_data_row_metadata.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,11 +53,12 @@ def big_dataset(dataset: Dataset, image_url):
5353
yield dataset
5454

5555

56-
def make_metadata(dr_id) -> DataRowMetadata:
56+
def make_metadata(dr_id: str = None, gk: str = None) -> DataRowMetadata:
5757
msg = "A message"
5858
time = datetime.utcnow()
5959

6060
metadata = DataRowMetadata(
61+
global_key=gk,
6162
data_row_id=dr_id,
6263
fields=[
6364
DataRowMetadataField(schema_id=SPLIT_SCHEMA_ID,
@@ -122,6 +123,14 @@ def test_bulk_upsert_datarow_metadata(data_row, mdo: DataRowMetadataOntology):
122123
assert len([field for field in exported[0].fields]) == 3
123124

124125

126+
def test_bulk_upsert_datarow_metadata_by_globalkey(
127+
data_rows, mdo: DataRowMetadataOntology):
128+
global_keys = [data_row.global_key for data_row in data_rows]
129+
metadata = [make_metadata(gk=global_key) for global_key in global_keys]
130+
errors = mdo.bulk_upsert(metadata)
131+
assert len(errors) == 0
132+
133+
125134
@pytest.mark.slow
126135
def test_large_bulk_upsert_datarow_metadata(big_dataset, mdo):
127136
metadata = []

0 commit comments

Comments (0)