import json
from typing import Any, Dict, List, Optional
from datasets import Audio, Features, Image, Sequence, Value
from .artifact import Artifact
from .dict_utils import dict_get
from .operator import InstanceOperatorValidator
from .settings_utils import get_constants
constants = get_constants()
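
# Output schema for fully processed dataset instances: the rendered model
# input ("source"), the gold "target" and "references", the serialized
# "task_data" JSON string, and bookkeeping fields used downstream
# (metrics, groups, subset, postprocessors, media).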
UNITXT_DATASET_SCHEMA = Features(
{
"source": Value("string"),
"target": Value("string"),
"references": Sequence(Value("string")),
"metrics": Sequence(Value("string")),
"groups": Sequence(Value("string")),
"subset": Sequence(Value("string")),
"media": {
"images": Sequence(Image()),
"audios": Sequence(Audio()),
},
"postprocessors": Sequence(Value("string")),
"task_data": Value(dtype="string"),
"data_classification_policy": Sequence(Value("string")),
}
)
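
# Schema for inference-only streams: the dataset schema minus the
# "target", "references", and "media" fields.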
UNITXT_INFERENCE_SCHEMA = Features(
{
"source": Value("string"),
"metrics": Sequence(Value("string")),
"groups": Sequence(Value("string")),
"subset": Sequence(Value("string")),
"postprocessors": Sequence(Value("string")),
"task_data": Value(dtype="string"),
"data_classification_policy": Sequence(Value("string")),
}
)


def get_schema(stream_name):
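    """Return the output schema for the given stream: the reduced inference
    schema for the inference stream, the full dataset schema otherwise."""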
if stream_name == constants.inference_stream:
return UNITXT_INFERENCE_SCHEMA
    return UNITXT_DATASET_SCHEMA


class Finalize(InstanceOperatorValidator):
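    """Final operator that flattens each instance into the output schema.

    Args:
        group_by: lists of attribute paths; each instance gets one
            JSON-encoded group key per entry in its "groups" field.
        remove_unnecessary_fields: if True, fields outside the target
            schema are removed from the instance.
    """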
group_by: List[List[str]]
remove_unnecessary_fields: bool = True

    @staticmethod
def artifact_to_jsonable(artifact):
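        """Represent an artifact by its catalog id when it has one,
        falling back to its full dict form."""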
if artifact.__id__ is None:
return artifact.to_dict()
return artifact.__id__

    def _prepare_media(self, instance):
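        """Ensure the instance has a "media" dict with "images" and
        "audios" lists, defaulting each to empty as the schema expects."""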
if "media" not in instance:
instance["media"] = {}
if "images" not in instance["media"]:
instance["media"]["images"] = []
if "audios" not in instance["media"]:
instance["media"]["audios"] = []
return instance

    def _get_instance_task_data(
self, instance: Dict[str, Any], use_reference_fields=True
) -> Dict[str, Any]:
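        """Assemble the instance's task_data: its input fields, optionally
        its reference fields, and a metadata dict carrying the data
        classification policy."""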
task_data = {
**instance["input_fields"],
"metadata": {
"data_classification_policy": instance["data_classification_policy"],
},
}
if use_reference_fields:
task_data = {**task_data, **instance["reference_fields"]}
return task_data

    def process(
self, instance: Dict[str, Any], stream_name: Optional[str] = None
) -> Dict[str, Any]:
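        """Reduce the instance to the output schema: serialize task_data
        (including any demos) to JSON, prune extra fields, compute group
        keys, and JSON-encode Artifact metrics and postprocessors."""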
task_data = self._get_instance_task_data(
instance,
use_reference_fields=stream_name != constants.inference_stream,
)
task_data["metadata"]["num_demos"] = instance["recipe_metadata"]["num_demos"]
task_data["metadata"]["template"] = self.artifact_to_jsonable(
instance["recipe_metadata"]["template"]
)
if "demos" in instance:
task_data["demos"] = [
self._get_instance_task_data(instance)
for instance in instance.pop("demos")
]
instance["task_data"] = json.dumps(task_data)
if self.remove_unnecessary_fields:
keys_to_delete = []
for key in instance.keys():
if key not in get_schema(stream_name):
keys_to_delete.append(key)
for key in keys_to_delete:
del instance[key]
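        # Merge metadata up into a flat dict so group_by paths can address
        # both task-data fields and metadata attributes.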
data = {**task_data, **task_data["metadata"]}
groups = []
for group_attributes in self.group_by:
group = {}
if isinstance(group_attributes, str):
group_attributes = [group_attributes]
for attribute in group_attributes:
group[attribute] = dict_get(data, attribute)
groups.append(json.dumps(group))
instance["groups"] = groups
instance["subset"] = []
instance = self._prepare_media(instance)
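        # The schema stores metrics and postprocessors as strings, so
        # serialize any Artifact objects to JSON.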
instance["metrics"] = [
metric.to_json() if isinstance(metric, Artifact) else metric
for metric in instance["metrics"]
]
instance["postprocessors"] = [
processor.to_json() if isinstance(processor, Artifact) else processor
for processor in instance["postprocessors"]
]
return instance

    def validate(self, instance: Dict[str, Any], stream_name: Optional[str] = None):
        """Verify that the instance conforms to the required output schema."""
assert instance is not None, "Instance is None"
assert isinstance(
instance, dict
), f"Instance should be a dict, got {type(instance)}"
schema = get_schema(stream_name)
        assert all(
            key in instance for key in schema
        ), f"Instance must contain the schema keys {list(schema.keys())}. Instance is: {instance}"
schema.encode_example(instance)