Skip to content

Commit 01a4b3d

Browse files
committed
Automated rollback of commit bb69352
PiperOrigin-RevId: 627625227
1 parent 8e897c0 commit 01a4b3d

File tree

57 files changed

+550
-745
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

57 files changed

+550
-745
lines changed

RELEASE.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,6 @@
5555
can now lead to (justified) type checking errors that were previously
5656
hidden due to `C` being of type `Any`.
5757
* `ph.to_list()` was renamed to `ph.make_list()` for consistency.
58-
* Support KFP pipeline spec 2.1.0 version schema
5958

6059

6160
### For Pipeline Authors
@@ -82,7 +81,6 @@
8281
| `tensorflow-decision-forests` | `>=1.0.1,<1.9` | `>=1.0.1,<2` | |
8382
| `tensorflow-hub` | `>=0.9.0,<0.14` | `>=0.15.0,<0.16` | |
8483
| `tensorflow-serving` | `>=1.15,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,<3` | `>=2.15,<2.16` | |
85-
| `kfp-pipeline-spec` | `kfp-pipeline-spec>=0.1.10,<0.2` | `>0.1.13,<0.2` | |
8684

8785
## Documentation Updates
8886

tfx/dependencies.py

Lines changed: 7 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -145,29 +145,25 @@ def make_extra_packages_kfp():
145145
return [
146146
# TODO(b/304892416): Migrate from KFP SDK v1 to v2.
147147
'kfp>=1.8.14,<2',
148-
'kfp-pipeline-spec>0.1.13,<0.2',
148+
'kfp-pipeline-spec>=0.1.10,<0.2',
149149
]
150150

151151

152152
def make_extra_packages_test():
153153
"""Prepare extra packages needed for running unit tests."""
154154
# Note: It is okay to pin packages to exact versions in this list to minimize
155155
# conflicts.
156-
return (
157-
make_extra_packages_airflow()
158-
+ make_extra_packages_kfp()
159-
+ [
160-
'pytest>=5,<7',
161-
]
162-
)
156+
return make_extra_packages_airflow() + make_extra_packages_kfp() + [
157+
'pytest>=5,<7',
158+
]
163159

164160

165161
def make_extra_packages_docker_image():
166162
# Packages needed for tfx docker image.
167163
return [
168164
# TODO(b/304892416): Migrate from KFP SDK v1 to v2.
169165
'kfp>=1.8.14,<2',
170-
'kfp-pipeline-spec>0.1.13,<0.2',
166+
'kfp-pipeline-spec>=0.1.10,<0.2',
171167
'mmh>=2.2,<3',
172168
'python-snappy>=0.5,<0.6',
173169
# Required for tfx/examples/penguin/penguin_utils_cloud_tuner.py
@@ -195,12 +191,10 @@ def make_extra_packages_tf_ranking():
195191
# Packages needed for tf-ranking which is used in tfx/examples/ranking.
196192
return [
197193
'tensorflow-ranking>=0.5,<0.6',
198-
'struct2tensor'
199-
+ select_constraint(
194+
'struct2tensor' + select_constraint(
200195
default='>=0.45,<0.46',
201196
nightly='>=0.46.0.dev',
202-
git_master='@git+https://github.com/google/struct2tensor@master',
203-
),
197+
git_master='@git+https://github.com/google/struct2tensor@master'),
204198
]
205199

206200

tfx/orchestration/kubeflow/v2/compiler_utils.py

Lines changed: 25 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -108,15 +108,15 @@ def build_parameter_type_spec(
108108
is_runtime_param = isinstance(value, data_types.RuntimeParameter)
109109
result = pipeline_pb2.ComponentInputsSpec.ParameterSpec()
110110
if isinstance(value, int) or (is_runtime_param and value.ptype == int):
111-
result.parameter_type = pipeline_pb2.ParameterType.NUMBER_INTEGER
111+
result.type = pipeline_pb2.PrimitiveType.PrimitiveTypeEnum.INT
112112
elif isinstance(value, float) or (is_runtime_param and value.ptype == float):
113-
result.parameter_type = pipeline_pb2.ParameterType.NUMBER_DOUBLE
113+
result.type = pipeline_pb2.PrimitiveType.PrimitiveTypeEnum.DOUBLE
114114
elif isinstance(value, str) or (is_runtime_param and value.ptype == str):
115-
result.parameter_type = pipeline_pb2.ParameterType.STRING
115+
result.type = pipeline_pb2.PrimitiveType.PrimitiveTypeEnum.STRING
116116
else:
117117
# By default, unrecognized object will be json dumped, hence is string type.
118118
# For example, resolver class.
119-
result.parameter_type = pipeline_pb2.ParameterType.STRING
119+
result.type = pipeline_pb2.PrimitiveType.PrimitiveTypeEnum.STRING
120120
return result
121121

122122

@@ -236,54 +236,47 @@ def value_converter(
236236

237237
result = pipeline_pb2.ValueOrRuntimeParameter()
238238
if isinstance(tfx_value, (int, float, str)):
239-
result.constant.CopyFrom(get_google_value(tfx_value))
239+
result.constant_value.CopyFrom(get_kubeflow_value(tfx_value))
240240
elif isinstance(tfx_value, (Dict, List)):
241-
result.constant.CopyFrom(
242-
struct_pb2.Value(string_value=json.dumps(tfx_value))
243-
)
241+
result.constant_value.CopyFrom(
242+
pipeline_pb2.Value(string_value=json.dumps(tfx_value)))
244243
elif isinstance(tfx_value, data_types.RuntimeParameter):
245244
# Attach the runtime parameter to the context.
246245
parameter_utils.attach_parameter(tfx_value)
247246
result.runtime_parameter = tfx_value.name
248247
elif isinstance(tfx_value, metadata_store_pb2.Value):
249248
if tfx_value.WhichOneof('value') == 'int_value':
250-
result.constant.CopyFrom(
251-
struct_pb2.Value(number_value=tfx_value.int_value)
252-
)
249+
result.constant_value.CopyFrom(
250+
pipeline_pb2.Value(int_value=tfx_value.int_value))
253251
elif tfx_value.WhichOneof('value') == 'double_value':
254-
result.constant.CopyFrom(
255-
struct_pb2.Value(number_value=tfx_value.double_value)
256-
)
252+
result.constant_value.CopyFrom(
253+
pipeline_pb2.Value(double_value=tfx_value.double_value))
257254
elif tfx_value.WhichOneof('value') == 'string_value':
258-
result.constant.CopyFrom(
259-
struct_pb2.Value(string_value=tfx_value.string_value)
260-
)
255+
result.constant_value.CopyFrom(
256+
pipeline_pb2.Value(string_value=tfx_value.string_value))
261257
elif isinstance(tfx_value, message.Message):
262-
result.constant.CopyFrom(
263-
struct_pb2.Value(
258+
result.constant_value.CopyFrom(
259+
pipeline_pb2.Value(
264260
string_value=json_format.MessageToJson(
265-
message=tfx_value, sort_keys=True
266-
)
267-
)
268-
)
261+
message=tfx_value, sort_keys=True)))
269262
else:
270263
# By default will attempt to encode the object using json_utils.dumps.
271-
result.constant.CopyFrom(
272-
struct_pb2.Value(string_value=json_utils.dumps(tfx_value))
273-
)
264+
result.constant_value.CopyFrom(
265+
pipeline_pb2.Value(string_value=json_utils.dumps(tfx_value)))
274266
return result
275267

276268

277-
def get_google_value(
278-
tfx_value: Union[int, float, str],
279-
) -> Optional[struct_pb2.Value]:
269+
def get_kubeflow_value(
270+
tfx_value: Union[int, float, str]) -> Optional[pipeline_pb2.Value]:
280271
"""Converts TFX/MLMD values into Kubeflow pipeline Value proto message."""
281272
if tfx_value is None:
282273
return None
283274

284-
result = struct_pb2.Value()
285-
if isinstance(tfx_value, int) or isinstance(tfx_value, float):
286-
result.number_value = tfx_value
275+
result = pipeline_pb2.Value()
276+
if isinstance(tfx_value, int):
277+
result.int_value = tfx_value
278+
elif isinstance(tfx_value, float):
279+
result.double_value = tfx_value
287280
elif isinstance(tfx_value, str):
288281
result.string_value = tfx_value
289282
else:

tfx/orchestration/kubeflow/v2/compiler_utils_test.py

Lines changed: 7 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -136,24 +136,19 @@ def testCustomArtifactSchemaMismatchFails(self):
136136
_MyArtifactWithProperty.PROPERTIES)
137137

138138
def testBuildParameterTypeSpec(self):
139-
type_enum = pipeline_pb2.ParameterType.ParameterTypeEnum
139+
type_enum = pipeline_pb2.PrimitiveType.PrimitiveTypeEnum
140140
testdata = {
141-
42: type_enum.NUMBER_INTEGER,
142-
42.1: type_enum.NUMBER_DOUBLE,
141+
42: type_enum.INT,
142+
42.1: type_enum.DOUBLE,
143143
'42': type_enum.STRING,
144-
data_types.RuntimeParameter(
145-
name='_', ptype=int
146-
): type_enum.NUMBER_INTEGER,
147-
data_types.RuntimeParameter(
148-
name='_', ptype=float
149-
): type_enum.NUMBER_DOUBLE,
144+
data_types.RuntimeParameter(name='_', ptype=int): type_enum.INT,
145+
data_types.RuntimeParameter(name='_', ptype=float): type_enum.DOUBLE,
150146
data_types.RuntimeParameter(name='_', ptype=str): type_enum.STRING,
151147
}
152148
for value, expected_type_enum in testdata.items():
153149
self.assertEqual(
154-
compiler_utils.build_parameter_type_spec(value).parameter_type,
155-
expected_type_enum,
156-
)
150+
compiler_utils.build_parameter_type_spec(value).type,
151+
expected_type_enum)
157152

158153
def testBuildOutputParameterSpecValueArtifact(self):
159154
param = pipeline_pb2.ParameterType

tfx/orchestration/kubeflow/v2/container/kubeflow_v2_entrypoint_utils.py

Lines changed: 12 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -113,9 +113,7 @@ def refactor_model_blessing(model_blessing: artifact.Artifact,
113113
name_from_id=name_from_id))
114114

115115

116-
def parse_execution_properties(
117-
exec_properties: Any, inputs_spec: pipeline_pb2.ComponentInputsSpec
118-
) -> Dict[str, Any]:
116+
def parse_execution_properties(exec_properties: Any) -> Dict[str, Any]:
119117
"""Parses a map from key to Value proto as execution properties.
120118
121119
Parses a mapping field in a protobuf message, whose value is a Kubeflow Value
@@ -124,8 +122,6 @@ def parse_execution_properties(
124122
Args:
125123
exec_properties: the mapping field in the proto message, representing the
126124
execution properties of the component.
127-
inputs_spec: Component input spec which has the information of parameter
128-
types of exec_properties.
129125
130126
Returns:
131127
dictionary of the parsed execution properties.
@@ -136,49 +132,35 @@ def parse_execution_properties(
136132
if k == _OLD_INPUT_BASE_PROPERTY_NAME:
137133
k = standard_component_specs.INPUT_BASE_KEY
138134
# Translate each field from Value pb to plain value.
139-
result[k] = getattr(v, v.WhichOneof('kind'))
140-
parameter = inputs_spec.parameters.get(k)
141-
if (
142-
parameter
143-
and parameter.parameter_type
144-
== pipeline_pb2.ParameterType.NUMBER_INTEGER
145-
):
146-
result[k] = int(result[k])
135+
result[k] = getattr(v, v.WhichOneof('value'))
147136
if result[k] is None:
148-
raise TypeError(
149-
'Unrecognized type encountered at field %s of execution properties %s'
150-
% (k, exec_properties)
151-
)
137+
raise TypeError('Unrecognized type encountered at field %s of execution'
138+
' properties %s' % (k, exec_properties))
152139

153140
return result
154141

155142

156143
def translate_executor_output(
157144
output_dict: Mapping[str, List[artifact.Artifact]],
158-
name_from_id: Mapping[int, str],
159-
) -> Dict[str, pipeline_pb2.ArtifactList]:
145+
name_from_id: Mapping[int,
146+
str]) -> Dict[str, pipeline_pb2.ArtifactList]:
160147
"""Translates output_dict to a Kubeflow ArtifactList mapping."""
161148
result = {}
162149
for k, v in output_dict.items():
163-
result[k] = pipeline_pb2.ArtifactList(
164-
artifacts=[
165-
to_runtime_artifact(
166-
artifact_utils.get_single_instance(v), name_from_id
167-
)
168-
]
169-
)
150+
result[k] = pipeline_pb2.ArtifactList(artifacts=[
151+
to_runtime_artifact(
152+
artifact_utils.get_single_instance(v), name_from_id)
153+
])
170154

171155
return result
172156

173157

174158
def _get_json_value_mapping(
175-
mlmd_value_mapping: Dict[str, metadata_store_pb2.Value],
176-
) -> Dict[str, Any]:
159+
mlmd_value_mapping: Dict[str, metadata_store_pb2.Value]) -> Dict[str, Any]:
177160
"""Converts a mapping field with MLMD Value to JSON Value."""
178161

179162
def get_json_value(
180-
mlmd_value: metadata_store_pb2.Value,
181-
) -> artifact.JsonValueType:
163+
mlmd_value: metadata_store_pb2.Value) -> artifact.JsonValueType:
182164
if not mlmd_value.HasField('value'):
183165
return None
184166
elif mlmd_value.WhichOneof('value') == 'int_value':

tfx/orchestration/kubeflow/v2/container/kubeflow_v2_entrypoint_utils_test.py

Lines changed: 9 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -94,38 +94,26 @@ def setUp(self):
9494
# Use two protos to store the testdata.
9595
artifacts_pb = pipeline_pb2.ExecutorInput()
9696
io_utils.parse_json_file(
97-
os.path.join(source_data_dir, 'artifacts.json'), artifacts_pb
98-
)
97+
os.path.join(source_data_dir, 'artifacts.json'), artifacts_pb)
9998
self._artifacts = artifacts_pb.inputs.artifacts
10099

101100
# Test legacy properties/custom properties deserialization.
102101
artifacts_legacy_pb = pipeline_pb2.ExecutorInput()
103102
io_utils.parse_json_file(
104103
os.path.join(source_data_dir, 'artifacts_legacy.json'),
105-
artifacts_legacy_pb,
106-
)
104+
artifacts_legacy_pb)
107105
self._artifacts_legacy = artifacts_legacy_pb.inputs.artifacts
108106

109107
properties_pb = pipeline_pb2.ExecutorInput()
110-
inputs_spec_pb = pipeline_pb2.ComponentInputsSpec()
111-
inputs_spec_pb.parameters['input_config'].parameter_type = (
112-
pipeline_pb2.ParameterType.STRING
113-
)
114-
inputs_spec_pb.parameters['output_config'].parameter_type = (
115-
pipeline_pb2.ParameterType.STRING
116-
)
117108
io_utils.parse_json_file(
118-
os.path.join(source_data_dir, 'exec_properties.json'), properties_pb
119-
)
120-
self._properties = properties_pb.inputs.parameter_values
121-
self._inputs_spec = inputs_spec_pb
109+
os.path.join(source_data_dir, 'exec_properties.json'), properties_pb)
110+
self._properties = properties_pb.inputs.parameters
122111

123112
def testParseRawArtifactDict(self):
124113
for artifacts_dict in [self._artifacts, self._artifacts_legacy]:
125114
name_from_id = {}
126115
actual_result = kubeflow_v2_entrypoint_utils.parse_raw_artifact_dict(
127-
artifacts_dict, name_from_id
128-
)
116+
artifacts_dict, name_from_id)
129117
for key in self._expected_dict:
130118
(expected_artifact,) = self._expected_dict[key]
131119
(actual_artifact,) = actual_result[key]
@@ -149,25 +137,16 @@ def testParseExecutionProperties(self):
149137
self.assertDictEqual(
150138
_EXEC_PROPERTIES,
151139
kubeflow_v2_entrypoint_utils.parse_execution_properties(
152-
self._properties, self._inputs_spec
153-
),
154-
)
140+
self._properties))
155141

156142
def testParseExecutionPropertiesMapsInputBaseUri(self):
157143
properties_pb = pipeline_pb2.ExecutorInput()
158-
properties_pb.inputs.parameter_values['input_base_uri'].string_value = (
159-
'gs://input/base'
160-
)
161-
inputs_spec_pb = pipeline_pb2.ComponentInputsSpec()
162-
inputs_spec_pb.parameters['input_base_uri'].parameter_type = (
163-
pipeline_pb2.ParameterType.STRING
164-
)
144+
properties_pb.inputs.parameters[
145+
'input_base_uri'].string_value = 'gs://input/base'
165146
self.assertDictEqual(
166147
{'input_base': 'gs://input/base'},
167148
kubeflow_v2_entrypoint_utils.parse_execution_properties(
168-
properties_pb.inputs.parameter_values, inputs_spec_pb
169-
),
170-
)
149+
properties_pb.inputs.parameters))
171150

172151
def testCanChangePropertiesByNameIdMapping(self):
173152
model_blessing = standard_artifacts.ModelBlessing()

0 commit comments

Comments (0)