Commit 12a652f

Author: tszerszen
Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869)
Rename `params` to `prediction_params` to avoid clash with BaseOperator arguments
1 parent f77a11d commit 12a652f

3 files changed (+34, -6 lines)
Lines changed: 27 additions & 0 deletions

@@ -0,0 +1,27 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+## Additional info
+
+### Breaking change in `AutoMLBatchPredictOperator`
+The `params` argument of `AutoMLBatchPredictOperator` has been renamed to `prediction_params`.
+To keep the old behaviour, rename `params` to `prediction_params` when initializing an instance of `AutoMLBatchPredictOperator`.
+
+A `params` property still exists, but it is the one inherited from the parent class `BaseOperator`.
+That inherited `params` has nothing to do with predictions; use `prediction_params` instead.
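For illustration, a minimal migration sketch for DAG authors affected by this change. All IDs, GCS paths, and parameter values below are made-up examples, not taken from this commit:

```python
from airflow.providers.google.cloud.operators.automl import AutoMLBatchPredictOperator

predict = AutoMLBatchPredictOperator(
    task_id="automl_batch_predict",
    model_id="my-model-id",                 # illustrative AutoML model ID
    location="us-central1",
    project_id="my-gcp-project",
    input_config={"gcs_source": {"input_uris": ["gs://my-bucket/input.csv"]}},
    output_config={"gcs_destination": {"output_uri_prefix": "gs://my-bucket/output/"}},
    # Before this commit this keyword was called `params`; only the name changes,
    # the accepted dictionary stays the same.
    prediction_params={"feature_importance": "true"},
)
```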

airflow/providers/google/cloud/operators/automl.py

Lines changed: 6 additions & 6 deletions

@@ -240,9 +240,9 @@ class AutoMLBatchPredictOperator(BaseOperator):
         written. If a dict is provided, it must be of the same form as the protobuf message
         `google.cloud.automl_v1beta1.types.BatchPredictOutputConfig`
     :type output_config: Union[dict, ~google.cloud.automl_v1beta1.types.BatchPredictOutputConfig]
-    :param params: Additional domain-specific parameters for the predictions, any string must be up to
-        25000 characters long.
-    :type params: Optional[Dict[str, str]]
+    :param prediction_params: Additional domain-specific parameters for the predictions,
+        any string must be up to 25000 characters long.
+    :type prediction_params: Optional[Dict[str, str]]
     :param project_id: ID of the Google Cloud project where model is located if None then
         default project_id is used.
     :type project_id: str
@@ -287,7 +287,7 @@ def __init__( # pylint: disable=too-many-arguments
         output_config: dict,
         location: str,
         project_id: Optional[str] = None,
-        params: Optional[Dict[str, str]] = None,
+        prediction_params: Optional[Dict[str, str]] = None,
         metadata: Optional[MetaData] = None,
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
@@ -300,7 +300,7 @@ def __init__( # pylint: disable=too-many-arguments
         self.model_id = model_id
         self.location = location
         self.project_id = project_id
-        self.params = params  # type: ignore
+        self.prediction_params = prediction_params
         self.metadata = metadata
         self.timeout = timeout
         self.retry = retry
@@ -321,7 +321,7 @@ def execute(self, context):
             output_config=self.output_config,
             project_id=self.project_id,
             location=self.location,
-            params=self.params,
+            params=self.prediction_params,
             retry=self.retry,
             timeout=self.timeout,
             metadata=self.metadata,
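As the `execute` hunk above shows, the renamed attribute is still forwarded to `CloudAutoMLHook.batch_predict` under the keyword `params`, so only the operator's constructor argument changes. A sketch of how the two same-named concepts now coexist on one task (illustrative values, not from this commit):

```python
from airflow.providers.google.cloud.operators.automl import AutoMLBatchPredictOperator

predict = AutoMLBatchPredictOperator(
    task_id="automl_batch_predict",
    model_id="my-model-id",
    location="us-central1",
    input_config={"gcs_source": {"input_uris": ["gs://my-bucket/input.csv"]}},
    output_config={"gcs_destination": {"output_uri_prefix": "gs://my-bucket/output/"}},
    # Inherited from BaseOperator: merged into the Jinja template context of
    # templated fields; never sent to the AutoML API.
    params={"run_name": "nightly"},
    # Operator-specific: execute() forwards this dict to the hook's
    # batch_predict(params=...) call, as shown in the diff above.
    prediction_params={"feature_importance": "true"},
)
```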

tests/providers/google/cloud/operators/test_automl.py

Lines changed: 1 addition & 0 deletions

@@ -94,6 +94,7 @@ def test_execute(self, mock_hook):
             input_config=INPUT_CONFIG,
             output_config=OUTPUT_CONFIG,
             task_id=TASK_ID,
+            prediction_params={},
         )
         op.execute(context=None)
         mock_hook.return_value.batch_predict.assert_called_once_with(
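Beyond the fixture update above, a self-contained sketch of the kind of check the rename enables: asserting that the two arguments land on separate attributes. The test name and all values are illustrative and assume the google provider package is installed; nothing here is copied from the test file:

```python
from airflow.providers.google.cloud.operators.automl import AutoMLBatchPredictOperator


def test_prediction_params_does_not_clash_with_base_params():
    op = AutoMLBatchPredictOperator(
        task_id="automl_batch_predict",
        model_id="my-model-id",
        location="us-central1",
        input_config={"gcs_source": {"input_uris": ["gs://my-bucket/input.csv"]}},
        output_config={"gcs_destination": {"output_uri_prefix": "gs://my-bucket/output/"}},
        prediction_params={"feature_importance": "true"},
        params={"run_name": "nightly"},
    )
    # The AutoML-specific dict lives on the attribute introduced by this commit ...
    assert op.prediction_params == {"feature_importance": "true"}
    # ... while op.params is BaseOperator's template-context dict, untouched by the operator.
    assert dict(op.params) == {"run_name": "nightly"}
```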
