@@ -162,9 +162,68 @@ class CreateModelInput(object):
 
 @attr.s
 class TransformInput(object):
-    """Create a class containing all the parameters.
+    """Creates a class containing parameters for configuring input data for a batch transform job.
 
     It can be used when calling ``sagemaker.transformer.Transformer.transform()``
+
+    Args:
+        data (str): The S3 location of the input data that the model can consume.
+        data_type (str): The data type for a batch transform job.
+            (default: ``'S3Prefix'``)
+        content_type (str): The multipurpose internet mail extension (MIME) type of the data.
+            (default: None)
+        compression_type (str): If your transform data is compressed, specify the compression type.
+            Valid values: ``'Gzip'``, ``None``
+            (default: None)
+        split_type (str): The method to use to split the transform job's data files into smaller
+            batches.
+            Valid values: ``'Line'``, ``'RecordIO'``, ``'TFRecord'``, ``None``
+            (default: None)
+        input_filter (str): A JSONPath expression for selecting a portion of the input data to pass
+            to the algorithm. For example, you can use this parameter to exclude fields, such as an
+            ID column, from the input. If you want SageMaker to pass the entire input dataset to the
+            algorithm, accept the default value ``$``. For more information on batch transform data
+            processing, input, join, and output, see
+            `Associate Prediction Results with Input Records
+            <https://docs.aws.amazon.com/sagemaker/latest/dg/batch-transform-data-processing.html?>`_
+            in the *Amazon SageMaker developer guide*.
+            Example value: ``$``. For more information about valid values for this parameter, see
+            `JSONPath Operators
+            <https://docs.aws.amazon.com/sagemaker/latest/dg/batch-transform-data-processing.html#data-processing-operators>`_
+            in the *Amazon SageMaker developer guide*.
+            (default: ``$``)
+        output_filter (str): A JSONPath expression for selecting a portion of the joined dataset to
+            save in the output file for a batch transform job. If you want SageMaker to store the
+            entire input dataset in the output file, leave the default value ``$``. If you specify
+            indexes that aren't within the dimension size of the joined dataset, you get an error.
+            Example value: ``$``. For more information about valid values for this parameter, see
+            `JSONPath Operators
+            <https://docs.aws.amazon.com/sagemaker/latest/dg/batch-transform-data-processing.html#data-processing-operators>`_
+            in the *Amazon SageMaker developer guide*.
+            (default: ``$``)
+        join_source (str): Specifies the source of the data to join with the transformed data.
+            The default value is ``None``, which specifies not to join the input with the
+            transformed data. If you want the batch transform job to join the original input data
+            with the transformed data, set to ``'Input'``.
+            Valid values: ``None``, ``'Input'``
+            (default: None)
+        model_client_config (dict): Configures the timeout and maximum number of retries for
+            processing a transform job invocation.
+
+            * ``'InvocationsTimeoutInSeconds'`` (int) - The timeout value in seconds for an
+              invocation request. The default value is 600.
+            * ``'InvocationsMaxRetries'`` (int) - The maximum number of retries when invocation
+              requests fail.
+
+            (default: ``{'InvocationsTimeoutInSeconds': 600, 'InvocationsMaxRetries': 3}``)
+        batch_data_capture_config (dict): An object of `BatchDataCaptureConfig
+            <https://sagemaker.readthedocs.io/en/stable/api/utility/inputs.html#sagemaker.inputs.BatchDataCaptureConfig>`_
+            that specifies configuration related to the batch transform job
+            for use with Amazon SageMaker Model Monitoring. For more information,
+            see `Capture data from batch transform job
+            <https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture-batch.html>`_
+            in the *Amazon SageMaker developer guide*.
+            (default: None)
     """
 
     data: str = attr.ib()
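
For orientation, here is a minimal usage sketch of the parameters documented in the diff above. It is not taken from this change: it assumes the class is exposed as sagemaker.inputs.TransformInput (the same module the BatchDataCaptureConfig link points to), and the S3 prefix and filter expressions are placeholders.

# A minimal sketch under the assumptions stated above.
from sagemaker.inputs import TransformInput

transform_input = TransformInput(
    data="s3://example-bucket/batch-input/",  # placeholder S3 prefix of input records
    data_type="S3Prefix",
    content_type="text/csv",
    split_type="Line",
    # Drop the first column (e.g. an ID) before invoking the model, then join the
    # predictions back onto the original records and keep only the ID and prediction.
    input_filter="$[1:]",
    join_source="Input",
    output_filter="$[0,-1]",
    # Per-invocation timeout and retry behaviour, mirroring the documented defaults.
    model_client_config={
        "InvocationsTimeoutInSeconds": 600,
        "InvocationsMaxRetries": 3,
    },
)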