
Commit d3fe7ce

Author: AWS
Commit message: Amazon Personalize Update: Amazon Personalize: Adds ability to get batch recommendations by creating a batch inference job.

1 parent 693827e commit d3fe7ce

File tree

3 files changed: +276 −1 lines changed
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+{
+  "type": "feature",
+  "category": "Amazon Personalize",
+  "description": "Amazon Personalize: Adds ability to get batch recommendations by creating a batch inference job."
+}

services/personalize/src/main/resources/codegen-resources/paginators-1.json

Lines changed: 6 additions & 0 deletions
@@ -1,5 +1,11 @@
 {
   "pagination": {
+    "ListBatchInferenceJobs": {
+      "input_token": "nextToken",
+      "limit_key": "maxResults",
+      "output_token": "nextToken",
+      "result_key": "batchInferenceJobs"
+    },
     "ListCampaigns": {
       "input_token": "nextToken",
       "limit_key": "maxResults",

services/personalize/src/main/resources/codegen-resources/service-2.json

Lines changed: 265 additions & 1 deletion
@@ -13,6 +13,23 @@
     "uid":"personalize-2018-05-22"
   },
   "operations":{
+    "CreateBatchInferenceJob":{
+      "name":"CreateBatchInferenceJob",
+      "http":{
+        "method":"POST",
+        "requestUri":"/"
+      },
+      "input":{"shape":"CreateBatchInferenceJobRequest"},
+      "output":{"shape":"CreateBatchInferenceJobResponse"},
+      "errors":[
+        {"shape":"InvalidInputException"},
+        {"shape":"ResourceAlreadyExistsException"},
+        {"shape":"LimitExceededException"},
+        {"shape":"ResourceNotFoundException"},
+        {"shape":"ResourceInUseException"}
+      ],
+      "documentation":"<p>Creates a batch inference job. The operation can handle up to 50 million records and the input file must be in JSON format. For more information, see <a>recommendations-batch</a>.</p>"
+    },
     "CreateCampaign":{
       "name":"CreateCampaign",
       "http":{
@@ -252,6 +269,21 @@
       "documentation":"<p>Describes the given algorithm.</p>",
       "idempotent":true
     },
+    "DescribeBatchInferenceJob":{
+      "name":"DescribeBatchInferenceJob",
+      "http":{
+        "method":"POST",
+        "requestUri":"/"
+      },
+      "input":{"shape":"DescribeBatchInferenceJobRequest"},
+      "output":{"shape":"DescribeBatchInferenceJobResponse"},
+      "errors":[
+        {"shape":"InvalidInputException"},
+        {"shape":"ResourceNotFoundException"}
+      ],
+      "documentation":"<p>Gets the properties of a batch inference job including name, Amazon Resource Name (ARN), status, input and output configurations, and the ARN of the solution version used to generate the recommendations.</p>",
+      "idempotent":true
+    },
     "DescribeCampaign":{
       "name":"DescribeCampaign",
       "http":{
@@ -417,6 +449,21 @@
       ],
       "documentation":"<p>Gets the metrics for the specified solution version.</p>"
     },
+    "ListBatchInferenceJobs":{
+      "name":"ListBatchInferenceJobs",
+      "http":{
+        "method":"POST",
+        "requestUri":"/"
+      },
+      "input":{"shape":"ListBatchInferenceJobsRequest"},
+      "output":{"shape":"ListBatchInferenceJobsResponse"},
+      "errors":[
+        {"shape":"InvalidInputException"},
+        {"shape":"InvalidNextTokenException"}
+      ],
+      "documentation":"<p>Gets a list of the batch inference jobs that have been performed off of a solution version.</p>",
+      "idempotent":true
+    },
     "ListCampaigns":{
       "name":"ListCampaigns",
       "http":{
@@ -671,6 +718,113 @@
       "type":"string",
       "max":10000
     },
+    "BatchInferenceJob":{
+      "type":"structure",
+      "members":{
+        "jobName":{
+          "shape":"Name",
+          "documentation":"<p>The name of the batch inference job.</p>"
+        },
+        "batchInferenceJobArn":{
+          "shape":"Arn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the batch inference job.</p>"
+        },
+        "failureReason":{
+          "shape":"FailureReason",
+          "documentation":"<p>If the batch inference job failed, the reason for the failure.</p>"
+        },
+        "solutionVersionArn":{
+          "shape":"Arn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the solution version from which the batch inference job was created.</p>"
+        },
+        "numResults":{
+          "shape":"NumBatchResults",
+          "documentation":"<p>The number of recommendations generated by the batch inference job. This number includes the error messages generated for failed input records.</p>"
+        },
+        "jobInput":{
+          "shape":"BatchInferenceJobInput",
+          "documentation":"<p>The Amazon S3 path that leads to the input data used to generate the batch inference job.</p>"
+        },
+        "jobOutput":{
+          "shape":"BatchInferenceJobOutput",
+          "documentation":"<p>The Amazon S3 bucket that contains the output data generated by the batch inference job.</p>"
+        },
+        "roleArn":{
+          "shape":"RoleArn",
+          "documentation":"<p>The ARN of the Amazon Identity and Access Management (IAM) role that requested the batch inference job.</p>"
+        },
+        "status":{
+          "shape":"Status",
+          "documentation":"<p>The status of the batch inference job. The status is one of the following values:</p> <ul> <li> <p>PENDING</p> </li> <li> <p>IN PROGRESS</p> </li> <li> <p>ACTIVE</p> </li> <li> <p>CREATE FAILED</p> </li> </ul>"
+        },
+        "creationDateTime":{
+          "shape":"Date",
+          "documentation":"<p>The time at which the batch inference job was created.</p>"
+        },
+        "lastUpdatedDateTime":{
+          "shape":"Date",
+          "documentation":"<p>The time at which the batch inference job was last updated.</p>"
+        }
+      },
+      "documentation":"<p>Contains information on a batch inference job.</p>"
+    },
+    "BatchInferenceJobInput":{
+      "type":"structure",
+      "required":["s3DataSource"],
+      "members":{
+        "s3DataSource":{
+          "shape":"S3DataConfig",
+          "documentation":"<p>The URI of the Amazon S3 location that contains your input data. The Amazon S3 bucket must be in the same region as the API endpoint you are calling.</p>"
+        }
+      },
+      "documentation":"<p>The input configuration of a batch inference job.</p>"
+    },
+    "BatchInferenceJobOutput":{
+      "type":"structure",
+      "required":["s3DataDestination"],
+      "members":{
+        "s3DataDestination":{
+          "shape":"S3DataConfig",
+          "documentation":"<p>Information on the Amazon S3 bucket in which the batch inference job's output is stored.</p>"
+        }
+      },
+      "documentation":"<p>The output configuration parameters of a batch inference job.</p>"
+    },
+    "BatchInferenceJobSummary":{
+      "type":"structure",
+      "members":{
+        "batchInferenceJobArn":{
+          "shape":"Arn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the batch inference job.</p>"
+        },
+        "jobName":{
+          "shape":"Name",
+          "documentation":"<p>The name of the batch inference job.</p>"
+        },
+        "status":{
+          "shape":"Status",
+          "documentation":"<p>The status of the batch inference job. The status is one of the following values:</p> <ul> <li> <p>PENDING</p> </li> <li> <p>IN PROGRESS</p> </li> <li> <p>ACTIVE</p> </li> <li> <p>CREATE FAILED</p> </li> </ul>"
+        },
+        "creationDateTime":{
+          "shape":"Date",
+          "documentation":"<p>The time at which the batch inference job was created.</p>"
+        },
+        "lastUpdatedDateTime":{
+          "shape":"Date",
+          "documentation":"<p>The time at which the batch inference job was last updated.</p>"
+        },
+        "failureReason":{
+          "shape":"FailureReason",
+          "documentation":"<p>If the batch inference job failed, the reason for the failure.</p>"
+        }
+      },
+      "documentation":"<p>A truncated version of the <a>BatchInferenceJob</a> datatype. The <a>ListBatchInferenceJobs</a> operation returns a list of batch inference job summaries.</p>"
+    },
+    "BatchInferenceJobs":{
+      "type":"list",
+      "member":{"shape":"BatchInferenceJobSummary"},
+      "max":100
+    },
     "Boolean":{"type":"boolean"},
     "Campaign":{
       "type":"structure",
@@ -835,6 +989,51 @@
       "type":"double",
       "min":-1000000
     },
+    "CreateBatchInferenceJobRequest":{
+      "type":"structure",
+      "required":[
+        "jobName",
+        "solutionVersionArn",
+        "jobInput",
+        "jobOutput",
+        "roleArn"
+      ],
+      "members":{
+        "jobName":{
+          "shape":"Name",
+          "documentation":"<p>The name of the batch inference job to create.</p>"
+        },
+        "solutionVersionArn":{
+          "shape":"Arn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference recommendations.</p>"
+        },
+        "numResults":{
+          "shape":"NumBatchResults",
+          "documentation":"<p>The number of recommendations to retrieve.</p>"
+        },
+        "jobInput":{
+          "shape":"BatchInferenceJobInput",
+          "documentation":"<p>The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in JSON format.</p>"
+        },
+        "jobOutput":{
+          "shape":"BatchInferenceJobOutput",
+          "documentation":"<p>The path to the Amazon S3 bucket where the job's output will be stored.</p>"
+        },
+        "roleArn":{
+          "shape":"RoleArn",
+          "documentation":"<p>The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input and output Amazon S3 buckets respectively.</p>"
+        }
+      }
+    },
+    "CreateBatchInferenceJobResponse":{
+      "type":"structure",
+      "members":{
+        "batchInferenceJobArn":{
+          "shape":"Arn",
+          "documentation":"<p>The ARN of the batch inference job.</p>"
+        }
+      }
+    },
     "CreateCampaignRequest":{
       "type":"structure",
       "required":[
@@ -1550,6 +1749,25 @@
         }
       }
     },
+    "DescribeBatchInferenceJobRequest":{
+      "type":"structure",
+      "required":["batchInferenceJobArn"],
+      "members":{
+        "batchInferenceJobArn":{
+          "shape":"Arn",
+          "documentation":"<p>The ARN of the batch inference job to describe.</p>"
+        }
+      }
+    },
+    "DescribeBatchInferenceJobResponse":{
+      "type":"structure",
+      "members":{
+        "batchInferenceJob":{
+          "shape":"BatchInferenceJob",
+          "documentation":"<p>Information on the specified batch inference job.</p>"
+        }
+      }
+    },
     "DescribeCampaignRequest":{
       "type":"structure",
       "required":["campaignArn"],
@@ -2027,6 +2245,36 @@
       "documentation":"<p>The limit on the number of requests per second has been exceeded.</p>",
       "exception":true
     },
+    "ListBatchInferenceJobsRequest":{
+      "type":"structure",
+      "members":{
+        "solutionVersionArn":{
+          "shape":"Arn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the solution version from which the batch inference jobs were created.</p>"
+        },
+        "nextToken":{
+          "shape":"NextToken",
+          "documentation":"<p>The token to request the next page of results.</p>"
+        },
+        "maxResults":{
+          "shape":"MaxResults",
+          "documentation":"<p>The maximum number of batch inference job results to return in each page. The default value is 100.</p>"
+        }
+      }
+    },
+    "ListBatchInferenceJobsResponse":{
+      "type":"structure",
+      "members":{
+        "batchInferenceJobs":{
+          "shape":"BatchInferenceJobs",
+          "documentation":"<p>A list containing information on each job that is returned.</p>"
+        },
+        "nextToken":{
+          "shape":"NextToken",
+          "documentation":"<p>The token to use to retrieve the next page of results. The value is <code>null</code> when there are no more results to return.</p>"
+        }
+      }
+    },
     "ListCampaignsRequest":{
       "type":"structure",
       "members":{
@@ -2319,6 +2567,7 @@
       "type":"string",
       "max":1300
     },
+    "NumBatchResults":{"type":"integer"},
     "ParameterName":{
       "type":"string",
       "max":256
@@ -2445,6 +2694,21 @@
       "max":256,
       "pattern":"arn:([a-z\\d-]+):iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"
     },
+    "S3DataConfig":{
+      "type":"structure",
+      "required":["path"],
+      "members":{
+        "path":{
+          "shape":"S3Location",
+          "documentation":"<p>The file path of the Amazon S3 bucket.</p>"
+        },
+        "kmsKeyArn":{
+          "shape":"KmsKeyArn",
+          "documentation":"<p>The Amazon Resource Name (ARN) of the Amazon Key Management Service (KMS) key that Amazon Personalize uses to encrypt or decrypt the input and output files of a batch inference job.</p>"
+        }
+      },
+      "documentation":"<p>The configuration details of an Amazon S3 input or output bucket.</p>"
+    },
     "S3Location":{
       "type":"string",
       "max":256
@@ -2521,7 +2785,7 @@
       },
       "hpoConfig":{
         "shape":"HPOConfig",
-        "documentation":"<p>Describes the properties for hyperparameter optimization (HPO). For use with the bring-your-own-recipe feature. Not used with Amazon Personalize predefined recipes.</p>"
+        "documentation":"<p>Describes the properties for hyperparameter optimization (HPO).</p>"
       },
       "algorithmHyperParameters":{
         "shape":"HyperParameters",
