|
| 1 | +/// <reference types="mocha" /> |
/**
 * Integration tests that make sure the client can make requests across
 * platforms: Node.js, Chromium and Firefox. These tests are written in mocha.
 */
| 6 | +import * as chai from "chai"; |
| 7 | +import * as chaiAsPromised from "chai-as-promised"; |
| 8 | +import { S3 } from "../index"; |
| 9 | +import { Credentials } from "@aws-sdk/types"; |
| 10 | +import { createBuffer } from "./helpers"; |
| 11 | +chai.use(chaiAsPromised); |
| 12 | +const { expect } = chai; |
| 13 | +// There will be default values of defaultRegion, credentials, and isBrowser variable in browser tests. |
| 14 | +// Define the values for Node.js tests |
| 15 | +const region: string | undefined = |
| 16 | + (globalThis as any).defaultRegion || undefined; |
| 17 | +const credentials: Credentials | undefined = |
| 18 | + (globalThis as any).credentials || undefined; |
| 19 | +const isBrowser: boolean | undefined = (globalThis as any).isBrowser || false; |
| 20 | + |
| 21 | +// this bucket requires enabling CORS: |
| 22 | +// AllowedOrigin(*), AllowedMethod(GET, PUT, POST, DELETE, HEAD), ExposeHeader(ETag), AllowedHeader(*) |
| 23 | +const Bucket = "aws-sdk-unit-test"; |
| 24 | +let Key = `${Date.now()}`; |
| 25 | + |
| 26 | +describe("@aws-sdk/client-s3", () => { |
| 27 | + const client = new S3({ |
| 28 | + region: region, |
| 29 | + credentials |
| 30 | + }); |
| 31 | + |
| 32 | + describe("PutObject", () => { |
| 33 | + before(() => { |
| 34 | + Key = `${Date.now()}`; |
| 35 | + }); |
| 36 | + after(async () => { |
| 37 | + await client.deleteObject({ Bucket, Key }); |
| 38 | + }); |
| 39 | + if (isBrowser) { |
| 40 | + const buf = createBuffer("1KB"); |
| 41 | + it("should succeed with blob body", async () => { |
| 42 | + const result = await client.putObject({ |
| 43 | + Bucket, |
| 44 | + Key, |
| 45 | + Body: new Blob([buf]) |
| 46 | + }); |
| 47 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 48 | + }); |
| 49 | + |
| 50 | + it("should succeed with TypedArray body", async () => { |
| 51 | + const result = await client.putObject({ |
| 52 | + Bucket, |
| 53 | + Key, |
| 54 | + Body: buf |
| 55 | + }); |
| 56 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 57 | + }); |
| 58 | + |
| 59 | + it("should succeed with ReadableStream body", async () => { |
| 60 | + const length = 10 * 1000; // 10KB |
| 61 | + const chunkSize = 10; |
| 62 | + const readableStream = new ReadableStream({ |
| 63 | + start(controller) { |
| 64 | + let sizeLeft = length; |
| 65 | + while (sizeLeft > 0) { |
| 66 | + let chunk = ""; |
| 67 | + for (let i = 0; i < Math.min(sizeLeft, chunkSize); i++) { |
| 68 | + chunk += "x"; |
| 69 | + } |
| 70 | + controller.enqueue(chunk); |
| 71 | + sizeLeft -= chunk.length; |
| 72 | + } |
| 73 | + } |
| 74 | + }); |
| 75 | + const result = await client.putObject({ |
| 76 | + Bucket, |
| 77 | + Key, |
| 78 | + Body: readableStream |
| 79 | + }); |
| 80 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 81 | + }); |
| 82 | + } else { |
| 83 | + it("should succeed with Node.js readable stream body", async () => { |
| 84 | + const length = 10 * 1000; // 10KB |
| 85 | + const chunkSize = 10; |
| 86 | + const { Readable } = require("stream"); |
| 87 | + let sizeLeft = length; |
| 88 | + const inputStream = new Readable({ |
| 89 | + read() { |
| 90 | + if (sizeLeft <= 0) { |
| 91 | + this.push(null); //end stream; |
| 92 | + return; |
| 93 | + } |
| 94 | + let chunk = ""; |
| 95 | + for (let i = 0; i < Math.min(sizeLeft, chunkSize); i++) { |
| 96 | + chunk += "x"; |
| 97 | + } |
| 98 | + this.push(chunk); |
| 99 | + sizeLeft -= chunk.length; |
| 100 | + } |
| 101 | + }); |
| 102 | + inputStream.size = length; // This is required |
| 103 | + const result = await client.putObject({ |
| 104 | + Bucket, |
| 105 | + Key, |
| 106 | + Body: inputStream |
| 107 | + }); |
| 108 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 109 | + }); |
| 110 | + } |
| 111 | + }); |
| 112 | + |
| 113 | + describe("GetObject", function () { |
| 114 | + this.timeout(10 * 1000); |
| 115 | + before(async () => { |
| 116 | + Key = `${Date.now()}`; |
| 117 | + }); |
| 118 | + |
| 119 | + after(async () => { |
| 120 | + await client.deleteObject({ Bucket, Key }); |
| 121 | + }); |
| 122 | + |
| 123 | + it("should succeed with valid body payload", async () => { |
| 124 | + // prepare the object. |
| 125 | + const body = createBuffer("1MB"); |
| 126 | + await client.putObject({ Bucket, Key, Body: body }); |
| 127 | + const result = await client.getObject({ Bucket, Key }); |
| 128 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 129 | + if (isBrowser) { |
| 130 | + expect(result.Body).to.be.instanceOf(ReadableStream); |
| 131 | + } else { |
| 132 | + const { Readable } = require("stream"); |
| 133 | + expect(result.Body).to.be.instanceOf(Readable); |
| 134 | + } |
| 135 | + }); |
| 136 | + }); |
| 137 | + |
| 138 | + describe("ListObjects", () => { |
| 139 | + before(() => { |
| 140 | + Key = `${Date.now()}`; |
| 141 | + }); |
| 142 | + it("should succeed with valid bucket", async () => { |
| 143 | + const result = await client.listObjects({ |
| 144 | + Bucket |
| 145 | + }); |
| 146 | + expect(result.$metadata.httpStatusCode).to.equal(200); |
| 147 | + expect(result.Contents).to.be.instanceOf(Array); |
| 148 | + }); |
| 149 | + |
| 150 | + it("should throw with invalid bucket", () => |
| 151 | + expect( |
| 152 | + client.listObjects({ Bucket: "invalid-bucket" }) |
| 153 | + ).to.eventually.be.rejected.and.be.an.instanceOf(Error)); |
| 154 | + }); |
| 155 | + |
| 156 | + describe("MultipartUpload", () => { |
| 157 | + let UploadId: string; |
| 158 | + let Etag: string; |
| 159 | + const multipartObjectKey = `${Key}-multipart`; |
| 160 | + before(() => { |
| 161 | + Key = `${Date.now()}`; |
| 162 | + }); |
| 163 | + afterEach(async () => { |
| 164 | + if (UploadId) { |
| 165 | + await client.abortMultipartUpload({ |
| 166 | + Bucket, |
| 167 | + Key: multipartObjectKey, |
| 168 | + UploadId |
| 169 | + }); |
| 170 | + } |
| 171 | + await client.deleteObject({ |
| 172 | + Bucket, |
| 173 | + Key: multipartObjectKey |
| 174 | + }); |
| 175 | + }); |
| 176 | + |
| 177 | + it("should successfully create, upload list and complete", async () => { |
| 178 | + //create multipart upload |
| 179 | + const createResult = await client.createMultipartUpload({ |
| 180 | + Bucket, |
| 181 | + Key: multipartObjectKey |
| 182 | + }); |
| 183 | + expect(createResult.$metadata.httpStatusCode).to.equal(200); |
| 184 | + expect(typeof createResult.UploadId).to.equal("string"); |
| 185 | + UploadId = createResult.UploadId as string; |
| 186 | + |
| 187 | + //upload part |
| 188 | + const uploadResult = await client.uploadPart({ |
| 189 | + Bucket, |
| 190 | + Key: multipartObjectKey, |
| 191 | + UploadId, |
| 192 | + PartNumber: 1, |
| 193 | + Body: createBuffer("1KB") |
| 194 | + }); |
| 195 | + expect(uploadResult.$metadata.httpStatusCode).to.equal(200); |
| 196 | + expect(typeof uploadResult.ETag).to.equal("string"); |
| 197 | + Etag = uploadResult.ETag as string; |
| 198 | + |
| 199 | + //list parts |
| 200 | + const listPartsResult = await client.listParts({ |
| 201 | + Bucket, |
| 202 | + Key: multipartObjectKey, |
| 203 | + UploadId |
| 204 | + }); |
| 205 | + expect(listPartsResult.$metadata.httpStatusCode).to.equal(200); |
| 206 | + expect(listPartsResult.Parts?.length).to.equal(1); |
| 207 | + expect(listPartsResult.Parts?.[0].ETag).to.equal(Etag); |
| 208 | + |
| 209 | + //complete multipart upload |
| 210 | + const completeResult = await client.completeMultipartUpload({ |
| 211 | + Bucket, |
| 212 | + Key: multipartObjectKey, |
| 213 | + UploadId, |
| 214 | + MultipartUpload: { Parts: [{ ETag: Etag, PartNumber: 1 }] } |
| 215 | + }); |
| 216 | + expect(completeResult.$metadata.httpStatusCode).to.equal(200); |
| 217 | + |
| 218 | + //validate the object is uploaded |
| 219 | + const headResult = await client.headObject({ |
| 220 | + Bucket, |
| 221 | + Key: multipartObjectKey |
| 222 | + }); |
| 223 | + expect(headResult.$metadata.httpStatusCode).to.equal(200); |
| 224 | + }); |
| 225 | + |
| 226 | + it("should successfully create, abort, and list upload", async () => { |
| 227 | + //create multipart upload |
| 228 | + const createResult = await client.createMultipartUpload({ |
| 229 | + Bucket, |
| 230 | + Key: multipartObjectKey |
| 231 | + }); |
| 232 | + expect(createResult.$metadata.httpStatusCode).to.equal(200); |
| 233 | + const toAbort = createResult.UploadId; |
| 234 | + expect(typeof toAbort).to.equal("string"); |
| 235 | + |
| 236 | + //abort multipart upload |
| 237 | + const abortResult = await client.abortMultipartUpload({ |
| 238 | + Bucket, |
| 239 | + Key: multipartObjectKey, |
| 240 | + UploadId: toAbort |
| 241 | + }); |
| 242 | + expect(abortResult.$metadata.httpStatusCode).to.equal(204); |
| 243 | + |
| 244 | + //validate multipart upload is aborted |
| 245 | + const listUploadsResult = await client.listMultipartUploads({ |
| 246 | + Bucket |
| 247 | + }); |
| 248 | + expect(listUploadsResult.$metadata.httpStatusCode).to.equal(200); |
| 249 | + expect( |
| 250 | + listUploadsResult.Uploads?.map(upload => upload.UploadId) |
| 251 | + ).not.to.contain(toAbort); |
| 252 | + }); |
| 253 | + }); |
| 254 | +}); |
0 commit comments