-
Notifications
You must be signed in to change notification settings - Fork 5.8k
Expand file tree
/
Copy pathbasic.js
More file actions
225 lines (199 loc) · 7.4 KB
/
basic.js
File metadata and controls
225 lines (199 loc) · 7.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
/* This example code shows how Amazon S3 can be used as a core component of an application.
* You'll do the following:
* - Create a bucket.
* - Upload files to the bucket.
* - List files in the bucket.
* - Copy files from another bucket to this one.
* - Download files from the bucket.
* - Empty the bucket.
* - Delete the bucket.
*/
// snippet-start:[javascript.v3.s3.scenarios.basic.imports]
// Used to check if currently running file is this file.
import { fileURLToPath } from "node:url";
import { readdirSync, readFileSync, writeFileSync } from "node:fs";
// Local helper utils.
import { dirnameFromMetaUrl } from "@aws-doc-sdk-examples/lib/utils/util-fs.js";
import { Prompter } from "@aws-doc-sdk-examples/lib/prompter.js";
import { wrapText } from "@aws-doc-sdk-examples/lib/utils/util-string.js";
import {
S3Client,
CreateBucketCommand,
PutObjectCommand,
ListObjectsCommand,
CopyObjectCommand,
GetObjectCommand,
DeleteObjectsCommand,
DeleteBucketCommand,
} from "@aws-sdk/client-s3";
// snippet-end:[javascript.v3.s3.scenarios.basic.imports]
// Shared interactive prompter used for all CLI input in this scenario.
const prompter = new Prompter();
// Confirmation message shown between scenario steps.
const continueMessage = "Continue?";
// snippet-start:[javascript.v3.s3.scenarios.basic.S3Client]
// The Region can be provided as an argument to S3Client or
// declared in the AWS configuration file. In this case
// we're using the Region provided in the AWS configuration.
const s3Client = new S3Client({});
// snippet-end:[javascript.v3.s3.scenarios.basic.S3Client]
// snippet-start:[javascript.v3.s3.scenarios.basic.CreateBucket]
/**
 * Prompt the user for a globally unique bucket name and create that bucket.
 *
 * @returns {Promise<string>} The name of the newly created bucket.
 */
export const createBucket = async () => {
  const bucketName = await prompter.input({
    message: "Enter a bucket name. Bucket names must be globally unique:",
  });
  await s3Client.send(new CreateBucketCommand({ Bucket: bucketName }));
  console.log("Bucket created successfully.\n");
  return bucketName;
};
// snippet-end:[javascript.v3.s3.scenarios.basic.CreateBucket]
// snippet-start:[javascript.v3.s3.scenarios.basic.PutObject]
/**
 * Upload every file found in a local folder to the given bucket.
 * Each file's name is used as its object key.
 *
 * @param {{ bucketName: string, folderPath: string }} param0
 */
export const uploadFilesToBucket = async ({ bucketName, folderPath }) => {
  console.log(`Uploading files from ${folderPath}\n`);
  // Uploads happen one at a time so progress is reported per file.
  for (const key of readdirSync(folderPath)) {
    const body = readFileSync(`${folderPath}/${key}`);
    await s3Client.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Body: body,
        Key: key,
      }),
    );
    console.log(`${key} uploaded successfully.`);
  }
};
// snippet-end:[javascript.v3.s3.scenarios.basic.PutObject]
// snippet-start:[javascript.v3.s3.scenarios.basic.ListObjects]
/**
 * List the objects in a bucket and print their keys as a bulleted list.
 *
 * @param {{ bucketName: string }} param0
 */
export const listFilesInBucket = async ({ bucketName }) => {
  const command = new ListObjectsCommand({ Bucket: bucketName });
  const { Contents } = await s3Client.send(command);
  // ListObjects omits the `Contents` field entirely when the bucket is
  // empty, so fall back to an empty array to avoid a TypeError.
  const contentsList = (Contents ?? []).map((c) => ` • ${c.Key}`).join("\n");
  console.log("\nHere's a list of files in the bucket:");
  console.log(`${contentsList}\n`);
};
// snippet-end:[javascript.v3.s3.scenarios.basic.ListObjects]
// snippet-start:[javascript.v3.s3.scenarios.basic.CopyObject]
/**
 * Interactively copy objects from another bucket into the destination bucket.
 * After each successful copy the user is asked again (via recursion) whether
 * to copy another object; on failure the user may retry the failed copy.
 *
 * @param {{ destinationBucket: string }} param0 - Bucket to copy objects into.
 */
export const copyFileFromBucket = async ({ destinationBucket }) => {
  const proceed = await prompter.confirm({
    message: "Would you like to copy an object from another bucket?",
  });
  if (!proceed) {
    // User declined; nothing to do.
    return;
  }
  const copy = async () => {
    try {
      const sourceBucket = await prompter.input({
        message: "Enter source bucket name:",
      });
      const sourceKey = await prompter.input({
        message: "Enter source key:",
      });
      const destinationKey = await prompter.input({
        message: "Enter destination key:",
      });
      // CopySource is the "<bucket>/<key>" path of the object to copy.
      const command = new CopyObjectCommand({
        Bucket: destinationBucket,
        CopySource: `${sourceBucket}/${sourceKey}`,
        Key: destinationKey,
      });
      await s3Client.send(command);
      // Success: recurse to offer copying another object.
      await copyFileFromBucket({ destinationBucket });
    } catch (err) {
      console.error("Copy error.");
      console.error(err);
      // On failure, offer to retry the copy with new inputs.
      const retryAnswer = await prompter.confirm({ message: "Try again?" });
      if (retryAnswer) {
        await copy();
      }
    }
  };
  await copy();
};
// snippet-end:[javascript.v3.s3.scenarios.basic.CopyObject]
// snippet-start:[javascript.v3.s3.scenarios.basic.GetObject]
/**
 * Download every object in a bucket to a user-provided local directory.
 * Each object's key is used as the local file name.
 *
 * @param {{ bucketName: string }} param0
 */
export const downloadFilesFromBucket = async ({ bucketName }) => {
  const { Contents } = await s3Client.send(
    new ListObjectsCommand({ Bucket: bucketName }),
  );
  const path = await prompter.input({
    message: "Enter destination path for files:",
  });
  // `Contents` is absent when the bucket is empty — iterate over an
  // empty list rather than throwing a TypeError.
  for (const content of Contents ?? []) {
    const obj = await s3Client.send(
      new GetObjectCommand({ Bucket: bucketName, Key: content.Key }),
    );
    writeFileSync(
      `${path}/${content.Key}`,
      await obj.Body.transformToByteArray(),
    );
  }
  console.log("Files downloaded successfully.\n");
};
// snippet-end:[javascript.v3.s3.scenarios.basic.GetObject]
// snippet-start:[javascript.v3.s3.scenarios.basic.clean]
/**
 * Delete every object in a bucket so the bucket itself can be deleted.
 *
 * @param {{ bucketName: string }} param0
 */
export const emptyBucket = async ({ bucketName }) => {
  const listObjectsCommand = new ListObjectsCommand({ Bucket: bucketName });
  const { Contents } = await s3Client.send(listObjectsCommand);
  // `Contents` is absent when the bucket has no objects, and DeleteObjects
  // rejects an empty `Objects` array — so guard both cases.
  const keys = (Contents ?? []).map((c) => c.Key);
  if (keys.length > 0) {
    const deleteObjectsCommand = new DeleteObjectsCommand({
      Bucket: bucketName,
      Delete: { Objects: keys.map((key) => ({ Key: key })) },
    });
    await s3Client.send(deleteObjectsCommand);
  }
  console.log(`${bucketName} emptied successfully.\n`);
};
/**
 * Delete the bucket. The bucket must already be empty.
 *
 * @param {{ bucketName: string }} param0
 */
export const deleteBucket = async ({ bucketName }) => {
  await s3Client.send(new DeleteBucketCommand({ Bucket: bucketName }));
  console.log(`${bucketName} deleted successfully.\n`);
};
// snippet-end:[javascript.v3.s3.scenarios.basic.clean]
// snippet-start:[javascript.v3.s3.scenarios.basic.main]
/**
 * Run the full scenario: create a bucket, upload sample files, list them,
 * optionally copy objects in from another bucket, download everything,
 * then empty and delete the bucket. Errors are caught and logged so the
 * process exits cleanly.
 */
const main = async () => {
  // Directory of bundled sample files, resolved relative to this module.
  const OBJECT_DIRECTORY = `${dirnameFromMetaUrl(
    import.meta.url,
  )}../../../../resources/sample_files/.sample_media`;
  try {
    console.log(wrapText("Welcome to the Amazon S3 getting started example."));
    console.log("Let's create a bucket.");
    const bucketName = await createBucket();
    await prompter.confirm({ message: continueMessage });
    console.log(wrapText("File upload."));
    console.log(
      "I have some default files ready to go. You can edit the source code to provide your own.",
    );
    await uploadFilesToBucket({
      bucketName,
      folderPath: OBJECT_DIRECTORY,
    });
    await listFilesInBucket({ bucketName });
    await prompter.confirm({ message: continueMessage });
    console.log(wrapText("Copy files."));
    await copyFileFromBucket({ destinationBucket: bucketName });
    await listFilesInBucket({ bucketName });
    await prompter.confirm({ message: continueMessage });
    console.log(wrapText("Download files."));
    await downloadFilesFromBucket({ bucketName });
    console.log(wrapText("Clean up."));
    // Objects must be deleted before the bucket itself can be removed.
    await emptyBucket({ bucketName });
    await deleteBucket({ bucketName });
  } catch (err) {
    console.error(err);
  }
};
// snippet-end:[javascript.v3.s3.scenarios.basic.main]
// snippet-start:[javascript.v3.s3.scenarios.basic.runner]
// Invoke main function if this file was run directly.
// Comparing process.argv[1] against this module's resolved path
// distinguishes `node basic.js` from being imported by another module.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  main();
}
// snippet-end:[javascript.v3.s3.scenarios.basic.runner]