Commit 008bca0

feat: update file upload process - change API endpoint and handle additional upload parameters

1 parent 8777415 commit 008bca0

File tree: 3 files changed, +35 -218 lines changed

  custom/uploader.vue
  index.ts
  types.ts

custom/uploader.vue

+6 -4 (6 additions, 4 deletions)

@@ -229,8 +229,8 @@ const onFileChange = async (e) => {
       reader.readAsDataURL(file);
     }

-    const { uploadUrl, tagline, s3Path, error } = await callAdminForthApi({
-      path: `/plugin/${props.meta.pluginInstanceId}/get_s3_upload_url`,
+    const { uploadUrl, uploadExtraParams, filePath, error } = await callAdminForthApi({
+      path: `/plugin/${props.meta.pluginInstanceId}/get_file_upload_url`,
       method: 'POST',
       body: {
         originalFilename: nameNoExtension,
@@ -266,7 +266,9 @@ const onFileChange = async (e) => {
       });
       xhr.open('PUT', uploadUrl, true);
       xhr.setRequestHeader('Content-Type', type);
-      xhr.setRequestHeader('x-amz-tagging', tagline);
+      uploadExtraParams && Object.entries(uploadExtraParams).forEach(([key, value]: [string, string]) => {
+        xhr.setRequestHeader(key, value);
+      })
       xhr.send(file);
     });
     if (!success) {
@@ -284,7 +286,7 @@ const onFileChange = async (e) => {
       return;
     }
     uploaded.value = true;
-    emit('update:value', s3Path);
+    emit('update:value', filePath);
   } catch (error) {
     console.error('Error uploading file:', error);
     adminforth.alert({
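
For orientation, this is the response shape the uploader now consumes from the renamed get_file_upload_url endpoint, read off the destructuring above and the endpoint's return value in index.ts below. It is a sketch, not a type exported by the plugin; with an S3-backed adapter, uploadExtraParams would typically carry the x-amz-tagging header the component previously set by hand.

```ts
// Sketch of the response consumed above (field names taken from this diff; not an exported type).
interface FileUploadUrlResponse {
  uploadUrl: string;                           // pre-signed PUT URL the browser uploads the file to
  filePath: string;                            // storage key, emitted via update:value on success
  uploadExtraParams?: Record<string, string>;  // extra request headers, e.g. { 'x-amz-tagging': '...' } for S3
  previewUrl?: string;                         // preview link also returned by the endpoint
  error?: string;                              // set when the backend rejects the upload request
}
```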

index.ts

+18 -185 (18 additions, 185 deletions)

@@ -1,7 +1,5 @@

 import { PluginOptions } from './types.js';
-import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
-import { ExpirationStatus, GetObjectCommand, ObjectCannedACL, PutObjectCommand, S3 } from '@aws-sdk/client-s3';
 import { AdminForthPlugin, AdminForthResourceColumn, AdminForthResource, Filters, IAdminForth, IHttpServer, suggestIfTypo } from "adminforth";
 import { Readable } from "stream";
 import { RateLimiter } from "adminforth";
@@ -30,76 +28,15 @@ export default class UploadPlugin extends AdminForthPlugin {
   }

   async setupLifecycleRule() {
-    // check that lifecyle rule "adminforth-unused-cleaner" exists
-    const CLEANUP_RULE_ID = 'adminforth-unused-cleaner';
-
-    const s3 = new S3({
-      credentials: {
-        accessKeyId: this.options.s3AccessKeyId,
-        secretAccessKey: this.options.s3SecretAccessKey,
-      },
-      region: this.options.s3Region,
-    });
-
-    // check bucket exists
-    const bucketExists = s3.headBucket({ Bucket: this.options.s3Bucket })
-    if (!bucketExists) {
-      throw new Error(`Bucket ${this.options.s3Bucket} does not exist`);
-    }
-
-    // check that lifecycle rule exists
-    let ruleExists: boolean = false;
-
-    try {
-      const lifecycleConfig: any = await s3.getBucketLifecycleConfiguration({ Bucket: this.options.s3Bucket });
-      ruleExists = lifecycleConfig.Rules.some((rule: any) => rule.ID === CLEANUP_RULE_ID);
-    } catch (e: any) {
-      if (e.name !== 'NoSuchLifecycleConfiguration') {
-        console.error(`⛔ Error checking lifecycle configuration, please check keys have permissions to
-getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${this.options.s3Region}. Exception:`, e);
-        throw e;
-      } else {
-        ruleExists = false;
-      }
-    }
-
-    if (!ruleExists) {
-      // create
-      // rule deletes object has tag adminforth-candidate-for-cleanup = true after 2 days
-      const params = {
-        Bucket: this.options.s3Bucket,
-        LifecycleConfiguration: {
-          Rules: [
-            {
-              ID: CLEANUP_RULE_ID,
-              Status: ExpirationStatus.Enabled,
-              Filter: {
-                Tag: {
-                  Key: ADMINFORTH_NOT_YET_USED_TAG,
-                  Value: 'true'
-                }
-              },
-              Expiration: {
-                Days: 2
-              }
-            }
-          ]
-        }
-      };
-
-      await s3.putBucketLifecycleConfiguration(params);
-    }
+    this.options.storage.adapter.setupLifecycle();
   }

-  async genPreviewUrl(record: any, s3: S3) {
+  async genPreviewUrl(record: any) {
     if (this.options.preview?.previewUrl) {
-      record[`previewUrl_${this.pluginInstanceId}`] = this.options.preview.previewUrl({ s3Path: record[this.options.pathColumnName] });
+      record[`previewUrl_${this.pluginInstanceId}`] = this.options.preview.previewUrl({ filePath: record[this.options.pathColumnName] });
       return;
     }
-    const previewUrl = await await getSignedUrl(s3, new GetObjectCommand({
-      Bucket: this.options.s3Bucket,
-      Key: record[this.options.pathColumnName],
-    }));
+    const previewUrl = await this.options.storage.adapter.getDownloadUrl(record[this.options.pathColumnName], 1800);

     record[`previewUrl_${this.pluginInstanceId}`] = previewUrl;
   }
@@ -222,23 +159,9 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
       process.env.HEAVY_DEBUG && console.log('💾💾 after save ', record?.id);

       if (record[pathColumnName]) {
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
         process.env.HEAVY_DEBUG && console.log('🪥🪥 remove ObjectTagging', record[pathColumnName]);
         // let it crash if it fails: this is a new file which just was uploaded.
-        await s3.putObjectTagging({
-          Bucket: this.options.s3Bucket,
-          Key: record[pathColumnName],
-          Tagging: {
-            TagSet: []
-          }
-        });
+        await this.options.storage.adapter.markKeyForNotDeletation(record[pathColumnName]);
       }
       return { ok: true };
     });
@@ -255,16 +178,7 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
         return { ok: true };
       }
       if (record[pathColumnName]) {
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
-
-        await this.genPreviewUrl(record, s3);
+        await this.genPreviewUrl(record)
       }
       return { ok: true };
     });
@@ -275,18 +189,9 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t

     if (pathColumn.showIn.list) {
       resourceConfig.hooks.list.afterDatasourceResponse.push(async ({ response }: { response: any }) => {
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
-
         await Promise.all(response.map(async (record: any) => {
           if (record[this.options.pathColumnName]) {
-            await this.genPreviewUrl(record, s3);
+            await this.genPreviewUrl(record)
           }
         }));
         return { ok: true };
@@ -298,28 +203,8 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
     // add delete hook which sets tag adminforth-candidate-for-cleanup to true
     resourceConfig.hooks.delete.afterSave.push(async ({ record }: { record: any }) => {
       if (record[pathColumnName]) {
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
-
         try {
-          await s3.putObjectTagging({
-            Bucket: this.options.s3Bucket,
-            Key: record[pathColumnName],
-            Tagging: {
-              TagSet: [
-                {
-                  Key: ADMINFORTH_NOT_YET_USED_TAG,
-                  Value: 'true'
-                }
-              ]
-            }
-          });
+          await this.options.storage.adapter.markKeyForDeletation(record[pathColumnName]);
         } catch (e) {
           // file might be e.g. already deleted, so we catch error
           console.error(`Error setting tag ${ADMINFORTH_NOT_YET_USED_TAG} to true for object ${record[pathColumnName]}. File will not be auto-cleaned up`, e);
@@ -345,30 +230,10 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
     resourceConfig.hooks.edit.afterSave.push(async ({ updates, oldRecord }: { updates: any, oldRecord: any }) => {

       if (updates[virtualColumn.name] || updates[virtualColumn.name] === null) {
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
-
         if (oldRecord[pathColumnName]) {
           // put tag to delete old file
           try {
-            await s3.putObjectTagging({
-              Bucket: this.options.s3Bucket,
-              Key: oldRecord[pathColumnName],
-              Tagging: {
-                TagSet: [
-                  {
-                    Key: ADMINFORTH_NOT_YET_USED_TAG,
-                    Value: 'true'
-                  }
-                ]
-              }
-            });
+            await this.options.storage.adapter.markKeyForDeletation(oldRecord[pathColumnName]);
           } catch (e) {
             // file might be e.g. already deleted, so we catch error
             console.error(`Error setting tag ${ADMINFORTH_NOT_YET_USED_TAG} to true for object ${oldRecord[pathColumnName]}. File will not be auto-cleaned up`, e);
@@ -377,13 +242,7 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
         if (updates[virtualColumn.name] !== null) {
           // remove tag from new file
           // in this case we let it crash if it fails: this is a new file which just was uploaded.
-          await s3.putObjectTagging({
-            Bucket: this.options.s3Bucket,
-            Key: updates[pathColumnName],
-            Tagging: {
-              TagSet: []
-            }
-          });
+          await this.options.storage.adapter.markKeyForNotDeletation(updates[pathColumnName]);
         }
       }
       return { ok: true };
@@ -414,7 +273,7 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t

     server.endpoint({
       method: 'POST',
-      path: `/plugin/${this.pluginInstanceId}/get_s3_upload_url`,
+      path: `/plugin/${this.pluginInstanceId}/get_file_upload_url`,
       handler: async ({ body }) => {
         const { originalFilename, contentType, size, originalExtension, recordPk } = body;

@@ -433,49 +292,23 @@ getBucketLifecycleConfiguration on bucket ${this.options.s3Bucket} in region ${t
           )
         }

-        const s3Path: string = this.options.s3Path({ originalFilename, originalExtension, contentType, record });
-        if (s3Path.startsWith('/')) {
+        const filePath: string = this.options.filePath({ originalFilename, originalExtension, contentType, record });
+        if (filePath.startsWith('/')) {
           throw new Error('s3Path should not start with /, please adjust s3path function to not return / at the start of the path');
         }
-        const s3 = new S3({
-          credentials: {
-            accessKeyId: this.options.s3AccessKeyId,
-            secretAccessKey: this.options.s3SecretAccessKey,
-          },
-
-          region: this.options.s3Region,
-        });
-
         const tagline = `${ADMINFORTH_NOT_YET_USED_TAG}=true`;
-        const params = {
-          Bucket: this.options.s3Bucket,
-          Key: s3Path,
-          ContentType: contentType,
-          ACL: (this.options.s3ACL || 'private') as ObjectCannedACL,
-          Tagging: tagline,
-        };
-
-        const uploadUrl = await await getSignedUrl(s3, new PutObjectCommand(params), {
-          expiresIn: 1800,
-          unhoistableHeaders: new Set(['x-amz-tagging']),
-        });
-
+        const { uploadUrl, uploadExtraParams } = await this.options.storage.adapter.getUploadSignedUrl(filePath, contentType, 1800);
         let previewUrl;
         if (this.options.preview?.previewUrl) {
-          previewUrl = this.options.preview.previewUrl({ s3Path });
-        } else if (this.options.s3ACL === 'public-read') {
-          previewUrl = `https://${this.options.s3Bucket}.s3.${this.options.s3Region}.amazonaws.com/${s3Path}`;
+          previewUrl = this.options.preview.previewUrl({ filePath });
         } else {
-          previewUrl = await getSignedUrl(s3, new GetObjectCommand({
-            Bucket: this.options.s3Bucket,
-            Key: s3Path,
-          }));
+          previewUrl = await this.options.storage.adapter.getDownloadUrl(filePath, 1800);
         }

         return {
           uploadUrl,
-          s3Path,
-          tagline,
+          filePath,
+          uploadExtraParams,
           previewUrl,
         };
       }
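
Taken together, the adapter calls above imply roughly the following contract. This is a minimal sketch inferred from this diff alone; the authoritative StorageAdapter type is exported by adminforth and may differ in naming and signatures.

```ts
// Inferred from the adapter usage in index.ts above; the real interface lives in "adminforth".
interface StorageAdapterSketch {
  // Replaces the inline setupLifecycleRule() logic (bucket check plus cleanup lifecycle rule).
  setupLifecycle(): Promise<void>;
  // Pre-signed upload URL plus any extra headers the client must attach to its PUT request.
  getUploadSignedUrl(
    filePath: string,
    contentType: string,
    expiresInSeconds: number,
  ): Promise<{ uploadUrl: string; uploadExtraParams?: Record<string, string> }>;
  // Pre-signed download URL used to build preview links (replaces GetObjectCommand + getSignedUrl).
  getDownloadUrl(filePath: string, expiresInSeconds: number): Promise<string>;
  // Mark a key as a cleanup candidate, or clear that mark (replaces the putObjectTagging calls).
  markKeyForDeletation(filePath: string): Promise<void>;
  markKeyForNotDeletation(filePath: string): Promise<void>;
}
```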

types.ts

+11 -29 (11 additions, 29 deletions)

@@ -1,4 +1,4 @@
-import { AdminUser, ImageGenerationAdapter } from "adminforth";
+import { AdminUser, ImageGenerationAdapter, StorageAdapter } from "adminforth";

 export type PluginOptions = {

@@ -18,32 +18,6 @@ export type PluginOptions = {
    */
   maxFileSize?: number;

-  /**
-   * S3 bucket name where we will upload the files, e.g. 'my-bucket'
-   */
-  s3Bucket: string,
-
-  /**
-   * S3 region, e.g. 'us-east-1'
-   */
-  s3Region: string,
-
-  /**
-   * S3 access key id
-   */
-  s3AccessKeyId: string,
-
-  /**
-   * S3 secret access key
-   */
-  s3SecretAccessKey: string,
-
-  /**
-   * ACL which will be set to uploaded file, e.g. 'public-read'.
-   * If you want to use 'public-read', it is your responsibility to set the "ACL Enabled" to true in the S3 bucket policy and Uncheck "Block all public access" in the bucket settings.
-   */
-  s3ACL?: string,
-
   /**
    * The path where the file will be uploaded to the S3 bucket, same path will be stored in the database
    * in the column specified in {@link pathColumnName}
@@ -55,7 +29,7 @@ export type PluginOptions = {
    * ```
    *
    */
-  s3Path: ({originalFilename, originalExtension, contentType, record }: {
+  filePath: ({originalFilename, originalExtension, contentType, record }: {
     originalFilename: string,
     originalExtension: string,
     contentType: string,
@@ -113,7 +87,7 @@ export type PluginOptions = {
    * ```
    *
    */
-  previewUrl?: ({s3Path}) => string,
+  previewUrl?: ({filePath}) => string,
 }


@@ -181,4 +155,12 @@ export type PluginOptions = {

   }

+  storage?: {
+    /**
+     * The adapter used to store the files.
+     * For now only S3 adapter is supported.
+     */
+    adapter: StorageAdapter,
+  }
+
 }
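
For plugin users, the net effect of the types.ts change is that the s3* credentials options move into a storage adapter and s3Path becomes filePath. Below is a hypothetical configuration sketch, assuming only the option names visible in this diff; the adapter instance is a placeholder and other required PluginOptions fields are omitted (hence Partial).

```ts
import type { PluginOptions } from './types.js';
import type { StorageAdapter } from 'adminforth';

// Placeholder: any StorageAdapter implementation (per the comment above, only an S3 adapter exists for now).
declare const s3Adapter: StorageAdapter;

const uploadOptions: Partial<PluginOptions> = {
  // Replaces the former s3Path option; must not return a leading '/'.
  filePath: ({ originalFilename, originalExtension }) =>
    `uploads/${originalFilename}-${Date.now()}.${originalExtension}`,
  storage: {
    adapter: s3Adapter,
  },
};
```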
