feat(s3): implement S3Uploader for file uploads

- Introduced S3Uploader class to handle file uploads to S3 with support for custom domains and automatic endpoint generation.
- Refactored BackupService to use S3Uploader for uploading backup files, improving clarity and maintainability.
- Added unit tests for S3Uploader to ensure functionality and reliability.
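
A minimal usage sketch of the new class (API taken from s3.util.ts below; the config values are placeholders for illustration):

  import { S3Uploader } from '~/utils/s3.util'

  const uploader = new S3Uploader({
    bucket: 'my-bucket', // placeholder values
    region: 'us-east-1',
    accessKey: 'AKIA...',
    secretKey: '...',
    // endpoint is optional; omit it to target the regional S3 endpoint
  })

  // PUTs the buffer with a SigV4 signature and resolves with the generated
  // object key, `<path>/<md5-of-content><ext>`
  await uploader.uploadFile(Buffer.from('...'), 'backups/backup.zip')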

Signed-off-by: Innei <tukon479@gmail.com>
Author: Innei
Date:   2025-05-18 23:31:51 +08:00
Commit: c52759181b (parent 4336e0fb97)

5 changed files with 337 additions and 1198 deletions

package.json

@@ -50,7 +50,6 @@
   "dependencies": {
     "@algolia/client-search": "^4.22.1",
     "@antfu/install-pkg": "1.1.0",
-    "@aws-sdk/client-s3": "3.802.0",
     "@babel/core": "7.27.1",
     "@babel/plugin-transform-modules-commonjs": "7.27.1",
     "@babel/plugin-transform-typescript": "7.27.1",

backup.service.ts

@@ -4,7 +4,6 @@ import path, { join, resolve } from 'node:path'
 import { flatten } from 'lodash'
 import { mkdirp } from 'mkdirp'
-import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3'
 import { $, cd } from '@mx-space/compiled'
 import {
   BadRequestException,
@@ -27,6 +26,7 @@ import { BACKUP_DIR, DATA_DIR } from '~/constants/path.constant'
 import { migrateDatabase } from '~/migration/migrate'
 import { EventManagerService } from '~/processors/helper/helper.event.service'
 import { RedisService } from '~/processors/redis/redis.service'
+import { S3Uploader } from '~/utils/s3.util'
 import { scheduleManager } from '~/utils/schedule.util'
 import { getFolderSize, installPKG } from '~/utils/system.util'
 import { getMediumDateTime } from '~/utils/time.util'
@@ -302,28 +302,21 @@ export class BackupService {
         return
       }
-      const s3 = new S3Client({
+      const s3 = new S3Uploader({
+        bucket,
         region,
+        accessKey: secretId,
+        secretKey,
         endpoint,
-        credentials: {
-          accessKeyId: secretId,
-          secretAccessKey: secretKey,
-        },
       })
       const remoteFileKey = backup.path.slice(backup.path.lastIndexOf('/') + 1)
-      const command = new PutObjectCommand({
-        Bucket: bucket,
-        Key: remoteFileKey,
-        Body: backup.buffer,
-        ContentType: 'application/zip',
-      })
       this.logger.log('--> Starting upload to S3')
-      await s3.send(command).catch((error) => {
+      await s3.uploadFile(backup.buffer, remoteFileKey).catch((error) => {
         this.logger.error('--> Upload failed')
         throw error
       })
       this.logger.log('--> Upload succeeded')
     })
   }

s3.util.spec.ts (new file)

@@ -0,0 +1,164 @@
import * as crypto from 'node:crypto'
import { beforeEach, describe, expect, it, vi } from 'vitest'

import type { S3UploaderOptions } from './s3.util'
import { S3Uploader } from './s3.util'

// Mock fetch
global.fetch = vi.fn()

describe('S3Uploader', () => {
  let uploader: S3Uploader
  let options: S3UploaderOptions
  let mockBuffer: Buffer

  beforeEach(() => {
    // Restore any crypto spies created by a previous test
    vi.restoreAllMocks()

    // Setup test data
    options = {
      bucket: 'test-bucket',
      region: 'us-east-1',
      accessKey: 'test-access-key',
      secretKey: 'test-secret-key',
      endpoint: 'https://test-endpoint.com',
    }
    uploader = new S3Uploader(options)
    mockBuffer = Buffer.from('test-file-content')

    // Reset and setup fetch mock
    vi.mocked(fetch).mockReset()
    vi.mocked(fetch).mockResolvedValue({
      ok: true,
      status: 200,
    } as Response)
  })

  describe('constructor and getters', () => {
    it('should initialize with provided options', () => {
      expect(uploader.bucket).toBe(options.bucket)
      expect(uploader.region).toBe(options.region)
      expect(uploader.accessKey).toBe(options.accessKey)
      expect(uploader.secretKey).toBe(options.secretKey)
      expect(uploader.endpoint).toBe(options.endpoint)
    })

    it('should generate endpoint when not provided', () => {
      const noEndpointOptions = { ...options, endpoint: undefined }
      const noEndpointUploader = new S3Uploader(noEndpointOptions)
      expect(noEndpointUploader.endpoint).toBe(
        `https://s3.${options.region}.amazonaws.com`,
      )
    })
  })

  describe('setters', () => {
    it('should update options', () => {
      const newOptions: S3UploaderOptions = {
        bucket: 'new-bucket',
        region: 'eu-west-1',
        accessKey: 'new-access-key',
        secretKey: 'new-secret-key',
      }
      uploader.setOptions(newOptions)
      expect(uploader.bucket).toBe(newOptions.bucket)
      expect(uploader.region).toBe(newOptions.region)
      expect(uploader.accessKey).toBe(newOptions.accessKey)
      expect(uploader.secretKey).toBe(newOptions.secretKey)
    })

    it('should update custom domain', () => {
      const domain = 'https://cdn.example.com'
      uploader.setCustomDomain(domain)
      vi.spyOn(crypto, 'createHash').mockImplementation(() => {
        return {
          update: () => ({
            digest: () => 'mock-hash',
          }),
        } as any
      })
      return uploader.uploadImage(mockBuffer, 'images').then((url) => {
        expect(url).toBe(`${domain}/images/mock-hash.png`)
      })
    })
  })

  describe('uploadImage', () => {
    it('should upload image and return the object key', async () => {
      // Mock the crypto hash function
      vi.spyOn(crypto, 'createHash').mockImplementation(() => {
        return {
          update: () => ({
            digest: () => 'mock-hash',
          }),
        } as any
      })
      const result = await uploader.uploadImage(mockBuffer, 'images')
      expect(result).toBe('images/mock-hash.png')
      expect(fetch).toHaveBeenCalledTimes(1)
      expect(fetch).toHaveBeenCalledWith(
        expect.stringContaining('/test-bucket/images/mock-hash.png'),
        expect.objectContaining({
          method: 'PUT',
          headers: expect.objectContaining({
            'Content-Type': 'image/png',
          }),
          body: mockBuffer,
        }),
      )
    })
  })

  describe('uploadFile', () => {
    it('should upload a file and return the object key', async () => {
      // Mock the crypto hash function
      vi.spyOn(crypto, 'createHash').mockImplementation(() => {
        return {
          update: () => ({
            digest: () => 'mock-hash',
          }),
        } as any
      })
      const result = await uploader.uploadFile(mockBuffer, 'files/document.pdf')
      expect(result).toBe('files/document.pdf/mock-hash.pdf')
      expect(fetch).toHaveBeenCalledTimes(1)
      expect(fetch).toHaveBeenCalledWith(
        expect.stringContaining(
          '/test-bucket/files/document.pdf/mock-hash.pdf',
        ),
        expect.objectContaining({
          method: 'PUT',
          headers: expect.objectContaining({
            'Content-Type': 'application/octet-stream',
          }),
          body: mockBuffer,
        }),
      )
    })
  })

  describe('uploadToS3', () => {
    it('should throw error when upload fails', async () => {
      vi.mocked(fetch).mockResolvedValueOnce({
        ok: false,
        status: 403,
      } as Response)
      await expect(
        uploader.uploadToS3('test-object', mockBuffer, 'text/plain'),
      ).rejects.toThrow('Upload failed with status code: 403')
    })

    it('should successfully upload to S3', async () => {
      await uploader.uploadToS3('test-object', mockBuffer, 'text/plain')
      expect(fetch).toHaveBeenCalledTimes(1)
      expect(fetch).toHaveBeenCalledWith(
        expect.stringContaining('/test-bucket/test-object'),
        expect.objectContaining({
          method: 'PUT',
          body: mockBuffer,
        }),
      )
    })
  })
})

s3.util.ts (new file)

@@ -0,0 +1,166 @@
import * as crypto from 'node:crypto'
import { extname } from 'node:path'

export interface S3UploaderOptions {
  bucket: string
  region: string
  accessKey: string
  secretKey: string
  endpoint?: string
}

export class S3Uploader {
  private options: S3UploaderOptions
  private customDomain: string = ''

  constructor(options: S3UploaderOptions) {
    this.options = options
  }

  get endpoint(): string {
    // Requests use path-style addressing (bucket in the URL path), so the
    // default endpoint must not also carry the bucket in the hostname.
    return (
      this.options.endpoint ||
      `https://s3.${this.options.region}.amazonaws.com`
    )
  }

  get bucket(): string {
    return this.options.bucket
  }

  get region(): string {
    return this.options.region
  }

  get accessKey(): string {
    return this.options.accessKey
  }

  get secretKey(): string {
    return this.options.secretKey
  }

  setOptions(options: S3UploaderOptions): void {
    this.options = options
  }

  setCustomDomain(domain: string): void {
    this.customDomain = domain
  }

  // Helper function to calculate HMAC-SHA256
  private hmacSha256(key: Buffer, message: string): Buffer {
    return crypto.createHmac('sha256', key).update(message).digest()
  }

  async uploadImage(imageData: Buffer, path: string): Promise<string> {
    const md5Filename = crypto.createHash('md5').update(imageData).digest('hex')
    const objectKey = `${path}/${md5Filename}.png`
    await this.uploadToS3(objectKey, imageData, 'image/png')
    if (this.customDomain && this.customDomain.length > 0) {
      return `${this.customDomain}/${objectKey}`
    }
    return objectKey
  }

  async uploadFile(fileData: Buffer, path: string): Promise<string> {
    const md5Filename = crypto.createHash('md5').update(fileData).digest('hex')
    const ext = extname(path)
    const objectKey = `${path}/${md5Filename}${ext}`
    await this.uploadToS3(objectKey, fileData, 'application/octet-stream')
    return objectKey
  }

  // Generic upload to S3-compatible storage via a SigV4-signed PUT request
  async uploadToS3(
    objectKey: string,
    fileData: Buffer,
    contentType: string,
  ): Promise<void> {
    const service = 's3'
    const date = new Date()
    const xAmzDate = date.toISOString().replaceAll(/[:-]|\.\d{3}/g, '')
    const dateStamp = xAmzDate.slice(0, 8) // YYYYMMDD

    // Calculate hashed payload
    const hashedPayload = crypto
      .createHash('sha256')
      .update(fileData)
      .digest('hex')

    // Set request headers
    const url = new URL(this.endpoint)
    const host = url.host
    const contentLength = fileData.length.toString()
    const headers: Record<string, string> = {
      Host: host,
      'Content-Type': contentType,
      'Content-Length': contentLength,
      'x-amz-date': xAmzDate,
      'x-amz-content-sha256': hashedPayload,
    }

    // Create canonical request
    const sortedHeaders = Object.keys(headers).sort()
    const canonicalHeaders = sortedHeaders
      .map((key) => `${key.toLowerCase()}:${headers[key].trim()}`)
      .join('\n')
    const signedHeaders = sortedHeaders
      .map((key) => key.toLowerCase())
      .join(';')
    const canonicalRequest = [
      'PUT',
      `/${this.bucket}/${objectKey}`,
      '', // No query parameters
      canonicalHeaders,
      '', // Trailing newline after canonical headers, required by SigV4
      signedHeaders,
      hashedPayload,
    ].join('\n')

    // Create string to sign
    const algorithm = 'AWS4-HMAC-SHA256'
    const credentialScope = `${dateStamp}/${this.region}/${service}/aws4_request`
    const hashedCanonicalRequest = crypto
      .createHash('sha256')
      .update(canonicalRequest)
      .digest('hex')
    const stringToSign = [
      algorithm,
      xAmzDate,
      credentialScope,
      hashedCanonicalRequest,
    ].join('\n')

    // Calculate signature (SigV4 signing-key derivation chain)
    const kSecret = Buffer.from(`AWS4${this.secretKey}`)
    const kDate = this.hmacSha256(kSecret, dateStamp)
    const kRegion = this.hmacSha256(kDate, this.region)
    const kService = this.hmacSha256(kRegion, service)
    const kSigning = this.hmacSha256(kService, 'aws4_request')
    const signature = this.hmacSha256(kSigning, stringToSign).toString('hex')

    // Assemble Authorization header
    const authorization = `${algorithm} Credential=${this.accessKey}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`

    // Create and send PUT request
    const requestUrl = `${this.endpoint}/${this.bucket}/${objectKey}`
    const response = await fetch(requestUrl, {
      method: 'PUT',
      headers: {
        ...headers,
        Authorization: authorization,
      },
      body: fileData,
    })
    if (!response.ok) {
      throw new Error(`Upload failed with status code: ${response.status}`)
    }
  }
}
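
The signing-key derivation in uploadToS3 (kSecret → kDate → kRegion → kService → kSigning) is the standard AWS Signature Version 4 chain. A standalone sketch to sanity-check it against the worked "Deriving the signing key" example in the AWS SigV4 documentation (inputs are that example's published test values; compare the printed hex with the docs):

  import * as crypto from 'node:crypto'

  const hmac = (key: crypto.BinaryLike, message: string) =>
    crypto.createHmac('sha256', key).update(message).digest()

  // Test values published in the AWS SigV4 documentation example
  const secretKey = 'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'
  const dateStamp = '20150830'
  const region = 'us-east-1'
  const service = 'iam'

  const kDate = hmac(`AWS4${secretKey}`, dateStamp)
  const kRegion = hmac(kDate, region)
  const kService = hmac(kRegion, service)
  const kSigning = hmac(kService, 'aws4_request')

  // Should match the signing key shown in the AWS docs for these inputs
  console.log(kSigning.toString('hex'))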

pnpm-lock.yaml (generated, 1183 changed lines)

File diff suppressed because it is too large.