refactor: use aws-sdk-s3 v3

liqingwei 2021-03-28 11:26:01 +08:00
parent 4ae858c82b
commit 8b18706161
14 changed files with 943 additions and 874 deletions

View File

@ -48,7 +48,7 @@ STORE_TYPE=MINIO
STORE_ACCESS_KEY=
STORE_SECRET_KEY=
STORE_BUCKET=notea
STORE_END_POINT=
STORE_END_POINT=http://localhost:9000
```
### Amazon S3
@ -72,7 +72,8 @@ STORE_TYPE=OSS
STORE_ACCESS_KEY=
STORE_SECRET_KEY=
STORE_BUCKET=notea
STORE_END_POINT=oss-cn-hangzhou.aliyuncs.com
STORE_END_POINT=http://oss-cn-hangzhou.aliyuncs.com
STORE_REGION=oss-cn-hangzhou
```
## Environment variables
@ -80,7 +81,7 @@ STORE_END_POINT=oss-cn-hangzhou.aliyuncs.com
| Name | Description | Default | Optional | Required |
| ---------------- | ---------------------------- | --------- | --------------------- | -------- |
| PASSWORD | password to login to the app | | | true |
| STORE_TYPE | storage method | | `MINIO`, `OSS`, `AWS` | true |
| STORE_TYPE | storage service | | `MINIO`, `OSS`, `AWS` | true |
| STORE_ACCESS_KEY | accessKey | | | true |
| STORE_SECRET_KEY | secretKey | | | true |
| STORE_BUCKET | bucket | | | true |
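
These variables are read by the store factory when the server starts. A rough sketch of how the table maps onto code, reusing the `getEnv` helper that appears later in this commit (the defaults mirror the `createStore` changes below; this is an illustration, not the project's actual code):

```ts
import { getEnv } from '@notea/shared'

// Illustration only — mirrors the reads done in createStore().
const storeType = getEnv('STORE_TYPE') // 'MINIO' | 'OSS' | 'AWS'
const accessKey = getEnv('STORE_ACCESS_KEY')
const secretKey = getEnv('STORE_SECRET_KEY')
const bucket = getEnv('STORE_BUCKET', 'notea')
const endPoint = getEnv('STORE_END_POINT', 'http://localhost:9000') // MINIO default
const region = getEnv('STORE_REGION', 'us-east-1') // MINIO/AWS default
```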

View File

@ -8,14 +8,14 @@ import {
} from 'libs/shared/meta'
export function jsonToMeta(meta?: Record<string, string | undefined>) {
const metaData: Map<string, string> = new Map()
const metaData: Record<string, string> = {}
if (meta) {
PAGE_META_KEY.forEach((key) => {
const value = meta[key]
if (value) {
metaData.set(key, strCompress(value.toString()))
metaData[key] = strCompress(value.toString())
}
})
}
@ -23,12 +23,12 @@ export function jsonToMeta(meta?: Record<string, string | undefined>) {
return metaData
}
export function metaToJson(metaData?: Map<string, string>) {
export function metaToJson(metaData?: Record<string, string>) {
const meta: Record<string, any> = {}
if (metaData) {
PAGE_META_KEY.forEach((key) => {
const value = metaData.get(key)
const value = metaData[key]
if (!isNil(value)) {
const strValue = strDecompress(value) || undefined
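
The move from `Map` to a plain `Record<string, string>` matches the v3 SDK, whose `Metadata` fields are plain objects. A hedged usage sketch (key names assume `title` and `date` are in `PAGE_META_KEY`; the import path is the one used for `metaToJson` elsewhere in this commit):

```ts
import { jsonToMeta, metaToJson } from 'libs/server/meta'

// Illustration only: keys assume 'title' and 'date' are listed in PAGE_META_KEY.
const meta = jsonToMeta({ title: 'Hello', date: new Date().toISOString() })
// As a plain object, the result can be merged with spread syntax and passed
// straight through as the S3 `Metadata` parameter:
const merged = { ...meta, ...jsonToMeta({ title: 'Renamed' }) }
const json = metaToJson(merged) // decompressed back into a Record<string, any>
```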

View File

@ -4,21 +4,6 @@ const runtimeCaching = require('next-pwa/cache')
module.exports = withPWA({
target: process.env.NETLIFY ? 'serverless' : 'server',
headers() {
return [
{
source: '/api/file/:file*',
headers: [
{
key: 'Cache-Control',
value:
'public, max-age=31536000, s-maxage=31536000, stale-while-revalidate=31536000',
},
],
},
]
},
webpack(config, { defaultLoaders }) {
config.module.rules.push({
test: /\.jsx/,

View File

@ -11,12 +11,14 @@ export function toBuffer(raw: unknown, compressed = false): Buffer {
}
export function toStr(
buffer?: Buffer,
bufferOrString?: Buffer | string,
deCompressed = false
): string | undefined {
if (!buffer) return
if (!bufferOrString) return
const str = buffer.toString()
const str = Buffer.isBuffer(bufferOrString)
? bufferOrString.toString()
: bufferOrString
return deCompressed ? strDecompress(str) : str
}
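
`toStr` now accepts either a `Buffer` or an already-decoded `string`, since the v3 `GetObject` response body is a stream that the store converts to a string before handing it over. A quick sketch of both call shapes (values are placeholders):

```ts
import { toStr } from '@notea/shared'

// Both input shapes take the same path now (values are illustrative):
const fromBuffer = toStr(Buffer.from('note body')) // 'note body'
const fromString = toStr('note body')              // 'note body'
const missing = toStr(undefined)                   // undefined
```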

View File

@ -4,7 +4,8 @@
"main": "src/index.ts",
"license": "MIT",
"dependencies": {
"awos-js": "^2.0.3"
"@aws-sdk/client-s3": "^3.10.0",
"@aws-sdk/s3-request-presigner": "^3.10.0"
},
"devDependencies": {}
}
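
The two new packages split what `awos-js` used to bundle: `@aws-sdk/client-s3` provides the client plus per-operation command classes, while `@aws-sdk/s3-request-presigner` only signs URLs. A minimal sketch of how they fit together (region, bucket, and key are placeholders):

```ts
import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'

async function presignExample() {
  // Placeholder configuration; the real values come from the STORE_* variables.
  const client = new S3Client({ region: 'us-east-1' })
  const command = new GetObjectCommand({ Bucket: 'notea', Key: 'notes/example' })
  // Temporary download link, here valid for 10 minutes.
  return getSignedUrl(client, command, { expiresIn: 600 })
}
```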

View File

@ -1,4 +1,5 @@
import { getEnv } from '@notea/shared'
import { URL } from 'url'
import { StoreS3 } from './providers/s3'
export type StroageType = 'OSS' | 'MINIO' | 'AWS'
@ -8,18 +9,28 @@ export function createStore(
type = getEnv<StroageType>('STORE_TYPE')
) {
switch (type) {
case 'OSS':
case 'OSS': {
let endPoint = getEnv(
'STORE_END_POINT',
'http://oss-cn-hangzhou.aliyuncs.com'
) as string
if (!/:\/\//.test(endPoint)) {
endPoint = `http://${endPoint}`
}
const url = new URL(endPoint)
return new StoreS3({
type: 'oss',
accessKey: getEnv('STORE_ACCESS_KEY'),
secretKey: getEnv('STORE_SECRET_KEY'),
endPoint: getEnv('STORE_END_POINT'),
endPoint,
bucket: getEnv('STORE_BUCKET', 'notea'),
region: getEnv('STORE_REGION', url.host.split('.')[0]),
prefix,
})
}
case 'AWS':
return new StoreS3({
type: 'aws',
accessKey: getEnv('STORE_ACCESS_KEY'),
secretKey: getEnv('STORE_SECRET_KEY'),
bucket: getEnv('STORE_BUCKET', 'notea'),
@ -29,11 +40,11 @@ export function createStore(
case 'MINIO':
default:
return new StoreS3({
type: 'aws',
accessKey: getEnv('STORE_ACCESS_KEY'),
secretKey: getEnv('STORE_SECRET_KEY'),
endPoint: getEnv('STORE_END_POINT', 'http://localhost:9000'),
bucket: getEnv('STORE_BUCKET', 'notea'),
region: getEnv('STORE_REGION', 'us-east-1'),
prefix,
pathStyle: true,
})
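
Two details in the factory are worth noting: an OSS endpoint without a scheme gets `http://` prepended so it parses as a URL, and the default region is derived from the first host label; MinIO additionally needs path-style addressing. A reduced sketch of just the endpoint handling (it mirrors the OSS branch above, for illustration only):

```ts
import { URL } from 'url'

// Mirrors the OSS branch: normalise a scheme-less endpoint, then derive the region.
let endPoint = 'oss-cn-hangzhou.aliyuncs.com' // e.g. a STORE_END_POINT without a scheme
if (!/:\/\//.test(endPoint)) {
  endPoint = `http://${endPoint}`
}
const url = new URL(endPoint)
const region = url.host.split('.')[0] // 'oss-cn-hangzhou'
```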

View File

@ -3,7 +3,7 @@ export interface StoreProviderConfig {
}
export interface ObjectOptions {
meta?: Map<string, any>
meta?: { [key: string]: string }
contentType?: string
headers?: {
cacheControl?: string
@ -46,7 +46,9 @@ export abstract class StoreProvider {
* Meta
* @returns meta
*/
abstract getObjectMeta(path: string): Promise<Map<string, string> | undefined>
abstract getObjectMeta(
path: string
): Promise<{ [key: string]: string } | undefined>
/**
* Meta
@ -54,11 +56,10 @@ export abstract class StoreProvider {
*/
abstract getObjectAndMeta(
path: string,
metaKeys: string[],
isCompressed?: boolean
): Promise<{
content?: string
meta?: Map<string, string>
meta?: { [key: string]: string }
}>
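
Callers no longer pass a `metaKeys` list; providers return whatever metadata the object carries, as a plain object. A hedged consumer sketch (path and key are illustrative):

```ts
import { StoreProvider } from 'packages/store/src'

// Illustration only: read an object and its metadata with the new signature.
async function readNote(store: StoreProvider, path: string) {
  const { content, meta } = await store.getObjectAndMeta(path)
  // meta is a { [key: string]: string } rather than a Map
  return { content, title: meta?.['title'] }
}
```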
/**

View File

@ -1,53 +1,67 @@
import { ObjectOptions, StoreProvider, StoreProviderConfig } from './base'
import { toBuffer, toStr } from '@notea/shared'
import { Client } from 'awos-js'
import { isBuffer } from 'lodash'
import {
CopyObjectCommand,
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
S3Client,
} from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import { streamToString } from '../utils'
import { Readable } from 'stream'
import { isEmpty } from 'lodash'
/**
* @todo unit test
*/
export interface S3Config extends StoreProviderConfig {
bucket: string
accessKey: string
secretKey: string
type: 'oss' | 'aws'
endPoint?: string
pathStyle?: boolean
region?: string
}
export class StoreS3 extends StoreProvider {
store: Client
client: S3Client
config: S3Config
constructor(config: S3Config) {
super(config)
this.store = new Client({
type: config.type,
ossOptions: {
accessKeyId: config.accessKey,
accessKeySecret: config.secretKey,
endpoint: config.endPoint as string,
bucket: config.bucket,
},
awsOptions: {
this.client = new S3Client({
forcePathStyle: config.pathStyle,
region: config.region,
endpoint: config.endPoint,
credentials: {
accessKeyId: config.accessKey,
secretAccessKey: config.secretKey,
endpoint: config.endPoint as string,
bucket: config.bucket,
s3ForcePathStyle: config.pathStyle,
region: config.region,
},
})
this.config = config
}
async getSignUrl(path: string, expires = 600) {
return this.store.signatureUrl(this.getPath(path), {
expires,
})
getSignUrl(path: string, expires = 600) {
return getSignedUrl(
this.client,
new GetObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
}),
{ expiresIn: expires }
)
}
async hasObject(path: string) {
try {
const data = await this.store.head(this.getPath(path))
const data = await this.client.send(
new HeadObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
})
)
return !!data
} catch (e) {
@ -59,8 +73,13 @@ export class StoreS3 extends StoreProvider {
let content
try {
const result = await this.store.getAsBuffer(this.getPath(path))
content = result?.content
const result = await this.client.send(
new GetObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
})
)
content = await streamToString(result.Body as Readable)
} catch (err) {
if (err.code !== 'NoSuchKey') {
throw err
@ -72,8 +91,13 @@ export class StoreS3 extends StoreProvider {
async getObjectMeta(path: string) {
try {
const result = await this.store.head(this.getPath(path))
return result || undefined
const result = await this.client.send(
new HeadObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
})
)
return result.Metadata
} catch (err) {
if (err.code !== 'NoSuchKey') {
throw err
@ -82,18 +106,19 @@ export class StoreS3 extends StoreProvider {
}
}
async getObjectAndMeta(
path: string,
metaKeys: string[],
isCompressed = false
) {
async getObjectAndMeta(path: string, isCompressed = false) {
let content
let meta
try {
const result = await this.store.getAsBuffer(this.getPath(path), metaKeys)
content = result?.content
meta = result?.meta
const result = await this.client.send(
new GetObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
})
)
content = await streamToString(result.Body as Readable)
meta = result.Metadata
} catch (err) {
if (err.code !== 'NoSuchKey') {
throw err
@ -109,18 +134,42 @@ export class StoreS3 extends StoreProvider {
options?: ObjectOptions,
isCompressed?: boolean
) {
await this.store.put(
this.getPath(path),
isBuffer(raw) ? raw : toBuffer(raw, isCompressed),
options
await this.client.send(
new PutObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
Body: Buffer.isBuffer(raw) ? raw : toBuffer(raw, isCompressed),
Metadata: options?.meta,
CacheControl: options?.headers?.cacheControl,
ContentDisposition: options?.headers?.contentDisposition,
ContentEncoding: options?.headers?.contentEncoding,
ContentType: options?.contentType,
})
)
}
async deleteObject(path: string) {
await this.store.del(this.getPath(path))
await this.client.send(
new DeleteObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(path),
})
)
}
async copyObject(fromPath: string, toPath: string, options: ObjectOptions) {
await this.store.copy(this.getPath(toPath), this.getPath(fromPath), options)
await this.client.send(
new CopyObjectCommand({
Bucket: this.config.bucket,
Key: this.getPath(toPath),
CopySource: `${this.config.bucket}/${this.getPath(fromPath)}`,
Metadata: options?.meta,
CacheControl: options?.headers?.cacheControl,
ContentDisposition: options?.headers?.contentDisposition,
ContentEncoding: options?.headers?.contentEncoding,
ContentType: options?.contentType,
MetadataDirective: isEmpty(options?.meta) ? 'COPY' : 'REPLACE',
})
)
}
}
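
One subtlety in `copyObject`: S3 ignores the `Metadata` parameter of a copy unless `MetadataDirective` is `REPLACE`, so the directive is chosen from whether any metadata was supplied. A reduced sketch of that decision as a hypothetical standalone helper (not part of this commit):

```ts
import { CopyObjectCommand } from '@aws-sdk/client-s3'
import { isEmpty } from 'lodash'

// Hypothetical helper mirroring how copyObject above builds its command.
function buildCopyCommand(
  bucket: string,
  fromKey: string,
  toKey: string,
  meta?: Record<string, string>
) {
  return new CopyObjectCommand({
    Bucket: bucket,
    Key: toKey,
    CopySource: `${bucket}/${fromKey}`,
    Metadata: meta,
    // COPY keeps the source object's metadata; REPLACE swaps in `meta`.
    MetadataDirective: isEmpty(meta) ? 'COPY' : 'REPLACE',
  })
}
```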

View File

@ -0,0 +1,12 @@
import { Readable } from 'stream'
// The stream parameter is typed upstream as Readable | ReadableStream | Blob,
// but the latter two are not available in this Node.js runtime.
export async function streamToString(stream: Readable): Promise<string> {
return await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = []
stream.on('data', (chunk) => chunks.push(chunk))
stream.on('error', reject)
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')))
})
}
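
On the server, the `Body` returned by `GetObjectCommand` is a Node `Readable`, which is what this helper consumes; a hedged usage sketch (the import path is illustrative):

```ts
import { Readable } from 'stream'
import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'
import { streamToString } from './utils'

// Illustration only: bucket and key are placeholders.
async function readObjectBody(client: S3Client, bucket: string, key: string) {
  const result = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }))
  return streamToString(result.Body as Readable)
}
```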

View File

@ -8,13 +8,16 @@ export const config = {
},
}
// On aliyun `X-Amz-Expires` must be less than 604800 seconds
const expires = 604800 - 1
export default api()
.use(useStore)
.get(async (req, res) => {
if (req.query.file) {
const signUrl = await req.store.getSignUrl(
getPathFileByName((req.query.file as string[]).join('/')),
31536000
expires
)
if (signUrl) {
@ -23,5 +26,10 @@ export default api()
}
}
res.setHeader(
'Cache-Control',
`public, max-age=${expires}, s-maxage=${expires}, stale-while-revalidate=${expires}`
)
res.redirect('/404')
})
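
Aliyun OSS rejects presigned URLs whose `X-Amz-Expires` is 604800 seconds (7 days) or more, hence the `604800 - 1` constant; the same value now drives the Cache-Control header so cached responses never outlive their signed link. A tiny sketch of that constraint as a hypothetical guard (not part of this commit):

```ts
// Hypothetical guard: keep any requested expiry below the 7-day presign ceiling.
const MAX_PRESIGN_SECONDS = 604800
function clampExpiry(seconds: number): number {
  return Math.min(seconds, MAX_PRESIGN_SECONDS - 1)
}
```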

View File

@ -4,7 +4,6 @@ import { metaToJson } from 'libs/server/meta'
import { useAuth } from 'libs/server/middlewares/auth'
import { useStore } from 'libs/server/middlewares/store'
import { getPathNoteById } from 'libs/server/note-path'
import { PAGE_META_KEY } from 'libs/shared/meta'
import { NoteModel } from 'libs/web/state/note'
import { StoreProvider } from 'packages/store/src'
import { API } from 'libs/server/middlewares/error'
@ -13,10 +12,7 @@ export async function getNote(
store: StoreProvider,
id: string
): Promise<NoteModel> {
const { content, meta } = await store.getObjectAndMeta(
getPathNoteById(id),
PAGE_META_KEY
)
const { content, meta } = await store.getObjectAndMeta(getPathNoteById(id))
if (!content && !meta) {
throw API.NOT_FOUND.throw()
@ -65,7 +61,7 @@ export default api()
const oldMeta = await req.store.getObjectMeta(notePath)
if (oldMeta) {
oldMeta.set('date', strCompress(new Date().toISOString()))
oldMeta['date'] = strCompress(new Date().toISOString())
}
await req.store.putObject(notePath, content, {

View File

@ -19,7 +19,7 @@ export default api()
})
if (oldMeta) {
meta = new Map([...oldMeta, ...meta])
meta = { ...oldMeta, ...meta }
// Handle the delete case
const { deleted } = req.body
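
With metadata as plain objects, merging the stored metadata with the incoming update is an object spread instead of rebuilding a `Map`; keys from the new `meta` still win on conflict. A tiny illustration (keys and values are hypothetical):

```ts
// Hypothetical values: oldMeta comes from getObjectMeta, meta from jsonToMeta(req.body).
const oldMeta = { title: 'old title', date: '2021-03-01' }
const meta = { date: '2021-03-28' }
const merged = { ...oldMeta, ...meta } // { title: 'old title', date: '2021-03-28' }
```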

View File

@ -48,7 +48,7 @@ async function restoreNote(req: ApiRequest, id: string, parentId = 'root') {
deleted: NOTE_DELETED.NORMAL.toString(),
})
if (oldMeta) {
meta = new Map([...oldMeta, ...meta])
meta = { ...oldMeta, ...meta }
}
await req.store.copyObject(notePath, notePath, {

yarn.lock (1583 changes)

File diff suppressed because it is too large.