feat: rollback to use S3 client for now.

This commit is contained in:
Yufan Sheng 2024-06-21 18:17:03 +08:00
parent 910342184b
commit f3623ab452
Signed by: syhily
GPG Key ID: 9D18A22A7DCD5A9B
4 changed files with 1845 additions and 176 deletions

1889
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -56,6 +56,7 @@
}, },
"devDependencies": { "devDependencies": {
"@astrojs/check": "^0.7.0", "@astrojs/check": "^0.7.0",
"@aws-sdk/client-s3": "^3.600.0",
"@biomejs/biome": "^1.8.2", "@biomejs/biome": "^1.8.2",
"@napi-rs/canvas": "^0.1.53", "@napi-rs/canvas": "^0.1.53",
"@types/lodash": "^4.17.5", "@types/lodash": "^4.17.5",
@ -66,7 +67,7 @@
"@types/unist": "^3.0.2", "@types/unist": "^3.0.2",
"aplayer": "^1.10.1", "aplayer": "^1.10.1",
"bootstrap": "^5.3.3", "bootstrap": "^5.3.3",
"opendal": "^0.46.1", "mime": "^4.0.3",
"prettier": "^3.3.2", "prettier": "^3.3.2",
"prettier-plugin-astro": "^0.14.0", "prettier-plugin-astro": "^0.14.0",
"prettier-plugin-astro-organize-imports": "^0.4.8", "prettier-plugin-astro-organize-imports": "^0.4.8",

View File

@ -1,8 +1,16 @@
import {
HeadBucketCommand,
HeadObjectCommand,
NoSuchBucket,
NotFound,
PutObjectCommand,
S3Client,
} from '@aws-sdk/client-s3';
import type { AstroIntegration, AstroIntegrationLogger } from 'astro'; import type { AstroIntegration, AstroIntegrationLogger } from 'astro';
import { z } from 'astro/zod'; import { z } from 'astro/zod';
import mime from 'mime';
import fs from 'node:fs'; import fs from 'node:fs';
import path from 'node:path'; import path from 'node:path';
import { Operator } from 'opendal';
import { rimrafSync } from 'rimraf'; import { rimrafSync } from 'rimraf';
const S3Options = z const S3Options = z
@ -23,9 +31,6 @@ const S3Options = z
accessKey: z.string().min(1), accessKey: z.string().min(1),
// The secret access key. // The secret access key.
secretAccessKey: z.string().min(1), secretAccessKey: z.string().min(1),
// The extra options provided by opendal.
// All the methods in https://docs.rs/opendal/latest/opendal/services/struct.S3.html#implementations can be treated as an option.
extraOptions: z.record(z.string(), z.string()).default({}),
}) })
.strict() .strict()
.superRefine((opts, { addIssue }) => { .superRefine((opts, { addIssue }) => {
@ -34,26 +39,9 @@ const S3Options = z
} }
}); });
const parseOptions = (opts: z.input<typeof S3Options>, logger: AstroIntegrationLogger) => { const parseOptions = (opts: z.input<typeof S3Options>, logger: AstroIntegrationLogger): z.infer<typeof S3Options> => {
try { try {
const { paths, bucket, root, accessKey, secretAccessKey, region, endpoint, extraOptions, keep } = return S3Options.parse(opts);
S3Options.parse(opts);
// Create opendal operator.
// The common configurations are listed here https://docs.rs/opendal/latest/opendal/services/struct.S3.html#configuration
const options: Record<string, string> = {
...extraOptions,
root: root,
bucket: bucket,
region: region,
access_key_id: accessKey,
secret_access_key: secretAccessKey,
};
if (endpoint !== undefined) {
options.endpoint = endpoint;
}
return { options, paths, keep };
} catch (err) { } catch (err) {
if (err instanceof z.ZodError) { if (err instanceof z.ZodError) {
logger.error(`Uploader options validation error, there are ${err.issues.length} errors:`); logger.error(`Uploader options validation error, there are ${err.issues.length} errors:`);
@ -66,17 +54,74 @@ const parseOptions = (opts: z.input<typeof S3Options>, logger: AstroIntegrationL
} }
}; };
class Context {
  /**
   * Thin wrapper around an S3Client bound to one bucket and key prefix (root),
   * so upload code can work with site-relative keys only.
   */
  constructor(
    private readonly client: S3Client,
    private readonly bucket: string,
    private readonly root: string,
  ) {}

  // Join the configured root with the site-relative key.
  // Always use POSIX separators: S3 keys are '/'-delimited regardless of host OS.
  private fullKey(key: string): string {
    return path.posix.join(this.root, key);
  }

  /**
   * Check whether an object already exists on the backend.
   *
   * Treats both the typed `NotFound` error and any response carrying a 404
   * status as "missing": S3-compatible services (MinIO, R2, …) do not always
   * raise the typed error class, and re-throwing their generic 404 would
   * abort the whole upload instead of triggering one.
   */
  async isExist(key: string): Promise<boolean> {
    const headCmd = new HeadObjectCommand({ Bucket: this.bucket, Key: this.fullKey(key) });
    try {
      await this.client.send(headCmd);
      return true;
    } catch (error) {
      if (error instanceof NotFound) {
        return false;
      }
      // Fallback for backends that throw an untyped error with a 404 status.
      const status = (error as { $metadata?: { httpStatusCode?: number } })?.$metadata?.httpStatusCode;
      if (status === 404) {
        return false;
      }
      throw error;
    }
  }

  /**
   * Upload `body` under the given site-relative key.
   * Content-Type is guessed from the file extension; when the extension is
   * unknown (`mime.getType` returns null) the header is simply omitted.
   */
  async write(key: string, body: Buffer) {
    const putCmd = new PutObjectCommand({
      Bucket: this.bucket,
      Key: this.fullKey(key),
      Body: body,
      ContentType: mime.getType(key) ?? undefined,
    });
    await this.client.send(putCmd);
  }
}
export const uploader = (opts: z.input<typeof S3Options>): AstroIntegration => ({ export const uploader = (opts: z.input<typeof S3Options>): AstroIntegration => ({
name: 'S3 Uploader', name: 'S3 Uploader',
hooks: { hooks: {
'astro:build:done': async ({ dir, logger }: { dir: URL; logger: AstroIntegrationLogger }) => { 'astro:build:done': async ({ dir, logger }: { dir: URL; logger: AstroIntegrationLogger }) => {
const { options, paths, keep } = parseOptions(opts, logger); const { paths, keep, region, endpoint, bucket, root, accessKey, secretAccessKey } = parseOptions(opts, logger);
const operator = new Operator('s3', options); const client = new S3Client({
region: region,
endpoint: endpoint,
credentials: { accessKeyId: accessKey, secretAccessKey: secretAccessKey },
useGlobalEndpoint: endpoint !== undefined && endpoint !== '',
});
logger.info('Try to verify the S3 credentials.');
try {
await client.send(new HeadBucketCommand({ Bucket: bucket }));
} catch (err) {
// If the bucket does not exist.
if (err instanceof NoSuchBucket) {
logger.error(`The bucket ${bucket} isn't existed on the region: ${region} endpoint: ${endpoint}`);
} else {
logger.error(JSON.stringify(err));
}
throw err;
}
logger.info(`Start to upload static files in dir ${paths} to S3 compatible backend.`); logger.info(`Start to upload static files in dir ${paths} to S3 compatible backend.`);
const context = new Context(client, bucket, root);
for (const current of paths) { for (const current of paths) {
await uploadFile(operator, logger, current, dir.pathname); await uploadFile(context, logger, current, dir.pathname);
if (!keep) { if (!keep) {
rimrafSync(path.join(dir.pathname, current)); rimrafSync(path.join(dir.pathname, current));
} }
@ -92,35 +137,29 @@ const normalizePath = (current: string): string => {
return current.includes(path.win32.sep) ? current.split(path.win32.sep).join(path.posix.sep) : current; return current.includes(path.win32.sep) ? current.split(path.win32.sep).join(path.posix.sep) : current;
}; };
const uploadFile = async (operator: Operator, logger: AstroIntegrationLogger, current: string, root: string) => { const uploadFile = async (context: Context, logger: AstroIntegrationLogger, current: string, root: string) => {
const filePath = path.join(root, current); const filePath = path.join(root, current);
const isFile = !fs.statSync(filePath).isDirectory(); const isFile = !fs.statSync(filePath).isDirectory();
const uploadAction = async (key: string) => { const uploadAction = async (key: string) => {
logger.info(`Start to upload file: ${key}`); logger.info(`Start to upload file: ${key}`);
const body = fs.readFileSync(filePath); const body = fs.readFileSync(filePath);
await operator.write(key, body); await context.write(key, body);
}; };
if (isFile) { if (isFile) {
const key = normalizePath(current); const key = normalizePath(current);
try { if (await context.isExist(key)) {
const meta = await operator.stat(key);
if (meta.isFile()) {
logger.info(`${key} exists on backend, skip.`); logger.info(`${key} exists on backend, skip.`);
} else { } else {
await uploadAction(key); await uploadAction(key);
} }
} catch (error) { } else {
await uploadAction(key);
}
return;
}
// Recursively upload files. // Recursively upload files.
for (const next of fs.readdirSync(filePath)) { for (const next of fs.readdirSync(filePath)) {
if (next.startsWith('.')) { if (next.startsWith('.')) {
continue; continue;
} }
await uploadFile(operator, logger, path.join(current, next), root); await uploadFile(context, logger, path.join(current, next), root);
}
} }
}; };

View File

@ -1,8 +1,6 @@
import Aplayer from 'aplayer/dist/APlayer.min.js'; import Aplayer from 'aplayer/dist/APlayer.min.js';
import stickySidebar from './sticky-sidebar.js'; import stickySidebar from './sticky-sidebar.js';
const LOGO = '';
// Menu toggle. // Menu toggle.
const menuBody = document.querySelector('.site-aside'); const menuBody = document.querySelector('.site-aside');
document.addEventListener('keydown', (event) => { document.addEventListener('keydown', (event) => {