feat: use s3 api for uploading the static files. (#46)

* chore: bump the dependencies.

* feat: use s3 v3 client instead of the upyun sdk.

* chore: change the schema of biome.
Authored by Yufan Sheng on 2024-06-20 22:41:40 +08:00, committed by GitHub
parent a6776fe145
commit f2f7cd8fa6
9 changed files with 1955 additions and 385 deletions

Dockerfile

@@ -7,7 +7,6 @@ COPY . .
 ENV ASTRO_TELEMETRY_DISABLED=1
 RUN NODE_ENV=development npm install
-RUN npm i patch-package && npm exec patch-package
 RUN NODE_ENV=production npm run build
 FROM base AS runtime

astro.config.ts

@@ -3,7 +3,7 @@ import node from '@astrojs/node';
 import { defineConfig, envField } from 'astro/config';
 import options from './options';
 import { astroImage } from './plugins/images';
-import { upyun } from './plugins/upyun';
+import { uploader } from './plugins/uploader';
 // https://astro.build/config
 export default defineConfig({
@@ -31,8 +31,12 @@ export default defineConfig({
     mdx({
       remarkPlugins: [astroImage],
     }),
-    upyun({
-      path: ['images', 'og', 'cats'],
+    uploader({
+      paths: ['images', 'og', 'cats'],
+      endpoint: process.env.S3_ENDPOINT,
+      bucket: process.env.S3_BUCKET as string,
+      accessKey: process.env.S3_ACCESS_KEY as string,
+      secretAccessKey: process.env.S3_SECRET_ACCESS_KEY as string,
     }),
   ],
   adapter: node({
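The uploader integration above takes its connection settings from four environment variables: S3_ENDPOINT, S3_BUCKET, S3_ACCESS_KEY and S3_SECRET_ACCESS_KEY, with the region falling back to the plugin's default of us-east-1. As a rough pre-flight sketch, something like the following script, which is not part of this commit (the file name and console messages are assumptions), could confirm that the bucket is reachable with those credentials using the same @aws-sdk/client-s3 v3 client before running a build:

// check-s3.ts (hypothetical helper, not included in this commit)
import { HeadBucketCommand, S3Client } from '@aws-sdk/client-s3';

const client = new S3Client({
  region: 'us-east-1', // Matches the uploader's default region.
  endpoint: process.env.S3_ENDPOINT,
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY as string,
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY as string,
  },
});

try {
  // HeadBucket succeeds only when the bucket exists and the credentials can access it.
  await client.send(new HeadBucketCommand({ Bucket: process.env.S3_BUCKET as string }));
  console.log('S3 bucket is reachable, the build can upload static files.');
} catch (error) {
  console.error('S3 bucket or credentials are misconfigured:', error);
  process.exit(1);
}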

biome.json

@@ -1,8 +1,17 @@
 {
-  "$schema": "https://biomejs.dev/schemas/1.8.0/schema.json",
-  "formatter": { "enabled": false },
-  "organizeImports": { "enabled": false },
-  "linter": { "enabled": true, "rules": { "recommended": true } },
+  "$schema": "https://biomejs.dev/schemas/1.8.2/schema.json",
+  "formatter": {
+    "enabled": false
+  },
+  "organizeImports": {
+    "enabled": false
+  },
+  "linter": {
+    "enabled": true,
+    "rules": {
+      "recommended": true
+    }
+  },
   "vcs": {
     "enabled": true,
     "clientKind": "git",

package-lock.json (generated, 2073 changes): diff suppressed because it is too large.

package.json

@@ -44,7 +44,7 @@
     "@astrojs/mdx": "^3.1.1",
     "@astrojs/node": "^8.3.1",
     "@astrojs/rss": "^4.0.6",
-    "astro": "^4.10.3",
+    "astro": "^4.11.0",
     "drizzle-orm": "^0.31.2",
     "fuse.js": "^7.0.0",
     "lodash": "^4.17.21",
@@ -56,15 +56,15 @@
   },
   "devDependencies": {
     "@astrojs/check": "^0.7.0",
-    "@biomejs/biome": "^1.8.1",
+    "@aws-sdk/client-s3": "^3.600.0",
+    "@biomejs/biome": "^1.8.2",
     "@napi-rs/canvas": "^0.1.53",
     "@types/lodash": "^4.17.5",
     "@types/luxon": "^3.4.2",
-    "@types/node": "^20.14.5",
+    "@types/node": "^20.14.6",
     "@types/pg": "^8.11.6",
     "@types/qrcode-svg": "^1.1.4",
     "@types/unist": "^3.0.2",
-    "@types/upyun": "^3.4.3",
     "aplayer": "^1.10.1",
     "bootstrap": "^5.3.3",
     "prettier": "^3.3.2",
@@ -75,7 +75,6 @@
     "rimraf": "^5.0.7",
     "sharp": "^0.33.4",
     "typescript": "^5.4.5",
-    "unist-util-visit": "^5.0.0",
-    "upyun": "^3.4.6"
+    "unist-util-visit": "^5.0.0"
   }
 }

patch-package patch for astro (deleted)

@@ -1,22 +0,0 @@
diff --git a/node_modules/astro/dist/content/types-generator.js b/node_modules/astro/dist/content/types-generator.js
index af976bb..b03e214 100644
--- a/node_modules/astro/dist/content/types-generator.js
+++ b/node_modules/astro/dist/content/types-generator.js
@@ -351,8 +351,6 @@ async function writeContentFiles({
collection: ${collectionKey};
data: ${dataType}
};
-`;
- dataTypesStr += `};
`;
}
if (settings.config.experimental.contentCollectionJsonSchema && collectionConfig?.schema) {
@@ -382,6 +380,8 @@ async function writeContentFiles({
);
}
}
+ dataTypesStr += `};
+`;
break;
}
}

plugins/uploader.ts (new file, 106 lines)

@@ -0,0 +1,106 @@
import { HeadObjectCommand, NotFound, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import type { Logger } from '@smithy/types';
import type { AstroIntegration, AstroIntegrationLogger } from 'astro';
import { z } from 'astro/zod';
import fs from 'node:fs';
import path from 'node:path';
const S3Options = z.object({
paths: z.array(z.string()).min(1),
region: z.string().min(1).default('us-east-1'),
endpoint: z.string().url().optional(),
bucket: z.string().min(1),
accessKey: z.string().min(1),
secretAccessKey: z.string().min(1),
});
export const uploader = (opts: z.input<typeof S3Options>): AstroIntegration => ({
name: 'S3 Uploader',
hooks: {
'astro:build:done': async ({ dir, logger }: { dir: URL; logger: AstroIntegrationLogger }) => {
const { paths, bucket, accessKey, secretAccessKey, region, endpoint } = S3Options.parse(opts);
// Create S3 Client.
const clientLogger = (): Logger => {
const s3Logger = logger.fork('S3 Client');
return {
// biome-ignore lint/suspicious/noExplicitAny: It's defined by external types.
debug: (...content: any[]): void => {
s3Logger.debug(content.join(' '));
},
// biome-ignore lint/suspicious/noExplicitAny: It's defined by external types.
info: (...content: any[]): void => {
s3Logger.info(content.join(' '));
},
// biome-ignore lint/suspicious/noExplicitAny: It's defined by external types.
warn: (...content: any[]): void => {
s3Logger.warn(content.join(' '));
},
// biome-ignore lint/suspicious/noExplicitAny: It's defined by external types.
error: (...content: any[]): void => {
s3Logger.error(content.join(' '));
},
};
};
const client = new S3Client({
region: region,
endpoint: endpoint,
logger: clientLogger(),
credentials: { accessKeyId: accessKey, secretAccessKey: secretAccessKey },
useGlobalEndpoint: endpoint === undefined || endpoint === '',
});
logger.info(`Start to upload static files in dir ${paths} to S3 compatible backend.`);
for (const current of paths) {
await uploadFile(client, logger, bucket, current, dir.pathname);
}
logger.info('Uploaded all the files successfully.');
},
},
});
// Change the windows path into the unix path.
const normalizePath = (current: string): string => {
return current.includes(path.win32.sep) ? current.split(path.win32.sep).join(path.posix.sep) : current;
};
const uploadFile = async (
client: S3Client,
logger: AstroIntegrationLogger,
bucket: string,
current: string,
root: string,
) => {
const filePath = path.join(root, current);
const isFile = !fs.statSync(filePath).isDirectory();
if (isFile) {
const key = normalizePath(current);
const headCmd = new HeadObjectCommand({ Bucket: bucket, Key: key });
try {
await client.send(headCmd);
logger.info(`${key} exists on backend, skip.`);
} catch (error) {
if (error instanceof NotFound) {
logger.info(`Start to upload file: ${key}`);
const body = fs.readFileSync(filePath);
const putCmd = new PutObjectCommand({ Bucket: bucket, Key: key, Body: body });
await client.send(putCmd);
} else {
throw error;
}
}
return;
}
// Recursively upload files in the directory.
for (const next of fs.readdirSync(filePath)) {
if (next.startsWith('.')) {
continue;
}
await uploadFile(client, logger, bucket, path.join(current, next), root);
}
};
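The new plugin uploads idempotently: every key is probed with HeadObjectCommand first and only written with PutObjectCommand when the probe throws NotFound, so re-running a build never overwrites objects that already exist in the bucket. A minimal standalone sketch of that check-then-put step for a single file (the helper name is an assumption; client construction is omitted) looks roughly like:

import { HeadObjectCommand, NotFound, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import fs from 'node:fs';

// Upload filePath under key only when the object is not already present in the bucket.
const uploadIfMissing = async (client: S3Client, bucket: string, key: string, filePath: string): Promise<boolean> => {
  try {
    await client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
    return false; // The object exists, skip it.
  } catch (error) {
    if (!(error instanceof NotFound)) {
      throw error; // Credential or network failures should not be swallowed.
    }
    await client.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: fs.readFileSync(filePath) }));
    return true;
  }
};

A consequence of the skip-if-exists behaviour is that modifying a file without renaming it will not update the already-uploaded object; the stale key has to be removed from the bucket first.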

plugins/upyun.ts (deleted)

@@ -1,93 +0,0 @@
import type { AstroIntegration, AstroIntegrationLogger, RouteData } from 'astro';
import fs from 'node:fs';
import path from 'node:path';
import { rimrafSync } from 'rimraf';
import up from 'upyun';
export type UpyunOption = {
path: string[];
bucket?: string;
operator?: string;
password?: string;
};
const defaultOption: UpyunOption = {
path: ['images'],
bucket: process.env.UPYUN_BUCKET,
operator: process.env.UPYUN_OPERATOR,
password: process.env.UPYUN_PASSWORD,
};
export const upyun = (opt: UpyunOption): AstroIntegration => ({
name: 'upyun',
hooks: {
'astro:build:done': async ({ dir, logger }: { dir: URL; routes: RouteData[]; logger: AstroIntegrationLogger }) => {
const option: UpyunOption = { ...defaultOption, ...opt };
if (typeof option.bucket === 'undefined' || opt.bucket === null) {
logger.error('No "bucket" found on your configuration, skip deploying.');
return;
}
if (typeof option.operator === 'undefined' || opt.operator === null) {
logger.error('No "operator" found on your configuration, skip deploying.');
return;
}
if (typeof option.password === 'undefined' || opt.password === null) {
logger.error('No "password" found on your configuration, skip deploying.');
return;
}
if (option.path.length === 0) {
logger.warn('No files need to be uploaded to upyun. Skip.');
return;
}
// Create UPYUN Client
const service = new up.Service(option.bucket, option.operator, option.password);
const client = new up.Client(service);
// Upload one by one
const staticRootPath = dir.pathname;
for (const dir of option.path) {
logger.info(`Start to upload the ${dir} to upyun`);
await uploadFile(logger, client, staticRootPath, dir);
rimrafSync(path.join(staticRootPath, dir));
}
},
},
});
const normalizePath = (p: string): string => {
return p.includes(path.win32.sep) ? p.split(path.win32.sep).join(path.posix.sep) : p;
};
const uploadFile = async (logger: AstroIntegrationLogger, client: up.Client, root: string, current: string) => {
const fullPath = path.join(root, current);
const isDir = fs.statSync(fullPath).isDirectory();
// Visit file.
if (!isDir) {
const filePath = normalizePath(current);
const res1 = await client.headFile(filePath);
if (res1 === false) {
// This file need to be uploaded to upyun.
// Try Create directory first.
const newDir = filePath.substring(0, filePath.lastIndexOf(path.posix.sep));
const res2 = await client.headFile(newDir);
if (res2 === false) {
logger.info(`Try to create ${newDir} on upyun`);
await client.makeDir(newDir);
}
// Upload file.
logger.info(`Try to upload file ${filePath} to upyun`);
await client.putFile(filePath, fs.readFileSync(fullPath));
} else {
logger.info(`The file ${filePath} already exists on upyun. Skip by default.`);
}
return;
}
for (const item of fs.readdirSync(fullPath)) {
await uploadFile(logger, client, root, path.join(current, item));
}
};

src/pages/500.astro

@@ -1,5 +1,11 @@
 ---
 import BaseLayout from '@/layouts/BaseLayout.astro';
+
+interface Props {
+  error: unknown;
+}
+
+const { error } = Astro.props;
 ---
 <BaseLayout title="内部错误">
@@ -7,6 +7,7 @@ import BaseLayout from '@/layouts/BaseLayout.astro';
   <div class="my-auto">
     <h1 class="font-number">❌500</h1>
     <div>抱歉,网站系统出现内部错误。请刷新页面重试,或者返回上一页。</div>
+    <div>{error instanceof Error ? error.message : '未知错误'}</div>
   </div>
 </div>
 </BaseLayout>