feat: add the switch for uploading and make the upload path configurable.

This commit is contained in:
Yufan Sheng 2024-11-29 15:52:09 +08:00
parent 509c7a69d6
commit 523e4febf5
No known key found for this signature in database
GPG Key ID: 980439DFE585D1D8
4 changed files with 1726 additions and 1574 deletions

View File

@ -50,9 +50,25 @@ export default defineConfig({
## Options ## Options
```ts ```ts
type Path = {
// The directory in the astro static build that you want to upload to S3.
path: string;
// Whether to upload the files that are located in the inner directory.
recursive?: boolean;
// Whether to keep the original files after uploading.
keep?: boolean;
// Whether to override the existing files on S3.
// It will be overridden only when the content-length doesn't match the file size by default.
override?: boolean;
};
type Options = { type Options = {
// The directories that you want to upload to S3. // Enable the uploader
paths: string[]; enable?: boolean;
// The directory in the astro static build that you want to upload to S3.
paths: Array<string | Path>;
// Whether to upload the files that are located in the inner directory.
recursive?: boolean;
// Whether to keep the original files after uploading. // Whether to keep the original files after uploading.
keep?: boolean; keep?: boolean;
// Whether to override the existing files on S3. // Whether to override the existing files on S3.
@ -64,13 +80,13 @@ type Options = {
endpoint?: string; endpoint?: string;
// The name of the bucket. // The name of the bucket.
bucket: string; bucket: string;
// The root directory you want to upload files. // The root directory in S3 service that you want to upload files.
root?: string; root?: string;
// The access key id. // The access key id.
accessKey: string; accessKey: string;
// The secret access key. // The secret access key.
secretAccessKey: string; secretAccessKey: string;
// All the methods in https://docs.rs/opendal/latest/opendal/services/struct.S3.html#implementations can be treated as an extra option. // All the methods in https://docs.rs/opendal/latest/opendal/services/struct.S3.html#implementations can be treated as an extra option.
extraOptions?: Record<string, string> extraOptions?: Record<string, string>;
}; };
``` ```

3143
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
{ {
"name": "astro-uploader", "name": "astro-uploader",
"version": "1.1.3", "version": "1.2.0",
"description": "A uploader for uploading the Astro generated files through the S3 API.", "description": "A uploader for uploading the Astro generated files through the S3 API.",
"keywords": ["Astro", "S3", "withastro", "opendal"], "keywords": ["Astro", "S3", "withastro", "opendal"],
"bugs": { "bugs": {
@ -31,13 +31,13 @@
}, },
"dependencies": { "dependencies": {
"mime": "^4.0.4", "mime": "^4.0.4",
"opendal": "^0.47.1", "opendal": "^0.47.7",
"rimraf": "^6.0.1" "rimraf": "^6.0.1"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^1.8.3", "@biomejs/biome": "^1.9.4",
"@types/node": "^22.1.0", "@types/node": "^22.10.1",
"astro": "^4.13.1", "astro": "^4.16.16",
"unbuild": "^2.0.0" "unbuild": "^2.0.0"
} }
} }

View File

@ -6,9 +6,25 @@ import path from 'node:path';
import { Operator } from 'opendal'; import { Operator } from 'opendal';
import { rimrafSync } from 'rimraf'; import { rimrafSync } from 'rimraf';
type Path = {
// The directory in the astro static build that you want to upload to S3.
path: string;
// Whether to upload the files that are located in the inner directory.
recursive?: boolean;
// Whether to keep the original files after uploading.
keep?: boolean;
// Whether to override the existing files on S3.
// It will be overridden only when the content-length doesn't match the file size by default.
override?: boolean;
};
type Options = { type Options = {
// The directories that you want to upload to S3. // Enable the uploader
paths: string[]; enable?: boolean;
// The directory in the astro static build that you want to upload to S3.
paths: Array<string | Path>;
// Whether to upload the files that are located in the inner directory.
recursive?: boolean;
// Whether to keep the original files after uploading. // Whether to keep the original files after uploading.
keep?: boolean; keep?: boolean;
// Whether to override the existing files on S3. // Whether to override the existing files on S3.
@ -20,7 +36,7 @@ type Options = {
endpoint?: string; endpoint?: string;
// The name of the bucket. // The name of the bucket.
bucket: string; bucket: string;
// The root directory you want to upload files. // The root directory in S3 service that you want to upload files.
root?: string; root?: string;
// The access key id. // The access key id.
accessKey: string; accessKey: string;
@ -32,8 +48,22 @@ type Options = {
const S3Options = z const S3Options = z
.object({ .object({
paths: z.array(z.string()).min(1), enable: z.boolean().optional().default(true),
paths: z
.array(
z.union([
z.string(),
z.object({
path: z.string(),
keep: z.boolean().optional(),
recursive: z.boolean().optional(),
override: z.boolean(),
}),
]),
)
.min(1),
keep: z.boolean().default(false), keep: z.boolean().default(false),
recursive: z.boolean().default(true),
override: z.boolean().default(false), override: z.boolean().default(false),
region: z.string().min(1).default('auto'), region: z.string().min(1).default('auto'),
endpoint: z.string().url().optional(), endpoint: z.string().url().optional(),
@ -56,8 +86,20 @@ const S3Options = z
const parseOptions = (opts: Options, logger: AstroIntegrationLogger) => { const parseOptions = (opts: Options, logger: AstroIntegrationLogger) => {
try { try {
const { paths, keep, override, region, endpoint, bucket, root, accessKey, secretAccessKey, extraOptions } = const {
S3Options.parse(opts); enable,
paths,
recursive,
keep,
override,
region,
endpoint,
bucket,
root,
accessKey,
secretAccessKey,
extraOptions,
} = S3Options.parse(opts);
// Create opendal operator options. // Create opendal operator options.
// The common configurations are listed here https://docs.rs/opendal/latest/opendal/services/struct.S3.html#configuration // The common configurations are listed here https://docs.rs/opendal/latest/opendal/services/struct.S3.html#configuration
@ -73,7 +115,18 @@ const parseOptions = (opts: Options, logger: AstroIntegrationLogger) => {
options.endpoint = endpoint; options.endpoint = endpoint;
} }
return { options, paths, keep, override }; const resolvedPaths = paths.map((path) =>
typeof path === 'string'
? { path, recursive, keep, override }
: {
path: path.path,
recursive: path.recursive === undefined ? recursive : path.recursive,
keep: path.keep === undefined ? keep : path.keep,
override: path.override === undefined ? override : path.override,
},
);
return { options, paths: resolvedPaths, enable };
} catch (err) { } catch (err) {
if (err instanceof z.ZodError) { if (err instanceof z.ZodError) {
logger.error(`Uploader options validation error, there are ${err.issues.length} errors:`); logger.error(`Uploader options validation error, there are ${err.issues.length} errors:`);
@ -88,17 +141,15 @@ const parseOptions = (opts: Options, logger: AstroIntegrationLogger) => {
class Uploader { class Uploader {
private operator: Operator; private operator: Operator;
private override: boolean;
constructor(operator: Operator, override: boolean) { constructor(operator: Operator) {
this.operator = operator; this.operator = operator;
this.override = override;
} }
async isExist(key: string, size: number): Promise<boolean> { async isExist(key: string, size: number, override: boolean): Promise<boolean> {
try { try {
const { contentLength } = await this.operator.stat(key); const { contentLength } = await this.operator.stat(key);
if ((contentLength !== null && contentLength !== BigInt(size)) || this.override) { if ((contentLength !== null && contentLength !== BigInt(size)) || override) {
await this.operator.delete(key); await this.operator.delete(key);
return false; return false;
} }
@ -125,19 +176,25 @@ export const uploader = (opts: Options): AstroIntegration => ({
name: 'S3 Uploader', name: 'S3 Uploader',
hooks: { hooks: {
'astro:build:done': async ({ dir, logger }: { dir: URL; logger: AstroIntegrationLogger }) => { 'astro:build:done': async ({ dir, logger }: { dir: URL; logger: AstroIntegrationLogger }) => {
const { options, paths, keep, override } = parseOptions(opts, logger); const { options, paths, enable } = parseOptions(opts, logger);
const operator = new Operator('s3', options); if (!enable) {
logger.warn('Skip the astro uploader.');
return;
}
logger.info('Try to verify the S3 credentials.'); logger.info('Try to verify the S3 credentials.');
const operator = new Operator('s3', options);
await operator.check(); await operator.check();
logger.info(`Start to upload static files in dir ${paths} to S3 compatible backend.`); logger.info(`Start to upload static files in dir ${paths} to S3 compatible backend.`);
const uploader = new Uploader(operator);
const uploader = new Uploader(operator, override);
for (const current of paths) { for (const current of paths) {
await uploadFile(uploader, logger, current, dir.pathname); await uploadFile(uploader, logger, current, dir.pathname);
if (!keep) { if (!current.keep && current.recursive) {
rimrafSync(path.join(dir.pathname, current)); const resolvedPath = path.join(dir.pathname, current.path);
logger.info(`Remove the path: ${resolvedPath}`);
// Delete the whole path
rimrafSync(resolvedPath);
} }
} }
@ -151,8 +208,18 @@ const normalizePath = (current: string): string => {
return current.includes(path.win32.sep) ? current.split(path.win32.sep).join(path.posix.sep) : current; return current.includes(path.win32.sep) ? current.split(path.win32.sep).join(path.posix.sep) : current;
}; };
const uploadFile = async (uploader: Uploader, logger: AstroIntegrationLogger, current: string, root: string) => { const uploadFile = async (
const filePath = path.join(root, current); uploader: Uploader,
logger: AstroIntegrationLogger,
current: {
path: string;
recursive: boolean;
keep: boolean;
override: boolean;
},
root: string,
) => {
const filePath = current.path;
const fileStats = fs.statSync(filePath); const fileStats = fs.statSync(filePath);
const isFile = !fileStats.isDirectory(); const isFile = !fileStats.isDirectory();
const uploadAction = async (key: string) => { const uploadAction = async (key: string) => {
@ -162,19 +229,27 @@ const uploadFile = async (uploader: Uploader, logger: AstroIntegrationLogger, cu
}; };
if (isFile) { if (isFile) {
const key = normalizePath(current); const key = normalizePath(path.join(root, current.path));
if (await uploader.isExist(key, fileStats.size)) { if (await uploader.isExist(key, fileStats.size, current.override)) {
logger.info(`${key} exists on backend, skip.`); logger.info(`${key} exists on backend, skip.`);
} else { } else {
await uploadAction(key); await uploadAction(key);
} }
if (!current.keep && !current.recursive) {
rimrafSync(current.path);
}
} else { } else {
// Recursively upload files. // Recursively upload files or only upload the first hierarchy of the files.
for (const next of fs.readdirSync(filePath)) { for (const next of fs.readdirSync(filePath)) {
if (next.startsWith('.')) { if (next.startsWith('.')) {
continue; continue;
} }
await uploadFile(uploader, logger, path.join(current, next), root);
const nextFilePath = path.join(current.path, next);
if (current.recursive || !fs.statSync(nextFilePath).isDirectory()) {
await uploadFile(uploader, logger, { ...current, path: nextFilePath }, root);
}
} }
} }
}; };