feat: initial version.
parent 0f00800dc1
commit 0c5dafc1a1
16  build.config.ts  Normal file
@@ -0,0 +1,16 @@
import { defineBuildConfig } from 'unbuild'

export default defineBuildConfig({
  entries: [
    'src/index',
  ],
  clean: true,
  declaration: true,
  rollup: {
    emitCJS: true,
  },
  externals: [
    'astro',
    'astro/zod'
  ],
})
10438  package-lock.json  generated  Normal file
File diff suppressed because it is too large
54  package.json  Normal file
@@ -0,0 +1,54 @@
{
  "name": "astro-uploader",
  "version": "1.0.0",
  "description": "An uploader for uploading the Astro generated files through the S3 API.",
  "author": "Yufan Sheng <syhily@gmail.com>",
  "license": "MIT",
  "main": "./dist/index.mjs",
  "repository": {
    "type": "git",
    "url": "https://github.com/syhily/astro-uploader"
  },
  "bugs": {
    "url": "https://github.com/syhily/astro-uploader/issues"
  },
  "keywords": [
    "Astro",
    "S3"
  ],
  "type": "module",
  "sideEffects": false,
  "exports": {
    ".": {
      "types": "./dist/index.d.mts",
      "default": "./dist/index.mjs"
    }
  },
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "unbuild",
    "stub": "unbuild --stub"
  },
  "peerDependencies": {
    "vite": "^2.9.0 || ^3.0.0-0 || ^4.0.0 || ^5.0.0-0"
  },
  "peerDependenciesMeta": {
    "vite": {
      "optional": true
    }
  },
  "devDependencies": {
    "@types/node": "^20.14.9",
    "astro": "^4.11.3",
    "unbuild": "^2.0.0"
  },
  "dependencies": {
    "@aws-sdk/client-s3": "^3.600.0",
    "mime": "^4.0.3",
    "rimraf": "^5.0.7"
  }
}
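For orientation, a minimal sketch (not part of this commit) of how the "." entry in the exports map above resolves for a consumer, assuming "npm run build" has emitted the dist/ artifacts via unbuild. The package name comes from the manifest; the import below is purely illustrative.

// Hypothetical consumer module; "astro-uploader" resolves through the "." export above:
//   "types"   -> ./dist/index.d.mts
//   "default" -> ./dist/index.mjs
import { uploader } from 'astro-uploader'; // named export defined in src/index.ts below
console.log(typeof uploader); // "function"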
238  src/index.ts  Normal file
@@ -0,0 +1,238 @@
import {
  DeleteObjectCommand,
  HeadBucketCommand,
  HeadObjectCommand,
  NoSuchBucket,
  NotFound,
  PutObjectCommand,
  S3Client,
} from "@aws-sdk/client-s3";
import type { AstroIntegration, AstroIntegrationLogger } from "astro";
import { z } from "astro/zod";
import mime from "mime";
import fs from "node:fs";
import path from "node:path";
import { rimrafSync } from "rimraf";

type Options = {
  // The directories that you want to upload to S3.
  paths: string[];
  // Whether to keep the original files after uploading.
  keep?: boolean;
  // Whether to override the existing files on S3.
  // By default, a file is only overridden when its content length doesn't match the file size.
  override?: boolean;
  // The S3 region; set it if you use the AWS S3 service.
  region?: string;
  // The endpoint; set it if you use a third-party S3 service.
  endpoint?: string;
  // The name of the bucket.
  bucket: string;
  // The root directory you want to upload files to.
  root?: string;
  // The access key ID.
  accessKey: string;
  // The secret access key.
  secretAccessKey: string;
};

const S3Options = z
  .object({
    paths: z.array(z.string()).min(1),
    keep: z.boolean().default(false),
    override: z.boolean().default(false),
    region: z.string().min(1).default("auto"),
    endpoint: z.string().url().optional(),
    bucket: z.string().min(1),
    root: z.string().default("/"),
    accessKey: z.string().min(1),
    secretAccessKey: z.string().min(1),
  })
  .strict()
  .superRefine((opts, { addIssue }) => {
    if (opts.region === "auto" && opts.endpoint === undefined) {
      addIssue({
        fatal: true,
        code: "custom",
        message: "either the region or the endpoint should be provided",
      });
    }
  });

const parseOptions = (
  opts: Options,
  logger: AstroIntegrationLogger,
): z.infer<typeof S3Options> => {
  try {
    return S3Options.parse(opts);
  } catch (err) {
    if (err instanceof z.ZodError) {
      logger.error(
        `Uploader options validation error, there are ${err.issues.length} errors:`,
      );
      for (const issue of err.issues) {
        logger.error(issue.message);
      }
    }

    throw err;
  }
};

class Uploader {
  private client: S3Client;
  private options: z.infer<typeof S3Options>;

  constructor(client: S3Client, options: z.infer<typeof S3Options>) {
    this.client = client;
    this.options = options;
  }

  private key(key: string): string {
    return path.posix.join(this.options.root, key);
  }

  private async delete(key: string): Promise<void> {
    const deleteCmd = new DeleteObjectCommand({
      Bucket: this.options.bucket,
      Key: this.key(key),
    });
    await this.client.send(deleteCmd);
  }

  async isExist(key: string, size: number): Promise<boolean> {
    const headCmd = new HeadObjectCommand({
      Bucket: this.options.bucket,
      Key: this.key(key),
    });
    try {
      const { ContentLength } = await this.client.send(headCmd);
      // The file checksum should be uploaded with the file, so we only check the content length here.
      if (
        this.options.override ||
        (ContentLength !== undefined && ContentLength !== size)
      ) {
        await this.delete(key);
        return false;
      }
      return true;
    } catch (error) {
      if (error instanceof NotFound) {
        return false;
      }
      throw error;
    }
  }

  async write(key: string, body: Buffer) {
    const contentType = mime.getType(key);
    const putCmd = new PutObjectCommand({
      Bucket: this.options.bucket,
      Key: this.key(key),
      Body: body,
      ContentType: contentType === null ? undefined : contentType,
    });

    await this.client.send(putCmd);
  }
}

export const uploader = (opts: Options): AstroIntegration => ({
  name: "S3 Uploader",
  hooks: {
    "astro:build:done": async ({
      dir,
      logger,
    }: { dir: URL; logger: AstroIntegrationLogger }) => {
      const options = parseOptions(opts, logger);
      const {
        paths,
        keep,
        region,
        endpoint,
        bucket,
        accessKey,
        secretAccessKey,
      } = options;
      const client = new S3Client({
        region: region,
        endpoint: endpoint,
        credentials: {
          accessKeyId: accessKey,
          secretAccessKey: secretAccessKey,
        },
        useGlobalEndpoint: endpoint !== undefined && endpoint !== "",
      });

      logger.info("Verifying the S3 credentials.");

      try {
        await client.send(new HeadBucketCommand({ Bucket: bucket }));
      } catch (err) {
        // If the bucket doesn't exist.
        if (err instanceof NoSuchBucket) {
          logger.error(
            `The bucket ${bucket} doesn't exist in region: ${region} endpoint: ${endpoint}`,
          );
        } else {
          logger.error(JSON.stringify(err));
        }
        throw err;
      }

      logger.info(
        `Starting to upload static files in ${paths} to the S3-compatible backend.`,
      );

      const uploader = new Uploader(client, options);
      for (const current of paths) {
        await uploadFile(uploader, logger, current, dir.pathname);
        if (!keep) {
          rimrafSync(path.join(dir.pathname, current));
        }
      }

      logger.info("Uploaded all the files successfully.");
    },
  },
});

// Convert a Windows path into a Unix path.
const normalizePath = (current: string): string => {
  return current.includes(path.win32.sep)
    ? current.split(path.win32.sep).join(path.posix.sep)
    : current;
};

const uploadFile = async (
  uploader: Uploader,
  logger: AstroIntegrationLogger,
  current: string,
  root: string,
) => {
  const filePath = path.join(root, current);
  const fileStats = fs.statSync(filePath);
  const isFile = !fileStats.isDirectory();
  const uploadAction = async (key: string) => {
    logger.info(`Starting to upload file: ${key}`);
    const body = fs.readFileSync(filePath);
    await uploader.write(key, body);
  };

  if (isFile) {
    const key = normalizePath(current);
    if (await uploader.isExist(key, fileStats.size)) {
      logger.info(`${key} already exists on the backend, skipping.`);
    } else {
      await uploadAction(key);
    }
  } else {
    // Recursively upload files.
    for (const next of fs.readdirSync(filePath)) {
      if (next.startsWith(".")) {
        continue;
      }
      await uploadFile(uploader, logger, path.join(current, next), root);
    }
  }
};
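As a usage illustration (not part of this commit), a minimal astro.config.mjs sketch for a hypothetical consumer project wiring the integration in. The option names follow the Options type above; the paths, endpoint, bucket, and environment variable names are assumptions made for the example.

// astro.config.mjs in a hypothetical consumer project
import { defineConfig } from 'astro/config';
import { uploader } from 'astro-uploader';

export default defineConfig({
  integrations: [
    uploader({
      paths: ['images', 'assets'],        // directories inside the build output to upload
      endpoint: 'https://s3.example.com', // any S3-compatible endpoint (or set region for AWS S3)
      bucket: 'my-static-assets',         // hypothetical bucket name
      accessKey: process.env.S3_ACCESS_KEY ?? '',
      secretAccessKey: process.env.S3_SECRET_KEY ?? '',
    }),
  ],
});

With keep left at its default of false, each uploaded directory is removed from the local build output via rimrafSync once the upload finishes, and isExist skips objects whose content length already matches unless override is set.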
15  tsconfig.json  Normal file
@@ -0,0 +1,15 @@
{
  "$schema": "https://json.schemastore.org/tsconfig.json",
  "extends": "astro/tsconfigs/strict",
  "compilerOptions": {
    "baseUrl": ".",
    "strict": true,
    "target": "ESNext",
    "module": "ESNext",
    "strictNullChecks": true,
    "paths": {
      "@/options": ["./options.ts"],
      "@/*": ["src/*"]
    }
  }
}
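The paths aliases above map "@/*" onto "src/*" (and "@/options" onto "./options.ts", a file not present in this commit), so project-internal imports can look like the hypothetical line below at type-check time.

// Hypothetical project-internal import, resolved through the "@/*" alias:
import { uploader } from '@/index'; // -> src/index.ts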