Skip to content
This repository was archived by the owner on Sep 17, 2024. It is now read-only.

Commit 20f3bc3

Browse files
committed
feat: Create uploads module
1 parent 1317b2b commit 20f3bc3

File tree

19 files changed

+1438
-0
lines changed

19 files changed

+1438
-0
lines changed

modules/uploads/config.ts

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import { UploadSize } from "./utils/data_size.ts";

/**
 * User-provided configuration for the uploads module.
 *
 * Size limits are `UploadSize` strings (e.g. "30mib"); unset fields fall
 * back to the DEFAULT_* constants below.
 */
export interface Config {
	// Maximum combined size of a single (non-multipart) upload batch.
	maxUploadSize?: UploadSize;
	// Maximum combined size of a multipart upload batch.
	maxMultipartUploadSize?: UploadSize;
	// Maximum number of files allowed in one upload batch.
	maxFilesPerUpload?: number;
	// Chunk size used when splitting a file into multipart-upload parts.
	defaultMultipartChunkSize?: UploadSize;

	// Connection details for the backing S3-compatible bucket.
	s3: {
		bucketName: string;
		region: string;
		endpoint: string;

		// Optional — presumably falls back to environment variables when
		// unset (see the `s3_not_configured` error); confirm in bucket.ts.
		accessKeyId?: string;
		secretAccessKey?: string;
	};
}

export const DEFAULT_MAX_FILES_PER_UPLOAD = 10;

export const DEFAULT_MAX_UPLOAD_SIZE: UploadSize = "30mib";
export const DEFAULT_MAX_MULTIPART_UPLOAD_SIZE: UploadSize = "10gib";
export const DEFAULT_MULTIPART_CHUNK_SIZE: UploadSize = "10mib";
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
-- CreateTable
-- One row per upload batch. `completedAt` / `deletedAt` stay NULL until the
-- upload is completed / soft-deleted by the module's scripts.
CREATE TABLE "Upload" (
    "id" UUID NOT NULL,
    "userId" UUID,
    "bucket" TEXT NOT NULL,
    "contentLength" BIGINT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "completedAt" TIMESTAMP(3),
    "deletedAt" TIMESTAMP(3),

    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- One row per file in an upload batch, identified by (uploadId, path).
-- `multipartUploadId` is set only for files transferred via S3 multipart
-- upload.
CREATE TABLE "Files" (
    "uploadId" UUID NOT NULL,
    "multipartUploadId" TEXT,
    "path" TEXT NOT NULL,
    "mime" TEXT,
    "contentLength" BIGINT NOT NULL,

    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
);

-- AddForeignKey
-- RESTRICT: an Upload row cannot be removed while Files rows reference it.
ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Prisma migration lock file — records the datasource provider the
# migrations in this directory were generated against.
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"

modules/uploads/db/schema.prisma

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
// Do not modify this `datasource` block
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

// One upload batch. Timestamps track the lifecycle:
// createdAt -> completedAt (set by `complete`) -> deletedAt (soft delete).
model Upload {
  id     String  @id @default(uuid()) @db.Uuid
  userId String? @db.Uuid

  bucket        String
  contentLength BigInt

  createdAt   DateTime  @default(now())
  updatedAt   DateTime  @updatedAt
  completedAt DateTime?
  deletedAt   DateTime?

  files Files[] @relation("Files")
}

// A single file within an upload batch, keyed by (uploadId, path).
model Files {
  uploadId String @db.Uuid
  upload   Upload @relation("Files", fields: [uploadId], references: [id])

  // Set only when the file is transferred via S3 multipart upload.
  multipartUploadId String?

  path          String
  mime          String?
  contentLength BigInt

  @@id([uploadId, path])
}

modules/uploads/module.yaml

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
scripts:
  prepare:
    name: Prepare Upload
    description: Prepare an upload batch for data transfer
  complete:
    name: Complete Upload
    description: Alert the module that the upload has been completed
  get:
    name: Get Upload Metadata
    description: Get the metadata (including contained files) for specified upload IDs
  get_public_file_urls:
    name: Get File Link
    description: Get presigned download links for each of the specified files
  list_for_user:
    name: List Uploads for Users
    description: Get a list of upload IDs associated with the specified user IDs
  delete:
    name: Delete Upload
    description: Removes the upload and deletes the files from the bucket
errors:
  no_files:
    name: No Files Provided
    description: An upload must have at least 1 file
  too_many_files:
    name: Too Many Files Provided
    description: There is a limit to how many files can be put into a single upload (see config)
  duplicate_paths:
    name: Duplicate Paths Provided
    description: An upload cannot contain 2 files with the same paths (see `cause` for offending paths)
  size_limit_exceeded:
    name: Combined Size Limit Exceeded
    description: There is a maximum total size per upload (see config)
  upload_not_found:
    name: Upload Not Found
    description: The provided upload ID didn't match any known existing uploads
  # Thrown by `complete` when files are missing from the bucket.
  files_not_uploaded:
    name: Files Not Uploaded
    description: "`complete` was called before all files in the upload were written to the bucket"
  upload_already_completed:
    name: Upload Already Completed
    # Quoted because a plain YAML scalar cannot start with a backtick.
    description: "`complete` was already called on this upload"
  s3_not_configured:
    name: S3 Not Configured
    description: The S3 bucket is not configured (missing env variables)
  too_many_chunks:
    name: Possibility Of Too Many Chunks
    description: |
      AWS S3 has a limit on the number of parts that can be uploaded in a
      multipart upload. This limit is 10,000 parts. If the number of chunks
      required to upload the maximum multipart upload size exceeds this limit,
      any operation will preemptively throw this error.
  multipart_upload_completion_fail:
    name: Multipart Upload Completion Failure
    description: The multipart upload failed to complete (see `cause` for more information)
  # Thrown by `delete` when one or more bucket objects could not be removed.
  failed_to_delete:
    name: Failed To Delete Files
    description: One or more files could not be deleted from the bucket (see `meta` for details)
dependencies: {}

modules/uploads/scripts/complete.ts

Lines changed: 136 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,136 @@
1+
import { RuntimeError, ScriptContext } from "../_gen/scripts/complete.ts";
2+
import {
3+
completeMultipartUpload,
4+
getMultipartUploadParts,
5+
keyExists,
6+
} from "../utils/bucket.ts";
7+
import { getConfig } from "../utils/config_defaults.ts";
8+
import { getKey, prismaToOutputWithFiles, Upload } from "../utils/types.ts";
9+
10+
// Input to `complete`: the upload ID returned by `prepare`.
export interface Request {
	uploadId: string;
}

// The finalized upload, including its file metadata.
export interface Response {
	upload: Upload;
}
17+
18+
export async function run(
19+
ctx: ScriptContext,
20+
req: Request,
21+
): Promise<Response> {
22+
const config = getConfig(ctx.userConfig);
23+
24+
const newUpload = await ctx.db.$transaction(async (db) => {
25+
// Find the upload by ID
26+
const upload = await db.upload.findFirst({
27+
where: {
28+
id: req.uploadId,
29+
},
30+
select: {
31+
id: true,
32+
userId: true,
33+
bucket: true,
34+
contentLength: true,
35+
files: true,
36+
createdAt: true,
37+
updatedAt: true,
38+
completedAt: true,
39+
},
40+
});
41+
42+
// Error if the upload wasn't prepared
43+
if (!upload) {
44+
throw new RuntimeError(
45+
"upload_not_found",
46+
{
47+
meta: { uploadId: req.uploadId },
48+
},
49+
);
50+
}
51+
52+
// Check with S3 to see if the files were uploaded
53+
const fileExistencePromises = upload.files.map(
54+
async (file) => {
55+
// If the file was uploaded in parts, complete the multipart upload
56+
if (file.multipartUploadId) {
57+
try {
58+
const parts = await getMultipartUploadParts(
59+
config.s3,
60+
getKey(upload.id, file.path),
61+
file.multipartUploadId,
62+
);
63+
if (parts.length === 0) return false;
64+
65+
await completeMultipartUpload(
66+
config.s3,
67+
getKey(upload.id, file.path),
68+
file.multipartUploadId,
69+
parts,
70+
);
71+
} catch (e) {
72+
throw new RuntimeError(
73+
"multipart_upload_completion_fail",
74+
{ cause: e },
75+
);
76+
}
77+
78+
return true;
79+
} else {
80+
// Check if the file exists
81+
return await keyExists(config.s3, getKey(upload.id, file.path));
82+
}
83+
},
84+
);
85+
const fileExistence = await Promise.all(fileExistencePromises);
86+
const filesAllExist = fileExistence.every(Boolean);
87+
if (!filesAllExist) {
88+
const missingFiles = upload.files.filter((_, i) => !fileExistence[i]);
89+
throw new RuntimeError(
90+
"files_not_uploaded",
91+
{
92+
meta: {
93+
uploadId: req.uploadId,
94+
missingFiles: missingFiles.map((file) => file.path),
95+
},
96+
},
97+
);
98+
}
99+
100+
// Error if `complete` was already called with this ID
101+
if (upload.completedAt !== null) {
102+
throw new RuntimeError(
103+
"upload_already_completed",
104+
{
105+
meta: { uploadId: req.uploadId },
106+
},
107+
);
108+
}
109+
110+
// Update the upload to mark it as completed
111+
const completedUpload = await db.upload.update({
112+
where: {
113+
id: req.uploadId,
114+
},
115+
data: {
116+
completedAt: new Date().toISOString(),
117+
},
118+
select: {
119+
id: true,
120+
userId: true,
121+
bucket: true,
122+
contentLength: true,
123+
files: true,
124+
createdAt: true,
125+
updatedAt: true,
126+
completedAt: true,
127+
},
128+
});
129+
130+
return completedUpload;
131+
});
132+
133+
return {
134+
upload: prismaToOutputWithFiles(newUpload),
135+
};
136+
}

modules/uploads/scripts/delete.ts

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
import { RuntimeError, ScriptContext } from "../_gen/scripts/delete.ts";
2+
import { getKey } from "../utils/types.ts";
3+
import { deleteKeys } from "../utils/bucket.ts";
4+
import { getConfig } from "../utils/config_defaults.ts";
5+
6+
// Input to `delete`: the ID of the upload to remove.
export interface Request {
	uploadId: string;
}

export interface Response {
	// Total content length of the deleted upload; stringified because the
	// underlying column is a BIGINT, which does not survive JSON.
	bytesDeleted: string;
}
13+
14+
export async function run(
15+
ctx: ScriptContext,
16+
req: Request,
17+
): Promise<Response> {
18+
const config = getConfig(ctx.userConfig);
19+
20+
const bytesDeleted = await ctx.db.$transaction(async (db) => {
21+
const upload = await db.upload.findFirst({
22+
where: {
23+
id: req.uploadId,
24+
completedAt: { not: null },
25+
deletedAt: null,
26+
},
27+
select: {
28+
id: true,
29+
userId: true,
30+
bucket: true,
31+
contentLength: true,
32+
files: true,
33+
createdAt: true,
34+
updatedAt: true,
35+
completedAt: true,
36+
},
37+
});
38+
if (!upload) {
39+
throw new RuntimeError(
40+
"upload_not_found",
41+
{
42+
meta: {
43+
modified: false,
44+
uploadId: req.uploadId,
45+
},
46+
},
47+
);
48+
}
49+
50+
const filesToDelete = upload.files.map((file) =>
51+
getKey(file.uploadId, file.path)
52+
);
53+
const deleteResults = await deleteKeys(config.s3, filesToDelete);
54+
55+
const failures = upload.files
56+
.map((file, i) => [file, deleteResults[i]] as const)
57+
.filter(([, successfullyDeleted]) => !successfullyDeleted)
58+
.map(([file]) => file);
59+
60+
if (failures.length) {
61+
const failedPaths = JSON.stringify(failures.map((file) => file.path));
62+
throw new RuntimeError(
63+
"failed_to_delete",
64+
{
65+
meta: {
66+
modified: failures.length !== filesToDelete.length,
67+
reason: `Failed to delete files with paths ${failedPaths}`,
68+
},
69+
},
70+
);
71+
}
72+
73+
await db.upload.update({
74+
where: {
75+
id: req.uploadId,
76+
},
77+
data: {
78+
deletedAt: new Date().toISOString(),
79+
},
80+
});
81+
82+
return upload.contentLength.toString();
83+
});
84+
return { bytesDeleted };
85+
}

0 commit comments

Comments
 (0)