feat: support both formdata and binary as input
this commit handles the file size limit being exceeded for both input types
inian committed Jun 15, 2021
1 parent c488b91 commit 69da4d5
Showing 1 changed file with 30 additions and 27 deletions.
57 changes: 30 additions & 27 deletions src/routes/object/createObject.ts
@@ -64,7 +64,7 @@ export default async function routes(fastify: FastifyInstance) {
const objectName = request.params['*']
const path = `${bucketName}/${objectName}`
const s3Key = `${projectRef}/${path}`
-    let mimeType: string, cacheControl: string
+    let mimeType: string, cacheControl: string, isTruncated: boolean
let uploadResult: ServiceOutputTypes

if (!isValidKey(objectName) || !isValidKey(bucketName)) {
@@ -132,34 +132,9 @@ export default async function routes(fastify: FastifyInstance) {
// busboy sets the truncated property on streams if limit was exceeded
// https://github.com/fastify/fastify-multipart/issues/196#issuecomment-782847791
/* @ts-expect-error: busboy doesn't export proper types */
-    const isTruncated = data.file.truncated
-    if (isTruncated) {
-      // undo operations as super user
-      await superUserPostgrest
-        .from<Obj>('objects')
-        .delete()
-        .match({
-          name: objectName,
-          bucket_id: bucketName,
-        })
-        .single()
-      await deleteObject(client, globalS3Bucket, s3Key)
-
-      // return an error response
-      return response
-        .status(400)
-        .send(
-          createResponse(
-            'The object exceeded the maximum allowed size',
-            '413',
-            'Payload too large'
-          )
-        )
-    }
+    isTruncated = data.file.truncated
} else {
// just assume its a binary file
-    // @todo we need to get cacheControl from header
-    // @todo how do we check max size
mimeType = request.headers['content-type']
cacheControl = request.headers['cache-control'] ?? 'no-cache'

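Background on the multipart branch above: fastify-multipart only exposes busboy's truncation flag after the file stream has been consumed past the configured limit, which is why the check can only run after the upload rather than before it. A minimal standalone sketch of how the flag surfaces (assumed setup; the route, limit, and handler names here are illustrative and not part of this repo):

import fastify from 'fastify'
import fastifyMultipart from 'fastify-multipart'

const app = fastify()
// Cap each uploaded file at 1 MiB; busboy enforces this while parsing.
app.register(fastifyMultipart, { limits: { fileSize: 1024 * 1024 } })

app.post('/upload', async (request, reply) => {
  const data = await request.file()
  // Drain the stream first: busboy only flips `truncated` once it has
  // actually cut the upload off at the limit.
  for await (const _chunk of data.file) {
    // a real handler would pipe these chunks to storage instead
  }
  /* @ts-expect-error: busboy doesn't export proper types */
  if (data.file.truncated) {
    return reply.status(413).send('Payload too large')
  }
  return reply.send({ uploaded: data.filename })
})

In the commit itself the stream is piped to S3 before the flag is read, so a truncated upload has to be undone afterwards, which is what the delete-and-cleanup block re-added in the next hunk does.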
@@ -171,6 +146,34 @@
mimeType,
cacheControl
)
+    const { fileSizeLimit } = getConfig()
+    // @todo more secure to get this from the stream or from s3 in the next step
+    console.log('comparing', Number(request.headers['content-length']), fileSizeLimit)
+    isTruncated = Number(request.headers['content-length']) > fileSizeLimit
}
+    console.log('trunc', isTruncated)
+    if (isTruncated) {
+      // undo operations as super user
+      await superUserPostgrest
+        .from<Obj>('objects')
+        .delete()
+        .match({
+          name: objectName,
+          bucket_id: bucketName,
+        })
+        .single()
+      await deleteObject(client, globalS3Bucket, s3Key)
+
+      // return an error response
+      return response
+        .status(400)
+        .send(
+          createResponse(
+            'The object exceeded the maximum allowed size',
+            '413',
+            'Payload too large'
+          )
+        )
+    }

const objectMetadata = await headObject(client, globalS3Bucket, s3Key)
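A note on the size check added for the raw-binary branch: comparing against request.headers['content-length'] trusts a client-supplied header, which the inline @todo acknowledges ('more secure to get this from the stream'). One stream-side alternative is to count bytes in a Transform and fail the pipeline the moment the limit is crossed. A sketch only; enforceSizeLimit is an illustrative name, not something this commit introduces:

import { Transform, TransformCallback } from 'stream'

// Errors out as soon as more than maxBytes have flowed through,
// regardless of what Content-Length the client claimed.
function enforceSizeLimit(maxBytes: number): Transform {
  let seen = 0
  return new Transform({
    transform(chunk: Buffer, _encoding: BufferEncoding, callback: TransformCallback) {
      seen += chunk.length
      if (seen > maxBytes) {
        callback(new Error(`stream exceeded ${maxBytes} bytes`))
        return
      }
      callback(null, chunk)
    },
  })
}

// Usage sketch: request.raw.pipe(enforceSizeLimit(fileSizeLimit)).pipe(uploadSink)

An aborted pipe would still leave a partial object behind, so the compensating cleanup in the diff (delete the objects row, then deleteObject on the bucket) stays necessary either way.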
