Add S3 integration
Doing so has a number of benefits:
- Overcomes the 10 MB limit of the API Gateway that the lambdas have to go through.
- By using a hash of the code as the object key we can return previously generated assets, i.e. caching (see the sketch below).
- Cost: transferring assets into the bucket stays within the AWS ecosystem and is faster than returning them through the gateway, and therefore the lambdas execute for less time.
- Sets us up for the future: when generating artifacts for repos on a change to master etc. we want to store these assets somewhere, and S3 is an obvious choice.
- Solves a weird CORS issue where I couldn't get CORS working with binaryMediaTypes enabled; binary media types aren't needed when dumping into S3.

Resolves #316
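The makeHash, checkIfAlreadyExists, and getObjectUrl helpers come from ../common/utils, which isn't part of this diff. Below is a rough sketch of the caching flow they enable, assuming the aws-sdk v2 S3 client used in the handler; the helper bodies are guesses, only the names and call sites come from the code in the diff:

const crypto = require('crypto')

// Assumed implementation: identical request bodies hash to the same S3 key,
// which is what turns the bucket into a cache of previously generated assets.
const makeHash = (body) => crypto.createHash('sha256').update(body).digest('hex')

// Assumed implementation: HEAD the object; a NotFound error means the asset
// still has to be generated. The consoleMessage value is a placeholder guess.
const checkIfAlreadyExists = async (params, s3) => {
  try {
    await s3.headObject(params).promise()
    return { isAlreadyInBucket: true, consoleMessage: 'served from cache' }
  } catch (err) {
    if (err.code === 'NotFound') {
      return { isAlreadyInBucket: false }
    }
    throw err
  }
}

// Assumed implementation: a presigned GET url, so clients download the asset
// from the bucket instead of pulling it back through API Gateway.
const getObjectUrl = (params, s3) => s3.getSignedUrl('getObject', params)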
@@ -1,95 +1,88 @@
-const { runScad, stlExport } = require('./runScad')
+const { runScad } = require('./runScad')
 const middy = require('middy')
 const { cors } = require('middy/middlewares')
+const AWS = require('aws-sdk')
+const {
+  makeHash,
+  checkIfAlreadyExists,
+  getObjectUrl,
+  loggerWrap,
+  storeAssetAndReturnUrl,
+} = require('../common/utils')
 
 const health = async () => {
   console.log('Health endpoint')
   return {
     statusCode: 200,
     body: 'ok',
   }
 }
 
+// cors true does not seem to work in serverless.yml, perhaps docker lambdas aren't covered by that config
+// special lambda just for responding to options requests
+const preflightOptions = (req, _context, callback) => {
+  const response = {
+    statusCode: 204,
+    headers: {
+      'Access-Control-Allow-Origin': '*',
+      'Access-Control-Allow-Methods': 'POST',
+      'Access-Control-Allow-Headers': '*',
+    },
+  }
+  callback(null, response)
+}
+const s3 = new AWS.S3()
+
 const preview = async (req, _context, callback) => {
   _context.callbackWaitsForEmptyEventLoop = false
-  const eventBody = Buffer.from(req.body, 'base64').toString('ascii')
-  console.log(eventBody, 'eventBody')
-  const { file, settings } = JSON.parse(eventBody)
-  const { error, result, tempFile } = await runScad({ file, settings })
-  if (error) {
-    const response = {
-      statusCode: 400,
-      body: JSON.stringify({ error, tempFile }),
-    }
-    callback(null, response)
-  } else {
-    console.log(`got result in route: ${result}, file is: ${tempFile}`)
-    const fs = require('fs')
-    const image = fs.readFileSync(`/tmp/${tempFile}/output.png`, {
-      encoding: 'base64',
-    })
-    console.log(image, 'encoded image')
+  const eventBody = req.body
+  console.log('eventBody', eventBody)
+  const key = `${makeHash(eventBody)}.png`
+  console.log('key', key)
+
+  const params = {
+    Bucket: process.env.BUCKET,
+    Key: key,
+  }
+  const previousAsset = await checkIfAlreadyExists(params, s3)
+  if (previousAsset.isAlreadyInBucket) {
+    console.log('already in bucket')
     const response = {
       statusCode: 200,
       body: JSON.stringify({
-        imageBase64: image,
-        result,
-        tempFile,
+        url: getObjectUrl(params, s3),
+        consoleMessage: previousAsset.consoleMessage,
       }),
     }
     callback(null, response)
+    return
   }
+
+  const { file, settings } = JSON.parse(eventBody)
+  const { error, consoleMessage, fullPath } = await runScad({ file, settings })
+  await storeAssetAndReturnUrl({
+    error,
+    callback,
+    fullPath,
+    consoleMessage,
+    key,
+    s3,
+    params,
+  })
 }
 
-const stl = async (req, _context, callback) => {
-  _context.callbackWaitsForEmptyEventLoop = false
-  const eventBody = Buffer.from(req.body, 'base64').toString('ascii')
-  console.log(eventBody, 'eventBody')
-  const { file } = JSON.parse(eventBody)
-  const { error, result, tempFile } = await stlExport({ file })
-  if (error) {
-    const response = {
-      statusCode: 400,
-      body: { error, tempFile },
-    }
-    callback(null, response)
-  } else {
-    console.log(`got result in route: ${result}, file is: ${tempFile}`)
-    const fs = require('fs')
-    const stl = fs.readFileSync(`/tmp/${tempFile}/output.stl`, {
-      encoding: 'base64',
-    })
-    console.log('encoded stl', stl)
-    const response = {
-      statusCode: 200,
-      headers: {
-        'content-type': 'application/stl',
-      },
-      body: stl,
-      isBase64Encoded: true,
-    }
-    console.log('callback fired')
-    callback(null, response)
-  }
-}
+// const stl = async (req, _context, callback) => {
+//   _context.callbackWaitsForEmptyEventLoop = false
+//   const eventBody = Buffer.from(req.body, 'base64').toString('ascii')
+//   console.log(eventBody, 'eventBody')
+//   const { file } = JSON.parse(eventBody)
+//   const { error, result, tempFile } = await stlExport({ file })
+//   if (error) {
+//     const response = {
+//       statusCode: 400,
+//       body: { error, tempFile },
+//     }
+//     callback(null, response)
+//   } else {
+//     console.log(`got result in route: ${result}, file is: ${tempFile}`)
+//     const fs = require('fs')
+//     const stl = fs.readFileSync(`/tmp/${tempFile}/output.stl`, {
+//       encoding: 'base64',
+//     })
+//     console.log('encoded stl', stl)
+//     const response = {
+//       statusCode: 200,
+//       headers: {
+//         'content-type': 'application/stl',
+//       },
+//       body: stl,
+//       isBase64Encoded: true,
+//     }
+//     console.log('callback fired')
+//     callback(null, response)
+//   }
+// }
 
 module.exports = {
   health: middy(health).use(cors()),
-  stl: middy(stl).use(cors()),
-  preview: middy(preview).use(cors()),
+  preflightOptions,
+  // stl: middy(stl).use(cors()),
+  preview: middy(loggerWrap(preview)).use(cors()),
 }
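For callers, the practical change is that preview now returns a url into the bucket instead of pushing the whole asset back through API Gateway. A hypothetical client flow follows; the endpoint path and response shape are inferred from the handler above, not taken from this commit:

// Hypothetical client usage: POST the scad source, read the returned url,
// then download the asset straight from S3, sidestepping the gateway's 10 MB limit.
const fetchPreview = async (file, settings) => {
  const res = await fetch('https://example.execute-api.amazonaws.com/preview', {
    method: 'POST',
    body: JSON.stringify({ file, settings }),
  })
  const { url, consoleMessage } = await res.json()
  console.log(consoleMessage)
  return fetch(url) // presigned S3 url produced by getObjectUrl / storeAssetAndReturnUrl
}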