Handling multipart/form-data POST with Express in Cloud Functions - firebase

I've been trying to handle POSTs (multipart/form-data) with a Firebase function and Express, but it just doesn't work. I tried this on a local server and it works just fine; everything's the same except it's not wrapped in a Firebase function.
Besides altering the request object, Cloud Functions also seems to break the way busboy works.
I've tried the different solutions presented here, but they just don't work. As one user mentions, the callbacks passed to busboy (to be called when a 'field' is found or when it finishes going through the data) are never called, and the function just hangs.
Any ideas?
Here's my function's code for reference:
const functions = require('firebase-functions');
const express = require('express');
const getRawBody = require('raw-body');
const contentType = require('content-type');
const Busboy = require('busboy');

const app = express();

const logging = (req, res, next) => {
  console.log(`> request body: ${req.body}`);
  next();
};

const toRawBody = (req, res, next) => {
  const options = {
    length: req.headers['content-length'],
    limit: '1mb',
    encoding: contentType.parse(req).parameters.charset
  };
  getRawBody(req, options)
    .then(rawBody => {
      req.rawBody = rawBody;
      next();
    })
    .catch(error => {
      return next(error);
    });
};

const handlePostWithBusboy = (req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  const formData = {};
  busboy.on('field', (fieldname, value) => {
    formData[fieldname] = value;
  });
  busboy.on('finish', () => {
    console.log(`> form data: ${JSON.stringify(formData)}`);
    res.status(200).send(formData);
  });
  busboy.end(req.rawBody);
};

app.post('/', logging, toRawBody, handlePostWithBusboy);

const exchange = functions.https.onRequest((req, res) => {
  if (!req.path) {
    // Ensure Express sees a leading slash when invoked via Cloud Functions
    req.url = `/${req.url}`;
  }
  return app(req, res);
});

module.exports = {
  exchange
};

The documentation Doug referred to in his answer is good. An important caveat, though, is that rawBody does not work in the emulator. The workaround is to do:
if (req.rawBody) {
  busboy.end(req.rawBody);
} else {
  req.pipe(busboy);
}
As described in this issue:
https://github.com/GoogleCloudPlatform/cloud-functions-emulator/issues/161#issuecomment-376563784

Please read the documentation for handling multipart form uploads.
... if you want your Cloud Function to process multipart/form-data, you can use the rawBody property of the request.
Because of the way Cloud Functions pre-processes some requests, you can expect that some Express middleware will not work, and that's what you're running into.
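For illustration, a minimal sketch of what that looks like with busboy (assuming the firebase-functions and busboy setup from the question), feeding it the pre-read rawBody instead of piping the request stream, which hangs on deployed functions because the body has already been consumed:

const Busboy = require('busboy');

exports.form = functions.https.onRequest((req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  const fields = {};
  busboy.on('field', (name, value) => (fields[name] = value));
  busboy.on('finish', () => res.status(200).send(fields));
  // Cloud Functions has already read the stream; hand busboy the buffered body
  busboy.end(req.rawBody);
});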

I've combined the previous two answers into an easy-to-use async function.
const Busboy = require('busboy');
const os = require('os');
const path = require('path');
const fs = require('fs');

module.exports = function extractMultipartFormData(req) {
  return new Promise((resolve, reject) => {
    if (req.method != 'POST') {
      return reject(405);
    } else {
      const busboy = new Busboy({ headers: req.headers });
      const tmpdir = os.tmpdir();
      const fields = {};
      const fileWrites = [];
      const uploads = {};

      busboy.on('field', (fieldname, val) => (fields[fieldname] = val));

      busboy.on('file', (fieldname, file, filename) => {
        const filepath = path.join(tmpdir, filename);
        const writeStream = fs.createWriteStream(filepath);
        uploads[fieldname] = filepath;
        file.pipe(writeStream);

        const promise = new Promise((resolve, reject) => {
          file.on('end', () => {
            writeStream.end();
          });
          writeStream.on('finish', resolve);
          writeStream.on('error', reject);
        });

        fileWrites.push(promise);
      });

      busboy.on('finish', async () => {
        const result = { fields, uploads: {} };
        await Promise.all(fileWrites);

        for (const file in uploads) {
          const filename = uploads[file];
          result.uploads[file] = fs.readFileSync(filename);
          fs.unlinkSync(filename);
        }

        resolve(result);
      });

      busboy.on('error', reject);

      if (req.rawBody) {
        busboy.end(req.rawBody);
      } else {
        req.pipe(busboy);
      }
    }
  });
};
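For reference, a hedged sketch of how this helper might be called from an HTTPS function (the module path, function name, and response shape are illustrative):

const functions = require('firebase-functions');
const extractMultipartFormData = require('./extractMultipartFormData');

exports.upload = functions.https.onRequest(async (req, res) => {
  try {
    const { fields, uploads } = await extractMultipartFormData(req);
    // `uploads` maps field names to Buffers read back from the temp files
    res.status(200).send({ fields, files: Object.keys(uploads) });
  } catch (code) {
    // The helper rejects with 405 for non-POST requests
    res.status(typeof code === 'number' ? code : 500).end();
  }
});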

Related

telemetry with next.js and opentelemetry

Is there any way to deploy OpenTelemetry in Next.js without having to create a custom server?
What I found boils down to the following code:
tracing.js:
'use strict'

const opentelemetry = require('@opentelemetry/sdk-node');
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node');
const { OTLPTraceExporter } = require('@opentelemetry/exporter-otlp-grpc');
const { Resource } = require('@opentelemetry/resources');
const { SemanticResourceAttributes } = require('@opentelemetry/semantic-conventions');

// custom nextjs server
const { startServer } = require('./server');

// configure the SDK to export telemetry data to the console
// enable all auto-instrumentations from the meta package
const exporterOptions = {
  url: 'http://localhost:4317',
};

const traceExporter = new OTLPTraceExporter(exporterOptions);
const sdk = new opentelemetry.NodeSDK({
  resource: new Resource({
    [SemanticResourceAttributes.SERVICE_NAME]: 'SigNoz-Nextjs-Example'
  }),
  traceExporter,
  instrumentations: [getNodeAutoInstrumentations()]
});

// initialize the SDK and register with the OpenTelemetry API
// this enables the API to record telemetry
sdk.start()
  .then(() => console.log('Tracing initialized'))
  .then(() => startServer())
  .catch((error) => console.log('Error initializing tracing', error));

// gracefully shut down the SDK on process exit
process.on('SIGTERM', () => {
  sdk.shutdown()
    .then(() => console.log('Tracing terminated'))
    .catch((error) => console.log('Error terminating tracing', error))
    .finally(() => process.exit(0));
});

module.exports = sdk
server.js:
const { createServer } = require("http")
const { parse } = require("url")
const next = require("next")

const dev = process.env.NODE_ENV !== "production"
const app = next({ dev })
const handle = app.getRequestHandler()

module.exports = {
  startServer: async function startServer() {
    return app.prepare().then(() => {
      createServer((req, res) => {
        const parsedUrl = parse(req.url, true)
        handle(req, res, parsedUrl)
      }).listen(8080, (err) => {
        if (err) throw err
        console.log("> Ready on http://localhost:8080")
      })
    })
  },
}
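For context, with this wiring the app would presumably be started with node tracing.js (which calls startServer() once sdk.start() resolves) rather than with next start.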
This comes from this link.
I don't think this is the right or "clean" approach. How do I implement it in a less "invasive" way?

What's the proper way to return a response using Formidable in a Next.js API route?

I'm sending an uploaded file to a Next.js API route using FormData. The file is then processed on the API route using formidable and passed to the Sanity client in order to upload the asset, but I can't return the data to the client... I get this message in the console:
API resolved without sending a response for /api/posts/uploadImage, this may result in stalled requests.
When console-logging the document inside the API everything is there; I just can't send that response back to the client side. Here's my client upload function:
const addPostImage = (e) => {
  const selectedFile = e.target.files[0];
  if (
    selectedFile.type === "image/jpeg" ||
    selectedFile.type === "image/png" ||
    selectedFile.type === "image/svg" ||
    selectedFile.type === "image/gif" ||
    selectedFile.type === "image/tiff"
  ) {
    const form = new FormData();
    form.append("uploadedFile", selectedFile);
    axios
      .post("/api/posts/uploadImage", form, {
        headers: { "Content-Type": "multipart/form-data" },
      })
      .then((image) => {
        setPostImage(image);
        toast.success("Image uploaded!");
      })
      .catch((error) => {
        toast.error(`Error uploading image ${error.message}`);
      });
  } else {
    setWrongImageType(true);
  }
};
This is my API:
import { client } from "../../../client/client";
import formidable from "formidable";
import { createReadStream } from "fs";

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const form = new formidable.IncomingForm();
  form.keepExtensions = true;
  form.parse(req, async (err, fields, files) => {
    const file = files.uploadedFile;
    const document = await client.assets.upload(
      "image",
      createReadStream(file.filepath),
      {
        contentType: file.mimetype,
        filename: file.originalFilename,
      }
    );
    console.log(document);
    res.status(200).json(document);
  });
};
Solution:
As stated in the comments by @juliomalves, I had to promisify the form-parsing function and await its result, like so:
import { client } from "../../../client/client";
import formidable from "formidable";
import { createReadStream } from "fs";

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const form = new formidable.IncomingForm();
  form.keepExtensions = true;

  const formPromise = await new Promise((resolve, reject) => {
    form.parse(req, async (err, fields, files) => {
      if (err) reject(err);
      const file = files.uploadedFile;
      const document = await client.assets.upload(
        "image",
        createReadStream(file.filepath),
        {
          contentType: file.mimetype,
          filename: file.originalFilename,
        }
      );
      resolve(document);
    });
  });

  res.json(formPromise);
};
Then I checked for the response's status on the client-side.
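That check might look something like this (illustrative, based on the upload function shown earlier):

axios
  .post("/api/posts/uploadImage", form, {
    headers: { "Content-Type": "multipart/form-data" },
  })
  .then((response) => {
    if (response.status === 200) {
      setPostImage(response.data);
      toast.success("Image uploaded!");
    }
  })
  .catch((error) => toast.error(`Error uploading image ${error.message}`));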
Your code is not working because, by default, formidable saves files to disk, which is not available on Vercel. This works:
const { Writable } = require('stream');

const chunks = [];
let buffer;

const form = formidable({
  fileWriteStreamHandler: (/* file */) => {
    // Collect the file's bytes in memory instead of writing to disk
    const writable = new Writable();
    // eslint-disable-next-line no-underscore-dangle
    writable._write = (chunk, enc, next) => {
      chunks.push(chunk);
      next();
    };
    return writable;
  },
});

form.parse(req, (err, fields) => {
  if (err) {
    res.end(String(err));
    return;
  }
  buffer = Buffer.concat(chunks);
  res.end();
});
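Building on that, a hedged sketch of returning the Sanity document from the buffered upload; this assumes client.assets.upload accepts a Buffer, and the filename is illustrative:

form.parse(req, async (err, fields) => {
  if (err) {
    res.status(500).end(String(err));
    return;
  }
  const buffer = Buffer.concat(chunks);
  // Upload straight from memory instead of from a temp file on disk
  const document = await client.assets.upload("image", buffer, {
    filename: "upload.jpg", // illustrative; derive from the form if needed
  });
  res.status(200).json(document);
});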

Batch Geocode using Axios

Testing the HERE Batch Geocode life-cycle through a Node application. We have something similar working with Azure Maps, but it is crazy expensive.
The initial POST request seems to succeed, but the job is stuck in "submitted" status during the status check, and the result check fails with a 404. I'm using axios to make the queries, following the documented examples.
const getStatus = async requestId => {
  const url = statusURL(requestId);
  const res = await axios.get(url);
  const response = res.data.Response;
  return response;
};

const getResult = async requestId => {
  const url = resultURL(requestId);
  const config = { headers: { 'Content-type': 'text/plain' } };
  const res = await axios.get(url, config);
  const response = res.data.Response;
  return response;
};

const requestGeo = async input => {
  const url = requestURL;
  const res = await axios.post(url, input, {
    headers: { 'Content-type': 'text/plain' },
  });
  const requestId = res.data.Response.MetaInfo.RequestId;
  return requestId;
};

getStatus(requestId)
  .then(res => {
    console.log(res);
  })
  .catch(e => {
    console.log(e);
  });

const input = `recId|street|city|postalCode|country
1|425 Randolph St|Chicago||USA
2|31 St James Ave|Boston|02116|USA
3|Invalidenstrasse 117|Berlin|10115|DEU`;

requestGeo(input)
  .then(console.log)
  .catch(e => {
    console.log(e);
  });
If you don't specify the "&action=run" parameter in your initial request, the job is only checked, stored, and set to "submitted"; it will not be executed.
Alternatively, you can send an "action=start" request to start the job.
Having applied one of these two options, the job will be scheduled for execution and flagged as "accepted".
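For illustration, a sketch of what the initial request URL might look like with the run action included; the base URL and auth parameter are assumptions based on the HERE Batch Geocoder docs, so adjust them to your account:

// Hypothetical requestURL with the job set to run immediately on submission
const requestURL =
  'https://batch.geocoder.ls.hereapi.com/6.2/jobs' +
  '?apiKey=YOUR_API_KEY' +
  '&action=run' +   // schedule the job for execution on submission
  '&indelim=%7C' +  // input delimiter: |
  '&outdelim=%7C';  // output delimiter: |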

TypeError: Cannot read property 'file' of null busboy in firebase cloud function

I'm trying to write a simple function which takes a request containing multipart/form-data and uploads the image to Firebase Cloud Storage using busboy, but in the Cloud Functions console it throws the following error: "TypeError: Cannot read property 'file' of null". I have checked that Postman sends the correct header 'Content-type: multipart/form-data' and everything seems to be OK. Any help will be much appreciated.
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
admin.initializeApp();

const os = require('os');
const path = require('path');
const fs = require('fs');
const Busboy = require('busboy');
const cors = require('cors')({ origin: true });

var { Storage } = require('@google-cloud/storage');
var gcs = new Storage({
  projectId: 'xxxxxxxxxxxxxxx',
  keyFilename: 'xxxxxxxxk-y7fqf-38xxx5121.json'
});

exports.uploadFile = functions.https.onRequest((req, res) => {
  cors(req, res, () => {
    if (req.method !== "POST") {
      res.status(500).json({
        message: "Not allowed"
      });
    }
    console.log(req.body)

    const busboy = new Busboy({ headers: req.headers });
    let uploadData = null;

    busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
      const filepath = path.join(os.tmpdir(), filename);
      uploadData = { file: filepath, type: mimetype };
      file.pipe(fs.createWriteStream(filepath));
    });

    busboy.on("finish", () => {
      const bucket = gcs.bucket("xxxxxxxxxxx.appspot.com");
      // const bucket = admin.storage().bucket();
      bucket
        .upload(uploadData.file, {
          uploadType: "media",
          metadata: {
            metadata: {
              contentType: uploadData.type
            }
          }
        })
        .then(() => {
          res.status(200).json({
            message: "It worked!"
          });
        })
        .catch(err => {
          res.status(500).json({
            error: err
          });
        });
    });

    req.pipe(busboy)
  });
});
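Given the rawBody caveat covered earlier on this page, a likely culprit is that req.pipe(busboy) delivers nothing on deployed functions, so the 'file' handler never fires and uploadData stays null. A minimal sketch of the usual fix:

// Deployed Cloud Functions pre-read the request stream, so prefer the
// buffered body; fall back to piping for the local emulator
if (req.rawBody) {
  busboy.end(req.rawBody);
} else {
  req.pipe(busboy);
}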

How can I upload uint8 pixel data to Storage in Firebase Functions?

bucket.upload only accepts local file paths, and it seems that in Functions the put method cannot be used. With put, I think I would be able to upload with new Uint8Array(data), but that does not work with the upload method.
exports.modificarImagen = functions.storage.object().onChange(event => {
  const THUMB_PREFIX = 'thumb_';
  const object = event.data;
  const fileBucket = object.bucket;
  const filePath = object.name;
  const contentType = object.contentType;
  const resourceState = object.resourceState;
  const metageneration = object.metageneration;
  const SIZES = [64];

  const bucket = gcs.bucket(fileBucket);
  const fileName = filePath.split('/').pop();
  const tempIconoPath = path.join(os.tmpdir(), 'icono-amarillo.png');
  const tempPerfilPath = path.join(os.tmpdir(), 'perfil64.jpg');

  return bucket.file('images/icono-amarillo.png').download({
    destination: tempIconoPath
  }).then(() => {
    bucket.file('images/perfil64.jpg').download({
      destination: tempPerfilPath
    }).then(() => {
      _.each(SIZES, (size) => {
        let newFileName = 'nueva_imagen.png';
        let newFileTemp = path.join(os.tmpdir(), newFileName);
        let newFilePath = `images/${newFileName}`;
        sharp(tempIconoPath)
          .flatten()
          .background('#ff6600')
          .overlayWith(tempPerfilPath, { gravity: sharp.gravity.southeast })
          .sharpen()
          .withMetadata()
          .raw()
          .toBuffer().then(function (outputBuffer) {
            // here is the problem: outputBuffer is a raw uint8array and
            // storage only allows a file path.
            bucket.upload(outputBuffer, {
              destination: newFilePath
            }).then(() => { console.log("do another thing"); });
          });
      }); // each
    });
  });
});
You can use Google Cloud Storage and Sharp along with streams. The following should work:
function transform(fileBucket, tempPerfilPath, filePath, newFilePath) {
  const thumbnailUploadStream = fileBucket.file(newFilePath).createWriteStream();

  // Create a Sharp pipeline for transforming the image and pipe it straight
  // into the bucket write stream, fed from the bucket read stream
  const pipeline = sharp();
  pipeline
    .flatten()
    .background('#ff6600')
    .overlayWith(tempPerfilPath, { gravity: sharp.gravity.southeast })
    .sharpen()
    .withMetadata()
    .raw()
    .pipe(thumbnailUploadStream);

  fileBucket.file(filePath).createReadStream().pipe(pipeline);

  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));

  return streamAsPromise.then(() => {
    return console.log('Image created and uploaded successfully');
  });
}
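For completeness, a hedged sketch of how transform might be wired into the storage trigger from the question (this assumes gcs, path, os, and sharp are set up as above, and that the overlay image is downloaded to the temp directory first, as in the original code):

exports.modificarImagen = functions.storage.object().onChange(event => {
  const fileBucket = gcs.bucket(event.data.bucket);
  const tempPerfilPath = path.join(os.tmpdir(), 'perfil64.jpg');
  // Download the overlay image, then stream the triggering file through sharp
  return fileBucket.file('images/perfil64.jpg')
    .download({ destination: tempPerfilPath })
    .then(() => transform(fileBucket, tempPerfilPath, event.data.name, 'images/nueva_imagen.png'));
});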
