FormData using BusBoy for firebase works in serve but not in deploy - firebase

Situation
I have a firebase function that updates the user image.
Problem
When I run locally my function using firebase serve, I successfully upload the image to firestore using Postman. However, when I run firebase deploy and I try to upload the image using Postman, I get a 500 Internal Server Error. The other functions (not dealing with FormData, just json) work perfectly when I deploy them.
I don't understand why it works locally, but not on deploy when I am doing the exact same thing. Not sure if this is something in the config I am missing, or if I am doing something wrong. Any help would be appreciated!
Code
users.js
const { admin, db, firebase } = require('../util/admin');
const config = require('../util/config');
exports.postUserImage = (req, res) => {
const BusBoy = require('busboy');
const path = require('path');
const os = require('os');
const fs = require('fs');
let imgFileName;
let imgToBeUploaded = {};
const busboy = new BusBoy({ headers: req.headers });
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
// Invalid file type
if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
return res.status(400).json({ error: 'Invalid file type' });
}
// Extract img extension
const imgDotLength = filename.split('.').length;
const imgExtension = filename.split('.')[imgDotLength - 1];
// Create img file name
imgFileName = `${Math.round(Math.random() * 1000000)}.${imgExtension}`;
// Create img path
const filepath = path.join(os.tmpdir(), imgFileName);
// Create img object to be uploaded
imgToBeUploaded = { filepath, mimetype };
// Use file system to create the file
file.pipe(fs.createWriteStream(filepath));
});
busboy.on('finish', () => {
admin
.storage()
.bucket()
.upload(imgToBeUploaded.filepath, {
resumable: false,
metadata: {
metadata: {
contentType: imgToBeUploaded.mimetype
}
}
})
.then(() => {
// Create img url to add to our user
const imgUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imgFileName}?alt=media`;
// Add img url to user document
return db.doc(`/users/${req.user.handle}`).update({ imgUrl });
})
.then(() => {
return res.json({ message: 'Image uploaded succesfully' });
})
.catch((err) => {
console.error(err);
return res.status(500).json({ error });
});
});
busboy.end(req.rawBody);
};
index.js
// index.js — wires the Express app routes to their handler functions.
const { app, functions } = require('./util/admin');
const FirebaseAuth = require('./util/firebaseAuth');
const {
postUserImage,
} = require('./handlers/users');
// FirebaseAuth runs first (authenticates the request and, presumably,
// populates req.user — verify in util/firebaseAuth), then the image handler.
app.post('/user/image', FirebaseAuth, postUserImage);

Related

how to Upload a file from ue4 to Js server, using multer

I'm currently making a project that requires me to send a png image from unreal engine to a next JS server which uses multer to pass the file on to another server.
When sending my file as a binary the JS server (intermediate server) is not receiving a file from unreal.
I've tried the two following methods
// Attempt 1: send the raw file bytes as the entire request body while the
// Content-Type header claims multipart/form-data. No multipart boundary,
// part headers, or form fields are actually written into the body, so a
// multipart parser on the server finds no 'file' part.
TArray<uint8> rawFileData;
FFileHelper::LoadFileToArray(rawFileData, *media);
Request->SetURL(API_HP_URL + "nude_upload");
Request->SetHeader(TEXT("Content-Type"), TEXT("multipart/form-data; boundary=----WebKitFormBoundarywpp9S2IUDici8hpI"));
Request->SetHeader(TEXT("Connection"), TEXT("keep-alive"));
Request->SetHeader(TEXT("accept"), TEXT("application/json, text/plain, */*"));
Request->SetContent(rawFileData);
Request->SetVerb("POST");
Request->OnProcessRequestComplete().BindUObject(this, &AHttpCommunicator::OnPostNudeSSResponse);
Request->ProcessRequest();
and
// Attempt 2: serialize a JSON object ({fileName, file: base64}) as the body,
// but still label it multipart/form-data. The body is JSON, not multipart,
// so again the server-side multipart parser yields no file. (Note:
// rawFileData is never loaded here before being base64-encoded.)
FString JsonString;
TArray<uint8> rawFileData;
TSharedRef<TJsonWriter<TCHAR>> JsonWriter = JsonWriterFactory<TCHAR>::Create(&JsonString);
JsonWriter->WriteObjectStart();
JsonWriter->WriteValue("fileName", pPathToFile);
JsonWriter->WriteValue("file", FBase64::Encode(rawFileData));
JsonWriter->WriteObjectEnd();
JsonWriter->Close();
Request->SetURL(API_HP_URL + "nude_upload");
Request->SetHeader(TEXT("Content-Type"), TEXT("multipart/form-data; boundary=----WebKitFormBoundarywpp9S2IUDici8hpI"));
Request->SetHeader(TEXT("Connection"), TEXT("keep-alive"));
Request->SetHeader(TEXT("accept"), TEXT("application/json, text/plain, */*"));
Request->SetContentAsString(JsonString);
Request->SetVerb("POST");
Request->OnProcessRequestComplete().BindUObject(this, &AHttpCommunicator::OnPostNudeSSResponse);
Request->ProcessRequest();
both of these methods have the server return an undefined file obj
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import path from 'path';
import MulterGoogleCloudStorage from "multer-google-storage";
import nextConnect from 'next-connect';
const Multer = require('multer');
// BUG FIX: the package name is '@google-cloud/storage'; the '#' in the
// original is a copy/paste mangling of '@' and cannot resolve.
const { Storage } = require('@google-cloud/storage');
const CLOUD_BUCKET = 'nude_locks';
const PROJECT_ID = 'hp-production-338902';
const KEY_FILE = path.resolve('./hp-production-key.json');
// Storage client authenticated via the local service-account key file.
const storage = new Storage({
  projectId: PROJECT_ID,
  keyFilename: KEY_FILE
});
const bucket = storage.bucket(CLOUD_BUCKET);
// Multer keeps the upload in memory (5 MB cap) under the 'file' field name.
const upload = Multer({
  storage: Multer.memoryStorage(),
  limits: {
    fileSize: 5 * 1024 * 1024,
  }
}).single('file');
// next-connect route: multer parses the multipart body, then the POST
// handler streams the in-memory buffer into the Cloud Storage bucket.
const apiRoute = nextConnect({
  onNoMatch(req, res) {
    res.status(405).json({ error: `Method '${req.method}' Not Allowed` });
  },
});
// Run multer first so req.file is populated for the POST handler.
apiRoute.use(upload);
apiRoute.post((req, res) => {
  console.log(req.file);
  if (!req.file) {
    res.status(400).send("No file uploaded.");
    return;
  }
  const blob = bucket.file(req.file.originalname);
  // Make sure to set the contentType metadata for the browser to be able
  // to render the image instead of downloading the file (default behavior)
  const blobStream = blob.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    }
  });
  blobStream.on("error", err => {
    // BUG FIX: the original called next(err), but `next` is not a parameter
    // of this (req, res) handler — that would throw a ReferenceError on any
    // stream error. Respond directly instead.
    console.error(err);
    res.status(500).json({ error: err.message });
  });
  blobStream.on("finish", () => {
    console.log('finish');
    console.log(blob);
    // The public URL can be used to directly access the file via HTTP.
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
    // Make the image public to the web (since we'll be displaying it in browser)
    blob.makePublic().then(() => {
      res.status(200).send(`Success!\n Image uploaded to ${publicUrl}`);
    });
  });
  blobStream.end(req.file.buffer);
});
export default apiRoute;
// Disable Next.js's built-in body parsing so multer can consume the raw
// multipart stream itself (otherwise req.file would always be undefined).
export const config = {
api: {
bodyParser: false,
},
}
// NOTE(review): the browser File constructor signature is
// new File(bits, name, options) — passing a bare path string like this
// throws a TypeError, and web pages cannot open arbitrary local paths
// anyway; a real implementation would take the File from an <input> event.
const fileSelectedHandler = e => {
const file = new File("D:/_Spectre/VHS/P210107_VHS_Configurator/P04_Unreal/Human_Configurator/Saved/Screenshots/Windows/-1-nude-2022-2-13.png");
console.log(file);
// Append the file under the 'file' field to match upload.single('file').
const formData = new FormData();
formData.append('file', file);
axios.post('/api/nude_upload', formData, {
headers: {
'Content-Type': 'multipart/form-data',
}
})
.then(res => {
console.log(res);
});
}
Is there a way to create a file object from UE4?
alternatively is there a way to retrieve google cloud storage access tokens from UE4

Saving a buffer in Google Cloud Bucket

I'm trying to find a solution that will let me stream in-memory created zip to Google Cloud Bucket (I'm using Firebase, but seems like it's beyond it so I need to handle it through GCB).
I have nailed down file creation part (code below) and when it's working locally on my machine it saves it in the main folder where server files reside. So far so good.
Now I found this link that lets stream transfers, but not sure how to connect them. Should it be after zip is created? Instead? Any suggestions are welcome!
const express = require('express')
var router = express.Router()
var archiver = require('archiver')
var admin = require("firebase-admin");
var serviceAccount = require("../servicekey.json")
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
databaseURL: "https://myName.firebaseio.com",
storageBucket: "myName.appspot.com"
})
var bucket = admin.storage().bucket()
const {
convertCSVtoJSON,
generateDocuments,
generateDocx,
isCorrectTemplateFileType
} = require('./generateServices')
// Builds a zip of generated .docx files from an uploaded template + CSV data
// and streams it straight into the Cloud Storage bucket as "pliki.zip".
router.post('/', async (req, res) => {
  try {
    if (!isCorrectTemplateFileType(req.files.template))
      return res.status(403).send({
        message: 'Wrong file type. Please provide .docx file.'
      })
    const template = req.files.template.data
    const data = await convertCSVtoJSON(req.files.data1)
    let zip = archiver('zip')
    zip.on('warning', function(err) {
      console.log(err)
    });
    zip.on('error', function(err) {
      res.status(500).send({error: err.message})
    });
    zip.on('end', function() {
      console.log('Archive wrote %d bytes', zip.pointer())
    });
    // BUG FIX: File#save() expects string/Buffer data, not the archiver
    // stream object, so the original stored an unusable file. Pipe the zip
    // stream directly into the bucket instead, and only respond once the
    // upload has actually finished (the original sent 201 immediately).
    const file = bucket.file("pliki.zip") // nazwa do zmiany
    const remoteStream = file.createWriteStream({
      metadata: { contentType: 'application/zip' }
    })
    remoteStream.on('error', (err) => {
      console.log("error " + err)
      res.status(500).send({ error: err.message })
    })
    remoteStream.on('finish', () => {
      console.log("cool")
      res.sendStatus(201)
    })
    zip.pipe(remoteStream)
    // Append one generated document per CSV row, then finalize the archive;
    // finalize() flushes remaining entries through the pipe above.
    data.forEach((docData, index) => {
      let buff = generateDocx(template, docData, 'title')
      zip.append(buff, { name: `${index}.docx` })
    })
    zip.finalize()
  } catch (error) {
    console.log(error)
    res.send(error)
  }
})
module.exports = router

Unable to upload image to firebase storage with firebase functions

here is my code:-
exports.uploadImage = (req, res) => {
const BusBoy = require('busboy');
const path = require('path');
const os = require('os');
const fs = require('fs');
const busboy = new BusBoy({ headers: req.headers });
let imageFileName;
let imageToBeUploaded = {};
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const imageExtension = filename.split('.')[filename.split('.').length - 1];
imageFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const filepath = path.join(os.tmpdir(), imageFileName);
imageToBeUploaded = { filepath, mimetype };
file.pipe(fs.createWriteStream(filepath));
});
busboy.on('finish', () => {
console.log('Busboy on started');
//code breaks here
admin.storage().bucket().upload(imageToBeUploaded.filepath, {
resumable: false,
metadata: {
metadata: {
contentType: imageToBeUploaded.mimetype
}
}
})
.then(() => {
const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`;
console.log('logging image url' + imageUrl);
return db.doc(`/users/${req.user.handle}`).update({ imageUrl })
})
.then(() => {
return res.json({ message: 'Image uploaded successfully' });
})
.catch(err => {
console.error(err);
return res.status(500).json({ error: err.code });
})
});
busboy.end(req.rawBody);
}
I have mentioned where my code is breaking in a comment and the error I am getting is Error: Cannot parse response as JSON: Not Found
message: 'Cannot parse response as JSON: Not Found'
The error message says cannot parse response as JSON. Does that mean the response from firebase is not JSON? I have a token in the header of the request and an image in the body as form-data. I really have no clue what's wrong — please help.
I unfortunately can't identify the JSON parsing error, so I've instead rewritten the code to be more streamlined, as @robsiemb alluded to.
Your uploadImage function appears to be configured as some middleware, so I have done the same below. This code will stream the uploaded data straight to Cloud Storage under a unique file name as generated from Reference.push().key to prevent conflicts.
In the code below,
The uploaded file will be stored at a location similar to: userData/someUserId/images/-JhLeOlGIEjaIOFHR0xd.png
The image's raw URL is not stored in the database because unless the file object or containing bucket is made public it will require a signed URL which can only last up to 7 days (see below).
More than one file can be accepted and uploaded. If this is undesired, configure the limits for the BusBoy instance.
Basic error handling for non-POST requests and missing file entries was added.
// import Firebase libraries & initialize
const admin = require('firebase-admin');
admin.initializeApp(); // initializes from environment variables
// import required modules
const BusBoy = require('busboy');
exports.uploadImage = (req, res) => {
if (req.method !== 'POST') {
res.sendStatus(405); // 405 METHOD_NOT_ALLOWED
return;
}
let busboy = new BusBoy({headers: req.headers}); // add {limits: {files: 1}} to limit to only a single file upload
let bucket = admin.storage().bucket();
let db = admin.firestore();
let storageFilepath;
let storageFile;
// Note: Currently only the last file is saved to `/users/${req.user.handle}`
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
let fileext = filename.match(/\.[0-9a-z]+$/i)[0];
storageFilepath = `userData/${req.user.handle}/images/` + getUniqueName() + fileext;
storageFile = bucket.file(storageFilepath);
file.pipe(storageFile.createWriteStream({ gzip: true }));
})
.on('finish', () => {
if (!storageFile) {
res.status(400).json({error: 'expected file'}); // 400 BAD_REQUEST
return;
}
db.doc(`/users/${req.user.handle}`).update({ imagePath: storageFilepath })
.then(() => {
res.status(201).json({ message: 'Image uploaded successfully' }); // 201 CREATED
})
.catch((err) => {
console.error(err);
res.status(500).json({ error: err.code }); // 500 INTERNAL_SERVER_ERROR
});
})
.on('error', (err) => {
console.error(err);
res.status(500).json({ error: err.code });
});
req.pipe(busboy);
});
// Produces a collision-resistant id by leaning on the Realtime Database
// push-key generator: push() with no arguments creates a reference (and a
// fresh key) without writing anything.
function getUniqueName() {
  const throwawayRef = admin.database().ref().push();
  return throwawayRef.key;
}
If you did want the uploaded image to be publicly accessible, you could use the following .on('finish', ...) handler that adds in the File.makePublic() function:
// Alternative 'finish' handler (chain in place of the one above) that also
// makes the uploaded object public and stores a permanent public URL.
.on('finish', () => {
if (!storageFile) {
res.status(400).json({error: 'expected file'}); // 400 BAD_REQUEST
return;
}
// Make the object publicly readable, then record both its storage path and
// its stable public URL on the user's Firestore document.
storageFile.makePublic()
.then(() => {
return db.doc(`/users/${req.user.handle}`).update({
imagePath: storageFilepath,
imageUrl: `https://storage.googleapis.com/${config.storageBucket}/${storageFilepath}`
});
})
.then(() => {
res.status(201).json({ message: 'Image uploaded successfully' }); // 201 CREATED
})
.catch((err) => {
console.error(err);
res.status(500).json({ error: err.code }); // 500 INTERNAL_SERVER_ERROR
});
})
Found a solution to the issue!
Essentially - you need to set up your Google Application Credentials. Go into firebase and look into your settings. You need to set up the environment variable GOOGLE_APPLICATION_CREDENTIALS so that firebase has your credentials when you access these files.
https://firebase.google.com/docs/admin/setup?authuser=1 for more information.
After you've done that, check the security settings in firebase, in every area you're dealing with. This should solve the problem (it's definitely a security issue and not your code).
This was the tutorial in question, for those following along: https://www.youtube.com/watch?v=m_u6P5k0vP0&t=7661s.
In my case it was wrong bucket Id configured - after correcting that i was able to upload file

TypeError: Cannot read property 'file' of null busboy in firebase cloud function

I'm trying to write a simple function which takes a request containing multipart/form data and uploads the image to the firebase cloud storage using busboy, but in the cloud functions console it throws the following error: "TypeError: Cannot read property 'file' of null". I have checked that Postman sends the correct header 'Content-type: multipart/form-data' and everything seems to be ok. Any help will be much appreciated.
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
admin.initializeApp();
const os = require('os');
const path = require('path');
const fs = require('fs');
const Busboy = require('busboy');
// BUG FIX: the package is '@google-cloud/storage'; the '#' in the original
// is a copy/paste mangling of '@' and would not resolve.
var { Storage } = require('@google-cloud/storage');
// Storage client authenticated with an explicit service-account key file.
var gcs = new Storage({
  projectId: 'xxxxxxxxxxxxxxx',
  keyFilename: 'xxxxxxxxk-y7fqf-38xxx5121.json'
});
exports.uploadFile = functions.https.onRequest((req, res) => {
cors(req, res, () => {
if (req.method !== "POST") {
res.status(500).json({
message: "Not allowed"
});
}
console.log(req.body)
const busboy = new Busboy({ headers: req.headers });
let uploadData = null;
busboy.on('file', function (fieldname, file, filename, encoding,
mimetype) {
const filepath = path.join(os.tmpdir(), filename);
uploadData = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
});
busboy.on("finish", () => {
const bucket = gcs.bucket("xxxxxxxxxxx.appspot.com");
// const bucket = admin.storage().bucket();
bucket
.upload(uploadData.file, {
uploadType: "media",
metadata: {
metadata: {
contentType: uploadData.type
}
}
})
.then(() => {
res.status(200).json({
message: "It worked!"
});
})
.catch(err => {
res.status(500).json({
error: err
});
});
});
req.pipe(busboy)
});
});`

database triggers firebase function to download images from URL and save it to storage

I want to download the image and save it to storage when my database is updated with the 'photo_url' field
// Original (broken) version, kept for reference — the prose below notes it
// fails with "EROFS: read-only file system".
exports.saveToStorage = functions.database.ref(`/images/${itemImageRef}`)
.onWrite(event => {
const filePath = event.data.val();
const filename = filePath.split('/').pop();
// createWriteStream(filename) writes to the function's working directory,
// which is read-only in Cloud Functions — this is the EROFS error; only
// os.tmpdir() is writable.
var download = request.get(filePath).on('error', (err) => {
console.log(err)
})
.pipe(fs.createWriteStream(filename));
download.on('finish', () => {
const bucket = gcs.bucket('id.appspot.com');
// NOTE(review): `$(unknown)` looks like paste-garbling of an interpolation —
// presumably ${filename}; confirm against the original post.
const storagePath = `images/$(unknown)`;
// bucket.upload() expects a local file *path*, not the stream object passed
// here — a second defect in this version.
return bucket.upload(download, { destination: storagePath })
.then(() => {
console.log('success upload');
});
});
});
it logs "Error: EROFS: read-only file system, open 'image.jpg' at Error (native)." I suppose I cannot retrieve the file saved by createWriteStream?
So how should I download images from the web?
With the post suggested by @Jobsamuel, the code now works:
exports.saveToStorage = functions.database.ref(`/images/${itemImageRef}`)
.onWrite(event => {
const filePath = event.data.val();
const filename = filePath.split('/').pop();
const bucket = gcs.bucket('id.appspot.com');
const remoteWriteStream = bucket.file(filename).createWriteStream({
metadata: { contentType: 'image/jpeg' }
});
request(filePath).pipe(remoteWriteStream)
.on('error', (err) => console.log(err))
.on('finish', () => console.log('success save image'));
});
By pipe the request result directly to the bucket, it solves the problem by skipping the step writing to a local file, which I suspect is the reason my original code fails. Also, don't forget to set contentType for images.

Resources