Firebase Functions get files from storage

I have to send a file to an API, therefore I have to use fs.readFileSync(). After uploading the picture to Storage, I call my function to execute the API call. But I cannot get the file from Storage. This is a section of the code, which always gets null as the result. I also tried .getFiles() without a parameter, which returned all files, but I don't want to filter them by iteration.
exports.stripe_uploadIDs = functions.https //.region("europe-west1")
  .onCall((data, context) => {
    const authID = context.auth.uid;
    console.log("request is authenticated? :" + authID);
    if (!authID) {
      throw new functions.https.HttpsError("unauthenticated", "not authorized");
    }
    let accountID;
    let result_fileUpload;
    let tempFile = path.join(os.tmpdir(), "id_front.jpg");
    const options_id_front_jpeg = {
      prefix: "/user/" + authID + "/id_front.jpg"
    };
    const storageRef = admin
      .storage()
      .bucket()
      .getFiles(options_id_front_jpeg)
      .then(results => {
        console.log("JPG" + JSON.stringify(results));
        // need to write this file to tempFile
        return results;
      });
    const paymentRef = storageRef.then(() => {
      return admin
        .database()
        .ref("Payment/" + authID)
        .child("accountID")
        .once("value");
    });
    const setAccountID = paymentRef.then(snap => {
      accountID = snap.val();
      return accountID;
    });
    const fileUpload = setAccountID.then(() => {
      return Stripe.fileUploads.create(
        {
          purpose: "identity_document",
          file: {
            data: tempFile, // Documentation says I should use fs.readFileSync("filepath")
            name: "id_front.jpg",
            type: "application/octet-stream"
          }
        },
        { stripe_account: accountID }
      );
    });
    const fileResult = fileUpload.then(result => {
      result_fileUpload = result;
      console.log(JSON.stringify(result_fileUpload));
      return result_fileUpload;
    });
    return fileResult;
  });
Result is:
JPG[[]]

You need to download the file from the bucket into your function's local context (the temp directory) before you can read it with fs.readFileSync(). As an aside, note that your prefix starts with a slash ("/user/…"); object names in a bucket normally don't begin with "/", which would explain why getFiles() returns an empty list.
More or less, the code below should work; tweak it to your needs and call it from within your .onCall context.
import admin from 'firebase-admin';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';

admin.initializeApp();

const { log } = console;

async function tempFile(fileBucket: string, filePath: string) {
  const bucket = admin.storage().bucket(fileBucket);
  const fileName = 'MyFile.ext'; // placeholder: use your own file name, e.g. path.basename(filePath)
  const tempFilePath = path.join(os.tmpdir(), fileName);
  const metadata = {
    contentType: 'YOUR_CONTENT_TYPE' // don't forget to set the content type
  };

  // Download the file to a local temp file, then do whatever you need with it
  await bucket.file(filePath).download({ destination: tempFilePath });
  log('File downloaded to', tempFilePath);

  // Optional: if you modified the file and need it back in your bucket,
  // upload it again
  await bucket.upload(tempFilePath, {
    destination: filePath,
    metadata: metadata
  });

  // Free up disk space by deleting the temp file. The tmp directory is an
  // in-memory filesystem, so leftover files count against your function's memory.
  return fs.unlinkSync(tempFilePath);
}
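To tie this back to the question: once the object is on the function's local disk, fs.readFileSync() works exactly as Stripe's documentation describes. A minimal sketch of the relevant step, assuming the imports above and that the object lives at user/{uid}/id_front.jpg (note: no leading slash in the object name, unlike the prefix in the question):
// Hedged sketch: fetch the known object directly instead of listing,
// then hand the local bytes to Stripe.
const tempFile = path.join(os.tmpdir(), "id_front.jpg");
return admin.storage().bucket()
  .file("user/" + authID + "/id_front.jpg")
  .download({ destination: tempFile })
  .then(() => Stripe.fileUploads.create(
    {
      purpose: "identity_document",
      file: {
        data: fs.readFileSync(tempFile), // read the local copy, as the docs say
        name: "id_front.jpg",
        type: "application/octet-stream"
      }
    },
    { stripe_account: accountID }
  ));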

Related

How to upload a file from UE4 to a JS server using multer

I'm currently working on a project that requires me to send a PNG image from Unreal Engine to a Next.js server, which uses multer to pass the file on to another server.
When I send the file as binary, the intermediate JS server does not receive a file from Unreal.
I've tried the following two methods:
TArray<uint8> rawFileData;
FFileHelper::LoadFileToArray(rawFileData, *media);
Request->SetURL(API_HP_URL + "nude_upload");
Request->SetHeader(TEXT("Content-Type"), TEXT("multipart/form-data; boundary=----WebKitFormBoundarywpp9S2IUDici8hpI"));
Request->SetHeader(TEXT("Connection"), TEXT("keep-alive"));
Request->SetHeader(TEXT("accept"), TEXT("application/json, text/plain, */*"));
Request->SetContent(rawFileData);
Request->SetVerb("POST");
Request->OnProcessRequestComplete().BindUObject(this, &AHttpCommunicator::OnPostNudeSSResponse);
Request->ProcessRequest();
and
FString JsonString;
TArray<uint8> rawFileData;
TSharedRef<TJsonWriter<TCHAR>> JsonWriter = JsonWriterFactory<TCHAR>::Create(&JsonString);
JsonWriter->WriteObjectStart();
JsonWriter->WriteValue("fileName", pPathToFile);
JsonWriter->WriteValue("file", FBase64::Encode(rawFileData));
JsonWriter->WriteObjectEnd();
JsonWriter->Close();
Request->SetURL(API_HP_URL + "nude_upload");
Request->SetHeader(TEXT("Content-Type"), TEXT("multipart/form-data; boundary=----WebKitFormBoundarywpp9S2IUDici8hpI"));
Request->SetHeader(TEXT("Connection"), TEXT("keep-alive"));
Request->SetHeader(TEXT("accept"), TEXT("application/json, text/plain, */*"));
Request->SetContentAsString(JsonString);
Request->SetVerb("POST");
Request->OnProcessRequestComplete().BindUObject(this, &AHttpCommunicator::OnPostNudeSSResponse);
Request->ProcessRequest();
Both of these methods result in the server receiving an undefined file object.
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import path from 'path';
import MulterGoogleCloudStorage from "multer-google-storage";
import nextConnect from 'next-connect';

const Multer = require('multer');
const { Storage } = require('@google-cloud/storage');

const CLOUD_BUCKET = 'nude_locks';
const PROJECT_ID = 'hp-production-338902';
const KEY_FILE = path.resolve('./hp-production-key.json')

const storage = new Storage({
  projectId: PROJECT_ID,
  keyFilename: KEY_FILE
});
const bucket = storage.bucket(CLOUD_BUCKET);

const upload = Multer({
  storage: Multer.memoryStorage(),
  limits: {
    fileSize: 5 * 1024 * 1024,
  }
}).single('file');

const apiRoute = nextConnect({
  onNoMatch(req, res) {
    res.status(405).json({ error: `Method '${req.method}' Not Allowed` });
  },
});

apiRoute.use(upload);

apiRoute.post((req, res, next) => {
  console.log(req.file);
  if (!req.file) {
    res.status(400).send("No file uploaded.");
    return;
  }
  const blob = bucket.file(req.file.originalname);
  // Make sure to set the contentType metadata for the browser to be able
  // to render the image instead of downloading the file (default behavior)
  const blobStream = blob.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    }
  });
  blobStream.on("error", err => {
    next(err);
    return;
  });
  blobStream.on("finish", () => {
    console.log('finish');
    console.log(blob);
    // The public URL can be used to directly access the file via HTTP.
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
    // Make the image public to the web (since we'll be displaying it in browser)
    blob.makePublic().then(() => {
      res.status(200).send(`Success!\n Image uploaded to ${publicUrl}`);
    });
  });
  blobStream.end(req.file.buffer);
});

export default apiRoute;

export const config = {
  api: {
    bodyParser: false,
  },
}
const fileSelectedHandler = e => {
  const file = new File("D:/_Spectre/VHS/P210107_VHS_Configurator/P04_Unreal/Human_Configurator/Saved/Screenshots/Windows/-1-nude-2022-2-13.png");
  console.log(file);
  const formData = new FormData();
  formData.append('file', file);
  axios.post('/api/nude_upload', formData, {
    headers: {
      'Content-Type': 'multipart/form-data',
    }
  })
  .then(res => {
    console.log(res);
  });
}
Is there a way to create a File object from UE4?
Alternatively, is there a way to retrieve Google Cloud Storage access tokens from UE4?
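No answer was posted for this one, but a likely culprit (an assumption based on the snippets above): both UE4 requests declare a multipart/form-data Content-Type with a hard-coded boundary, while the body is raw file bytes (or JSON) with no multipart framing at all, so multer never finds a 'file' part and req.file stays undefined. As a sanity check, a correctly framed request can be produced from Node with the form-data package (the endpoint URL and file path are illustrative):
// Hedged test sketch: post a file with real multipart framing so multer can
// find the 'file' field. form-data generates the boundary itself.
const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');

async function testUpload() {
  const form = new FormData();
  form.append('file', fs.createReadStream('./test.png'), 'test.png');
  const res = await axios.post('http://localhost:3000/api/nude_upload', form, {
    headers: form.getHeaders() // sets Content-Type with the actual boundary
  });
  console.log(res.status, res.data);
}

testUpload();
If building a multipart body by hand in UE4 proves awkward, another option is to have the server hand out a signed upload URL, so the client can PUT the raw bytes straight to the bucket with no multipart framing at all.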

FormData using BusBoy for firebase works in serve but not in deploy

Situation
I have a firebase function that updates the user image.
Problem
When I run my function locally using firebase serve, I can successfully upload the image via Postman. However, after firebase deploy, the same Postman upload returns a 500 Internal Server Error. The other functions (which deal only with JSON, not FormData) work perfectly when deployed.
I don't understand why it works locally, but not on deploy when I am doing the exact same thing. Not sure if this is something in the config I am missing, or if I am doing something wrong. Any help would be appreciated!
Code
users.js
const { admin, db, firebase } = require('../util/admin');
const config = require('../util/config');
exports.postUserImage = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

  let imgFileName;
  let imgToBeUploaded = {};

  const busboy = new BusBoy({ headers: req.headers });

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Invalid file type
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
      return res.status(400).json({ error: 'Invalid file type' });
    }
    // Extract img extension
    const imgDotLength = filename.split('.').length;
    const imgExtension = filename.split('.')[imgDotLength - 1];
    // Create img file name
    imgFileName = `${Math.round(Math.random() * 1000000)}.${imgExtension}`;
    // Create img path
    const filepath = path.join(os.tmpdir(), imgFileName);
    // Create img object to be uploaded
    imgToBeUploaded = { filepath, mimetype };
    // Use file system to create the file
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(imgToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imgToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        // Create img url to add to our user
        const imgUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imgFileName}?alt=media`;
        // Add img url to user document
        return db.doc(`/users/${req.user.handle}`).update({ imgUrl });
      })
      .then(() => {
        return res.json({ message: 'Image uploaded successfully' });
      })
      .catch((err) => {
        console.error(err);
        return res.status(500).json({ error: err.code });
      });
  });

  busboy.end(req.rawBody);
};
index.js
const { app, functions } = require('./util/admin');
const FirebaseAuth = require('./util/firebaseAuth');
const {
postUserImage,
} = require('./handlers/users');
app.post('/user/image', FirebaseAuth, postUserImage);
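For completeness, index.js presumably also hands the Express app to Cloud Functions; a one-line sketch of the export that is not shown in the question:
// Hypothetical export (assumed, not shown above). Cloud Functions pre-reads
// the HTTP body and exposes it as req.rawBody, which is why the handler
// calls busboy.end(req.rawBody) instead of piping req.
exports.api = functions.https.onRequest(app);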

Firebase storage upload string with Cloud function

I would like to upload a string of text to Cloud Storage. I've built it in plain JS, but I'm having trouble adapting it into a Cloud Function.
function download(exportObj) {
  var databuk = gcs.bucket('******.appspot.com');
  // var bucket = admin.storage().bucket();
  // var tocfileloc = storageRef.child('toctest.json');
  // const name = "toctest.json";
  // const bucketdes = bucket.name;
  var dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(exportObj));
  databuk.putString(dataStr, 'data_url').then(snapshot => {
    console.log('Uploaded a data_url string!');
    return true;
  }).catch(err => {
    console.log("error", err);
  });
}
My code is above; the string to upload is exportObj.
You'll want to use the Admin SDK for this. It'll be something along the lines of:
const admin = require('firebase-admin');
admin.initializeApp();

// ... then later, in your function
const file = admin.storage().bucket().file('path/to/your/file.txt');
return file.save('This will get stored in my storage bucket.', {
  gzip: true,
  contentType: 'text/plain'
}).then(() => {
  console.log('all done!');
});
The save() method is documented in the Cloud Storage Node.js client library reference.
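Wrapped into a deployable function, a minimal sketch might look like this (the function name, trigger, and object path are illustrative):
// Hedged sketch: an HTTPS function that stores a JSON string in the default bucket.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();

exports.saveExport = functions.https.onRequest(async (req, res) => {
  // Assume the request body is the JSON object to store (exportObj in the question).
  const file = admin.storage().bucket().file('exports/data.json');
  await file.save(JSON.stringify(req.body), {
    gzip: true,
    contentType: 'application/json'
  });
  res.status(200).send('saved');
});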

Firebase Storage - Is there any way to resize the original image using Cloud Functions without renaming the file and without an infinite trigger loop?

I have analyzed the thumbnail creation code that uses the Firebase Storage trigger.
I need to resize the current image without renaming the file, and it must not cause an infinite loop.
Basically, whenever I upload an image to Firebase Storage, it should be resized to a specific size, but other properties like the download URL and name should not change.
Below is the code for generating a thumbnail; I need to change it so it resizes the current image instead. Please help.
Here, the infinite loop only stops if the filename has a thumb_ prefix. I need to stop it using other properties, like metadata or anything else.
exports.generateThumbnail = functions.storage.object().onChange((event) => {
  const object = event.data;
  const fileBucket = object.bucket;
  const filePath = object.name;
  const contentType = object.contentType;
  const resourceState = object.resourceState;
  const metageneration = object.metageneration;

  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }
  const fileName = path.basename(filePath);
  if (fileName.startsWith('thumb_')) {
    // The loop only stops via this filename check; I want to use
    // something else, such as metadata.
    console.log('Already a Thumbnail.');
    return null;
  }
  if (resourceState === 'not_exists') {
    console.log('This is a deletion event.');
    return null;
  }
  if (resourceState === 'exists' && metageneration > 1) {
    console.log('This is a metadata change event.');
    return null;
  }

  const bucket = gcs.bucket(fileBucket);
  const tempFilePath = path.join(os.tmpdir(), fileName);
  const metadata = {
    contentType: contentType,
  };
  return bucket.file(filePath).download({
    destination: tempFilePath,
  }).then(() => {
    console.log('Image downloaded locally to', tempFilePath);
    return spawn('convert', [tempFilePath, '-thumbnail', '200x200>', tempFilePath]);
  }).then(() => {
    console.log('Thumbnail created at', tempFilePath);
    const thumbFileName = `thumb_${fileName}`;
    const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
    return bucket.upload(tempFilePath, {
      destination: thumbFilePath,
      metadata: metadata,
    });
  }).then(() => fs.unlinkSync(tempFilePath));
});
Pass custom metadata to the upload function:
return bucket.upload(tempFilePath, {
  destination: filePath,
  metadata: {
    contentType,
    metadata: {
      isThumb: 'true',
    }
  },
})
When you replace the file, the Cloud Function will be triggered again. To break the loop, check the custom metadata:
/**
 * File metadata.
 */
const meta = object.metadata;

/**
 * Exit if the image is already a thumbnail.
 */
if (meta && meta.isThumb == 'true') {
  console.log('Already a Thumbnail.');
  return null;
}
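Putting the two fragments together, a hedged sketch of the whole function: resize in place, tag the replacement with isThumb, and exit early when that flag is present. It assumes the same setup as the question (gcs, path, os, fs, and child-process-promise's spawn, with ImageMagick available):
exports.resizeImage = functions.storage.object().onChange((event) => {
  const object = event.data;
  const filePath = object.name;
  const contentType = object.contentType;
  // Exit if this is not an image, is a deletion, or was already resized.
  if (!contentType.startsWith('image/')) return null;
  if (object.resourceState === 'not_exists') return null;
  if (object.metadata && object.metadata.isThumb === 'true') return null;
  const bucket = gcs.bucket(object.bucket);
  const tempFilePath = path.join(os.tmpdir(), path.basename(filePath));
  return bucket.file(filePath).download({ destination: tempFilePath })
    .then(() => spawn('convert', [tempFilePath, '-thumbnail', '200x200>', tempFilePath]))
    .then(() => bucket.upload(tempFilePath, {
      destination: filePath, // same object path, so the name stays the same
      metadata: {
        contentType,
        metadata: { isThumb: 'true' } // the custom flag that breaks the loop
      }
    }))
    .then(() => fs.unlinkSync(tempFilePath));
});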

Get a Firebase Storage object in a database-triggered Cloud Function

I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples, but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
  name: abc123,
  raw: download url abc123
}
-
Firebase thumbnail generator example (storage-triggered):
exports.generateThumbnail = functions.storage.object().onChange(event => {
  const object = event.data; // The Storage object.
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const contentType = object.contentType; // File content type.
  const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
  const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.

  // Exit if this is triggered on a file that is not an image.
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return;
  }
  // Get the file name.
  const fileName = path.basename(filePath);
  // Exit if the image is already a thumbnail.
  if (fileName.startsWith('thumb_')) {
    console.log('Already a Thumbnail.');
    return;
  }
  // Exit if this is a move or deletion event.
  if (resourceState === 'not_exists') {
    console.log('This is a deletion event.');
    return;
  }
  // Exit if file exists but is not new and is only being triggered
  // because of a metadata change.
  if (resourceState === 'exists' && metageneration > 1) {
    console.log('This is a metadata change event.');
    return;
  }

  // Download file from bucket.
  const bucket = gcs.bucket(fileBucket);
  const metadata = {
    contentType: contentType
  };
  // We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);

  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
  });
});
-
How I want it to work (database-triggered):
The database trigger event does not have a Storage object. How can I access the Storage object?
When a thumbnail is created successfully, I want to add the downloadUrl of the thumb image to the database.
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
  const object = event.data; // This Storage object does not work when using a database trigger, so how do I access the same storage object? I have the image name.
  //thumb generator code...
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
    //add thumb image download url back to the database
  });
});
-
image: {
  name: abc123,
  raw: download url abc123,
  thumb: download url thumb_abc123 // added after thumb is created
}
This is how I got it to work.
Add Cloud Storage to your project.
If you want to retrieve the image's signed URL like I do, add projectId and keyFilename to your Cloud Storage reference:
const gcs = require('@google-cloud/storage')({
  projectId: 'name-1234',
  keyFilename: './keyfile.json'
});
generateThumbnail function
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(image => {
  const dataID = image.params.dataID
  const childDataID = image.params.childDataID
  const fileName = image.data.val()

  //get your project storage bucket id
  const storageBucket = functions.config().firebase.storageBucket
  //path to image
  const imagePath = `${dataID}/${childDataID}`
  //open bucket
  const bucket = gcs.bucket(storageBucket)
  //location of the image in the bucket
  const object = bucket.file(imagePath)

  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const metadata = {
    contentType: 'image/jpeg'
  };

  // We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(400, 400)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);

  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');

    var today = new Date();
    var dd = today.getDate();
    var mm = today.getMonth() + 1; //January is 0!
    const yyyy = today.getFullYear() + 5; // add a few years
    if (dd < 10) {
      dd = '0' + dd
    }
    if (mm < 10) {
      mm = '0' + mm
    }
    today = mm + '-' + dd + '-' + yyyy;

    bucket.file(filePath).getSignedUrl({
      action: 'read',
      expires: today
    }, function(err, url) {
      if (err) {
        console.error(err);
        return;
      }
      //add thumb image url to message (in database)
      return admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
        thumb: url
      })
    });
  });
})
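One caveat with the snippet above: the database write happens inside a Node-style callback, so it never joins the returned promise chain and the function may be terminated before the write completes. getSignedUrl also has a promise form, which makes the chain explicit; a hedged rework of the final step (the expiry date is illustrative):
return streamAsPromise
  .then(() => bucket.file(filePath).getSignedUrl({
    action: 'read',
    expires: '03-09-2491'
  }))
  .then(([url]) =>
    // add thumb image url to the database, now awaited by the function
    admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
      thumb: url
    })
  );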
