I'm trying to find a way to stream an in-memory zip archive to a Google Cloud Storage bucket (I'm using Firebase, but this seems to go beyond its API, so I need to handle it through GCS).
I have nailed down the file creation part (code below), and when it runs locally on my machine it saves the archive in the main folder where the server files reside. So far so good.
Now I found this link describing streaming transfers, but I'm not sure how to connect the two. Should it happen after the zip is created? Instead of it? Any suggestions are welcome!
const express = require('express')
var router = express.Router()
var archiver = require('archiver')
var admin = require("firebase-admin");

var serviceAccount = require("../servicekey.json")

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "https://myName.firebaseio.com",
  storageBucket: "myName.appspot.com"
})

var bucket = admin.storage().bucket()

const {
  convertCSVtoJSON,
  generateDocuments,
  generateDocx,
  isCorrectTemplateFileType
} = require('./generateServices')

router.post('/', async (req, res) => {
  try {
    if(!isCorrectTemplateFileType(req.files.template))
      return res.status(403).send({
        message: 'Wrong file type. Please provide .docx file.'
      })

    const template = req.files.template.data
    const data = await convertCSVtoJSON(req.files.data1)

    let zip = archiver('zip')

    zip.on('warning', function(err) {
      console.log(err)
    });

    zip.on('error', function(err) {
      res.status(500).send({error: err.message})
    });

    zip.on('entry', function(ars) {
      // console.log(ars)
    });

    zip.on('end', function() {
      console.log('Archive wrote %d bytes', zip.pointer())
    });

    // res.attachment('archive-name.zip')
    // zip.pipe(output)
    // zip.pipe(res)

    data.forEach((docData, index) => {
      let buff = generateDocx(template, docData, 'title')
      zip.append(buff, { name: `${index}.docx` })
    })

    zip.finalize()
    console.log(zip)

    const file = bucket.file("pliki.zip") // file name to be changed
    file.save(zip, (err) => {
      if (!err) {
        console.log("cool");
      } else {
        console.log("error " + err);
      }
    });

    res.sendStatus(201)
  } catch (error) {
    console.log(error)
    res.send(error)
  }
})

module.exports = router
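For what it's worth, here is a minimal sketch of how the two pieces might be connected: piping the archive straight into the bucket with createWriteStream instead of file.save, and only responding once the upload stream finishes. The archive name is a placeholder, and this assumes the same bucket, data, and template variables as in the code above.

// Sketch: inside the route handler, after `zip` is created and its event
// handlers are attached. Assumes `bucket`, `data`, `template` as above.
const gcsFile = bucket.file('archive.zip') // placeholder name
const remoteStream = gcsFile.createWriteStream({
  metadata: { contentType: 'application/zip' },
  resumable: false
})

remoteStream.on('error', (err) => res.status(500).send({ error: err.message }))
remoteStream.on('finish', () => res.sendStatus(201)) // respond only once the upload is done

zip.pipe(remoteStream) // connect archiver's readable side to the bucket

data.forEach((docData, index) => {
  let buff = generateDocx(template, docData, 'title')
  zip.append(buff, { name: `${index}.docx` })
})

zip.finalize()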
I want to use Next.js API routes as a backend service and serve database results in JSON format. As far as I can see, there is no way to keep the database connection up and running, since the files located at pages/api/ are ephemeral.
Below is my code:
import { models } from "../models/index"

export default async function handler(req, res) {
  const User = models.User
  try {
    const result = await User.findAll()
    return res.json({ result })
  } catch (err) {
    console.error("Error occurred ", err)
    return res.json({ result: [] })
  }
}
Has anyone encountered this problem?
The only possible way I found is to use a Node.js server and attach the database models to the request object. This way we pass the database connection/models through to the API routes.
My Node.js server:
const express = require("express")
const { sequelize } = require("./models/index")
const next = require("next")

const dev = process.env.NODE_ENV !== "production"
const app = next({ dev })
const handle = app.getRequestHandler()
const appExpress = express()

app.prepare().then(() => {
  appExpress.use(express.json())

  appExpress.get("*", (req, res) => {
    req.db = sequelize
    handle(req, res)
  })

  appExpress.listen(5000, () => console.log("> Ready on http://localhost:5000"))
}).catch((ex) => {
  console.error(ex.stack)
  process.exit(1)
})
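One caveat with the server above (my observation, not part of the original answer): appExpress.get("*") only forwards GET requests to Next.js, so API routes that accept POST, PUT, or DELETE would never be reached. Forwarding every HTTP method could look like this:

// Forward every HTTP method to Next.js, still attaching the Sequelize instance.
appExpress.all("*", (req, res) => {
  req.db = sequelize
  handle(req, res)
})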
My API route file changed to:
export default async function handler(req, res) {
  const User = req.db.models.User
  try {
    const result = await User.findAll()
    return res.json({ result })
  } catch (err) {
    console.error("Error occurred ", err)
    return res.json({ result: [] })
  }
}
With these changes the database connection is always up and running and can be used from all API route files.
I tested it and it works like a charm.
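For context, the models/index module imported by both files above could look roughly like this. This is a sketch only; the actual connection options and model definitions are not shown in the question, so the connection URL and the User fields here are placeholders.

// models/index.js (sketch) - exports a shared Sequelize instance and its models.
const { Sequelize, DataTypes } = require("sequelize")

// Connection details are placeholders; use your own database credentials.
const sequelize = new Sequelize(process.env.DATABASE_URL, { logging: false })

const User = sequelize.define("User", {
  name: DataTypes.STRING,
  email: DataTypes.STRING
})

module.exports = { sequelize, models: sequelize.models }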
Situation
I have a firebase function that updates the user image.
Problem
When I run my function locally using firebase serve, I can successfully upload the image using Postman. However, when I run firebase deploy and then try to upload the image using Postman, I get a 500 Internal Server Error. The other functions (which deal only with JSON, not FormData) work perfectly when deployed.
I don't understand why it works locally but not when deployed, since I am doing exactly the same thing. I'm not sure if there is something in the config I am missing, or if I am doing something wrong. Any help would be appreciated!
Code
users.js
const { admin, db, firebase } = require('../util/admin');
const config = require('../util/config');

exports.postUserImage = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

  let imgFileName;
  let imgToBeUploaded = {};

  const busboy = new BusBoy({ headers: req.headers });

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Invalid file type
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
      return res.status(400).json({ error: 'Invalid file type' });
    }

    // Extract img extension
    const imgDotLength = filename.split('.').length;
    const imgExtension = filename.split('.')[imgDotLength - 1];
    // Create img file name
    imgFileName = `${Math.round(Math.random() * 1000000)}.${imgExtension}`;
    // Create img path
    const filepath = path.join(os.tmpdir(), imgFileName);
    // Create img object to be uploaded
    imgToBeUploaded = { filepath, mimetype };
    // Use file system to create the file
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(imgToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imgToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        // Create img url to add to our user
        const imgUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imgFileName}?alt=media`;
        // Add img url to user document
        return db.doc(`/users/${req.user.handle}`).update({ imgUrl });
      })
      .then(() => {
        return res.json({ message: 'Image uploaded successfully' });
      })
      .catch((err) => {
        console.error(err);
        return res.status(500).json({ error: err.message });
      });
  });

  busboy.end(req.rawBody);
};
index.js
const { app, functions } = require('./util/admin');
const FirebaseAuth = require('./util/firebaseAuth');
const {
  postUserImage,
} = require('./handlers/users');

app.post('/user/image', FirebaseAuth, postUserImage);
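Not an answer, but a debugging suggestion: deployed functions only report a generic 500, so it may help to surface busboy's own errors. A minimal sketch of an extra handler (the handler is my addition, not part of the original code), to be placed inside postUserImage next to the other busboy handlers:

// Surface parsing errors in the Cloud Functions logs instead of a silent 500.
busboy.on('error', (err) => {
  console.error('busboy error:', err);
  return res.status(500).json({ error: err.message });
});

The actual stack trace should then show up under firebase functions:log (or in the Cloud Functions console), which usually narrows down the difference between the local emulator and the deployed environment.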
I'm able to successfully generate a signed URL for the thumbnails I am creating, but after about a week they no longer work...
I am not getting any errors or information as to why they are expiring; they just are.
I have been using Firebase to develop my app, and now all of a sudden I have to deal with all these Google Cloud Storage permissions and whatnot. I really have no idea what's going on... It's got to be some sort of permission issue?
I have tried generating a new service account from the Firebase console, but no luck... I am tired of waiting weeks to see whether they are going to expire again or not. I hope someone can guide me to a solution; it seems like it's a problem for many people. We can't afford to go live and have gray thumbnails all over the app because the URLs expire.
Here is how we are generating the signed URL with Firebase Cloud Functions:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [150, 256];
    const bucketDir = dirname(filePath);

    if (!filePath.startsWith('categories/')) {
      console.log('This is not in the categories directory.');
      return false;
    }

    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);

    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      sizes.map(size => {
        const newFileName = `thumb#${size}_${fileName}`
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`

        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp, () => {
            return bucket.upload(newFileTemp, {
              destination: join(bucketDir, newFilePath),
              metadata: {
                contentType: 'image/jpeg'
              }
            }).then((data) => {
              const file = data[0]
              file.getSignedUrl({
                action: 'read',
                expires: '03-17-2100'
              }, function(err, url) {
                if (err) {
                  console.error(err);
                  return;
                }
                if (size === 150) {
                  return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
                } else if (size === 256) {
                  return admin.database().ref('categories').child(fileName).child('thumb').set(url)
                    .then(() => {
                      admin.database().ref('categories').child(fileName).child('tempThumb').remove();
                    })
                }
              })
            })
          })
      })
    }).catch(error => {
      console.log(error);
    });
  })
After setting the expiration date to 03-17-2100 we don't expect this kind of behaviour, but like I said, I feel like it has something to do with GCS permissions. I tried to contact them, but after about a week I am still waiting for their response.
I appreciate all the feedback!
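One possible explanation that comes up for this exact symptom (offered as a guess, not a confirmed diagnosis): when getSignedUrl runs in Cloud Functions with application default credentials, the key used for signing can be rotated by Google, which silently invalidates previously signed URLs no matter what expires was set to. A commonly suggested workaround is to construct the Storage client with an explicit service-account key; a sketch, assuming a service-account.json file shipped alongside the function code:

// Sketch: build the Storage client with an explicit service-account key so
// signed URLs are produced with a stable private key instead of rotating credentials.
const { Storage } = require('@google-cloud/storage');

const gcs = new Storage({
  keyFilename: path.join(__dirname, 'service-account.json') // path is a placeholder
});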
I've been having trouble viewing the image files I've uploaded to Firebase, and I just noticed the issue is with the file type in Firebase.
There are two files in my Firebase Storage console: one uploaded from my iOS simulator (octet-stream), and the other uploaded directly into the console from the browser, which uploads properly and is viewable.
Here are my select and upload functions:
_selectPhoto = async () => {
  const status = await getPermission(Permissions.CAMERA_ROLL);
  if (status) {
    let imageName = "pic"
    const result = await ImagePicker.launchImageLibraryAsync(options);
    if (!result.cancelled) {
      Animated.timing(this.animatedWidth, {
        toValue: 600,
        duration: 15000
      }).start()
      this.uploadImage(result.uri, imageName)
        .then(() => {
          this.props.navigation.navigate('Profile')
        })
        .catch((error) => {
          Alert.alert('Must Sign In');
          this.props.navigation.navigate('Login')
          console.log(error);
        })
    }
  }
};

uploadImage = async (uri, imageName) => {
  const user = firebase.auth().currentUser;
  const response = await fetch(uri);
  const blob = await response.blob();
  let storageRef = firebase.storage().ref().child('images/' + user.displayName + '/' + imageName + '.jpg');
  const snapshot = await storageRef.put(blob);
  blob.close();
  snapshot.ref.getDownloadURL().then(function(downloadURL) {
    console.log("File available at", downloadURL);
    user.updateProfile({
      photoURL: downloadURL.toString(),
    }).then(function() {
      console.log('saved photo')
    }).catch(function(error) {
      console.log('failed photo')
    });
  });
}
When I log the link in my console, it also includes the alt=media and token parameters:
... .appspot.com/o/profile-pic.jpg?alt=media&token=56eb9c36-b5cd-4dbb-bec1-3ea5c3a74bdd
If I CMD+click it in VS Code, I receive an error:
{
  error: {
    code: 400,
    message: "Invalid HTTP method/URL pair."
  }
}
So naturally, when I put that link in the browser it downloads a file with that name but says:
The file “pic.jpg” could not be opened.
It may be damaged or use a
file format that Preview doesn’t recognize.
Maybe it could be something with mediaTypes, but I'm not exactly sure how to use it.
mediaTypes : String -- Choose what type of media to pick. Usage: ImagePicker.MediaTypeOptions.<Type>, where <Type> is one of: Images, Videos, All.
Thanks!
I've been fighting with this same issue for the past few days. I was finally able to get images to upload and render as expected by following the Firebase upload example in the Expo repo. I don't fully understand why it works, but it seems like Firebase doesn't like the blob that's generated by
const blob = await response.blob();
Try replacing the above with:
const blob = await new Promise((resolve, reject) => {
  const xhr = new XMLHttpRequest();
  xhr.onload = function() {
    resolve(xhr.response);
  };
  xhr.onerror = function(e) {
    console.log(e);
    reject(new TypeError('Network request failed'));
  };
  xhr.responseType = 'blob';
  xhr.open('GET', uri, true);
  xhr.send(null);
});
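If the file still ends up stored as application/octet-stream after that change, it may also help to pass the content type explicitly to put() (a small addition of mine, not part of the original answer):

// Passing metadata to put() tells Firebase Storage the MIME type of the blob,
// so it is stored as image/jpeg instead of application/octet-stream.
const snapshot = await storageRef.put(blob, { contentType: 'image/jpeg' });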
I want to download the image and save it to storage when my database is updated with the 'photo_url' field
exports.saveToStorage = functions.database.ref(`/images/${itemImageRef}`)
  .onWrite(event => {
    const filePath = event.data.val();
    const filename = filePath.split('/').pop();

    var download = request.get(filePath).on('error', (err) => {
      console.log(err)
    })
    .pipe(fs.createWriteStream(filename));

    download.on('finish', () => {
      const bucket = gcs.bucket('id.appspot.com');
      const storagePath = `images/${filename}`;

      return bucket.upload(download, { destination: storagePath })
        .then(() => {
          console.log('success upload');
        });
    });
  });
It logs "Error: EROFS: read-only file system, open 'image.jpg' at Error (native)." I suppose I cannot retrieve the file saved by createWriteStream?
So how should I download images from the web?
With the approach suggested by @Jobsamuel, the code now works:
exports.saveToStorage = functions.database.ref(`/images/${itemImageRef}`)
  .onWrite(event => {
    const filePath = event.data.val();
    const filename = filePath.split('/').pop();

    const bucket = gcs.bucket('id.appspot.com');
    const remoteWriteStream = bucket.file(filename).createWriteStream({
      metadata: { contentType: 'image/jpeg' }
    });

    request(filePath).pipe(remoteWriteStream)
      .on('error', (err) => console.log(err))
      .on('finish', () => console.log('success save image'));
  });
By piping the request result directly into the bucket, this solves the problem by skipping the step of writing to a local file, which I suspect is the reason my original code failed. Also, don't forget to set contentType for images.
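As a side note, the request package has since been deprecated; a similar approach with Node's built-in https module might look like this (a sketch, assuming the stored photo_url is a plain https URL and bucket is the same GCS bucket as above):

const https = require('https');

// Stream the remote image straight into the bucket, no local file involved.
function saveImageToBucket(photoUrl, bucket, filename) {
  const remoteWriteStream = bucket.file(filename).createWriteStream({
    metadata: { contentType: 'image/jpeg' }
  });

  https.get(photoUrl, (response) => {
    response.pipe(remoteWriteStream)
      .on('error', (err) => console.error(err))
      .on('finish', () => console.log('success save image'));
  }).on('error', (err) => console.error(err));
}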