Firebase storage - Download directory as ".zip"

If you use Firebase Storage, you will notice that it's not possible to download a folder (as a zip) directly from the Firebase console or the Google Cloud console. This makes it hard to create a backup of your Firebase Storage bucket, which matters all the more because Firestore collections can be exported into Firebase Storage.
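For reference, such a Firestore export can be triggered from the command line; the resulting folder name is a timestamp like the one used in the example at the end of this post. A minimal sketch, assuming the gcloud CLI is authenticated against your project (the bucket name is the one from the script below):

gcloud firestore export gs://stackd-56e96.appspot.com
# Creates a folder such as gs://stackd-56e96.appspot.com/2021-04-16T11:47:46_54052/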

I created a Node.js script based on firebase-admin and jszip. It takes two arguments: first the source path in Firebase Storage, and second the local path where the zip file should be written.
package.json:
{
  "dependencies": {
    "firebase-admin": "^9.6.0",
    "jszip": "^3.6.0"
  }
}
index.js:
const fs = require("fs");
const JSZip = require("jszip");
const admin = require("firebase-admin");
const serviceAccount = require("./service-account-key.json");

async function main() {
  try {
    admin.initializeApp({
      credential: admin.credential.cert(serviceAccount),
      storageBucket: "stackd-56e96.appspot.com",
    });
    const bucket = admin.storage().bucket();

    // arg 1: source folder in Firebase Storage; arg 2: destination zip path
    const src_storage_path = process.argv[2];
    let dest_storage_path = process.argv[3];
    // ":" is not a valid file-name character on some systems; replace all of them
    if (dest_storage_path.includes(":"))
      dest_storage_path = dest_storage_path.replace(/:/g, "_");

    const jszip = new JSZip();

    // List every object under the source prefix
    const files = (await bucket.getFiles({
      prefix: `${src_storage_path}/`,
    }))[0];

    // Download all files in parallel; each download resolves to [Buffer]
    const filesContent = await Promise.all(
      files.map((file) => file.download())
    );
    filesContent.forEach((content, i) => {
      jszip.file(files[i].name, content[0]);
    });

    // Build the zip in memory and write it to disk
    const content = await jszip.generateAsync({ type: "nodebuffer" });
    await fs.promises.writeFile(dest_storage_path, content);
  } catch (error) {
    console.error(error);
  }
}
main();
Command-line example:
node index.js 2021-04-16T11:47:46_54052 backup.zip
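If you don't remember the exact export folder name, listing the bucket root shows it; a sketch, assuming gsutil is authenticated against the same project:

gsutil ls gs://stackd-56e96.appspot.com/
# gs://stackd-56e96.appspot.com/2021-04-16T11:47:46_54052/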

Related

Uploading to Firebase Storage from a Google Cloud Function

I'm trying to create a Firebase Function that lets me pass in an array of image URLs to generate a montage, upload the file to Firebase Storage, and then return the generated download URL. This will be called from my app, so I'm using functions.https.onCall.
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({imageMagick: true});
admin.initializeApp();

exports.createMontage = functions.https.onCall((data, context) => {
  var storageRef = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var createdMontage = storageRef.file('createdMontage.jpg');

  function generateMontage(list) {
    let g = gm();
    list.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .write(createdMontage, function (err) {
        if (!err) console.log("Written montage image.");
      });
    return true;
  }

  generateMontage(data);
  return createdMontage.getDownloadURL();
});
The function generateMontage() works locally on Node.js (with a local write destination).
Thank you.
Have a look at this example from the docs:
https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-code-sample
2021-01-11 Update
Here's a working example. I'm using regular Cloud Functions, and it's limited in that srcObject, dstObject, and bucketName are constants, but it does create montages, which is your goal.
PROJECT=[[YOUR-PROJECT]]
BILLING=[[YOUR-BILLING]]
REGION=[[YOUR-REGION]]
FUNCTION=[[YOUR-FUNCTION]]
BUCKET=[[YOUR-BUCKET]]
OBJECT=[[YOUR-OBJECT]] # Path from ${BUCKET} root

gcloud projects create ${PROJECT}

gcloud beta billing projects link ${PROJECT} \
--billing-account=${BILLING}

gcloud services enable cloudfunctions.googleapis.com \
--project=${PROJECT}

gcloud services enable cloudbuild.googleapis.com \
--project=${PROJECT}

gcloud functions deploy ${FUNCTION} \
--memory=4096MB \
--max-instances=1 \
--allow-unauthenticated \
--entry-point=montager \
--set-env-vars=BUCKET=${BUCKET},OBJECT=${OBJECT} \
--runtime=nodejs12 \
--trigger-http \
--project=${PROJECT} \
--region=${REGION}

ENDPOINT=$(\
  gcloud functions describe ${FUNCTION} \
  --project=${PROJECT} \
  --region=${REGION} \
  --format="value(httpsTrigger.url)")

curl \
--request GET \
${ENDPOINT}
`package.json`:
```JSON
{
  "name": "montage",
  "version": "0.0.1",
  "dependencies": {
    "@google-cloud/storage": "5.7.1",
    "gm": "^1.23.1"
  }
}
```
And index.js:
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
const gm = require('gm').subClass({ imageMagick: true });

const bucketName = process.env["BUCKET"];
const srcObject = process.env["OBJECT"];
const dstObject = "montage.png";

// Creates 2x2 montage from four copies of the source image
const list = [
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`
];

const montager = async (req, res) => {
  // Download GCS `srcObject` to `/tmp`
  await storage
    .bucket(bucketName)
    .file(srcObject)
    .download({
      destination: `/tmp/${srcObject}`
    });

  // Create a GCS write stream for the montage
  const obj = storage
    .bucket(bucketName)
    .file(dstObject)
    .createWriteStream();

  let g = gm();
  list.forEach(f => {
    g.montage(f);
  });

  console.log(`Returning`);
  g
    .geometry('+81+81')
    .density(5000, 5000)
    .stream()
    .pipe(obj)
    .on(`finish`, () => {
      console.log(`finish`);
      res.status(200).send(`ok`);
    })
    .on(`error`, (err) => {
      console.log(`error: ${err}`);
      res.status(500).send(`uhoh!`);
    });
};
exports.montager = montager;
I have never used 'gm', but, according to its npm page, it has a toBuffer function.
So maybe something like this could work:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gm = require('gm').subClass({ imageMagick: true });
admin.initializeApp();
exports.createMontage = functions.https.onCall((data, _context) => {
const bucketName = 'xyz-zyx'; // not sure, I've always used the default bucket
const bucket = admin.storage().bucket(bucketName);
const storagePath = 'createdMontage.jpg';
const fileRef = bucket.file(storagePath);
const generateMontage = async (list) => {
const g = gm();
list.forEach(function (p) {
g.montage(p);
});
g.geometry('+81+81');
g.density(5000, 5000);
return new Promise(resolve => {
g.toBuffer('JPG', (_err, buffer) => {
const saveTask = fileRef.save(buffer, { contentType: 'image/jpeg' });
const baseStorageUrl = `https://firebasestorage.googleapis.com/v0/b/${bucket.name}/o/`;
const encodedPath = encodeURIComponent(storagePath);
const postfix = '?alt=media'; // see stackoverflow.com/a/58443247/6002078
const publicUrl = baseStorageUrl + encodedPath + postfix;
saveTask.then(() => resolve(publicUrl));
});
});
};
return generateMontage(data);
});
But it seems it can be done more easily. As Methkal Khalawi commented:
here is a full example on how to use ImageMagick with Functions. Though they are using it for blurring an image, the idea is the same. And here is a tutorial from the documentation.
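For a flavor of what that blurring example does, the core gm call looks roughly like this; a sketch under the assumption that the image has already been downloaded to /tmp (the paths and sigma value are illustrative):

const gm = require('gm').subClass({ imageMagick: true });

// Blur a local copy of the image; in a Cloud Function you would
// download the source to /tmp first and upload the result afterwards,
// as in the montage examples in this thread.
gm('/tmp/source.jpg')
  .blur(0, 16) // radius 0 lets ImageMagick derive it from sigma
  .write('/tmp/blurred.jpg', (err) => {
    if (err) console.error(err);
  });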
I think you can pipe the output stream from the gm module into a Firebase Storage object write stream.
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({imageMagick: true});
admin.initializeApp();

exports.createMontage = functions.https.onCall(async (data, context) => {
  var storage = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var downloadURL = await new Promise((resolve, reject) => {
    let g = gm();
    // 'data' is the array of image paths passed by the caller
    data.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .stream((err, stdout, stderr) => {
        if (err) {
          reject(err);
        }
        stdout.pipe(
          storage.file('generatedMontage.png').createWriteStream({
            metadata: {
              contentType: 'image/png',
            },
          })
        ).on('finish', () => {
          storage
            .file('generatedMontage.png')
            .getSignedUrl({
              action: 'read',
              expires: '03-09-2491', // Non-expiring public URL
            })
            .then(([url]) => {
              resolve(url);
            });
        });
      });
  });
  return downloadURL;
});
FYI, the Firebase Admin SDK storage object does not have a getDownloadURL() function.
You should generate a non-expiring public signed URL from the storage object instead.
In addition, signed URLs can break after some period of time, according to this issue.
To avoid that, you should initialize the Firebase app with a permanent service account:
const admin = require('firebase-admin');
const serviceAccount = require('../your-service-account.json');

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  projectId: JSON.parse(process.env.FIREBASE_CONFIG).projectId,
  databaseURL: JSON.parse(process.env.FIREBASE_CONFIG).databaseURL,
  storageBucket: JSON.parse(process.env.FIREBASE_CONFIG).storageBucket,
});

Error: ****@appspot.gserviceaccount.com does not have storage.objects.get access

I have a simple firebase function that triggers on a file being uploaded to Firebase Storage. It was working on the non-main bucket, but once I changed it to listen to the main bucket I began receiving these error messages....
Error: *****@appspot.gserviceaccount.com does not have storage.objects.get access to *****.appspot.com/ff-icon-01.png.
The function is in the same project as the storage bucket.
const admin = require('firebase-admin');
admin.initializeApp();
const functions = require('firebase-functions');
const {Storage} = require('@google-cloud/storage');
const gcs = new Storage();

import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';

export const makeThumbnail = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');

    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download Source File
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 128, 256];

    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb@${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName)
      });
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove the tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  });
They have the same rules. Not sure what's up.
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write: if request.auth != null;
    }
  }
}
For anyone else who runs into this issue: the problem for me was that I was using the wrong project in my gcloud setup when deploying my functions. I was using one project in the Firebase CLI and a different project in the gcloud CLI.
It worked once I deleted all the functions, switched the gcloud CLI to the right project, and deployed the functions again.
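Concretely, you can check which project each CLI is pointing at and align them; a sketch with a placeholder project ID:

# What each CLI currently targets
gcloud config get-value project
firebase use

# Point both at the same project
gcloud config set project my-project-id
firebase use my-project-id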

Uploading files from Firebase Cloud Functions to Cloud Storage

The documentation is too complex for me to understand. It shows how to download a file from Cloud Storage to Cloud Functions, manipulate the file, and then upload the new file to Cloud Storage. I just want to see the basic, minimum instructions for uploading a file from Cloud Functions to Cloud Storage. Why doesn't this work:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
exports.storage = functions.firestore.document('Test_Value').onUpdate((change, context) => {
  var metadata = {
    contentType: 'text',
  };
  admin.storage().ref().put({'test': 'test'}, metadata)
    .then(function() {
      console.log("Document written.");
    })
    .catch(function(error) {
      console.error(error);
    });
});
The error message is admin.storage(...).ref is not a function. I'm guessing that firebase-admin includes Firestore but not Storage? Instead of firebase-admin, should I use @google-cloud/storage? Why doesn't this work:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {Storage} = require('@google-cloud/storage')();
const storage = new Storage();
admin.initializeApp();

exports.storage = functions.firestore.document('Test_Value').onUpdate((change, context) => {
  storage.bucket().upload({'test': 'test'}, {
    metadata: {
      contentType: 'text'
    }
  });
});
I can't even deploy this code; the error message is
Error parsing triggers: Cannot find module './clone.js'
Apparently an npm module dependency is missing? But the module isn't called clone.js? I tried requiring child-process-promise, path, os, and fs; none fixed the missing clone.js error.
Why does admin.initializeApp(); lack parameters, when in my index.html file I have:
firebase.initializeApp({
  apiKey: 'swordfish',
  authDomain: 'myapp.firebaseapp.com',
  databaseURL: "https://myapp.firebaseio.com",
  projectId: 'myapp',
  storageBucket: "myapp.appspot.com"
});
Another issue I'm seeing:
npm list -g --depth=0
/Users/TDK/.nvm/versions/node/v6.11.2/lib
├── child_process@1.0.2
├── UNMET PEER DEPENDENCY error: ENOENT: no such file or directory, open '/Users/TDK/.nvm/versions/node/v6.11.2/lib/node_modules/firebase-admin/package.json
├── firebase-functions@2.1.0
├── firebase-tools@6.0.1
├── firestore-backup-restore@1.3.1
├── fs@0.0.2
├── npm@6.4.1
├── npm-check@5.9.0
├── protractor@5.4.1
├── request@2.88.0
└── watson-developer-cloud@3.13.0
In other words, there's something wrong with firebase-admin, or with Node 6.11.2. Should I use a Node Version Manager to revert to an older version of Node?
Go to https://console.cloud.google.com/iam-admin/iam
Click the pencil icon next to your App Engine default service account
+ ADD ANOTHER ROLE
Add Cloud Functions Service Agent (the CLI equivalent is shown below)
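The same grant can be made from the command line; a sketch, assuming your project ID (the service account address matches the one in the error message):

gcloud projects add-iam-policy-binding my-project-id \
  --member="serviceAccount:my-project-id@appspot.gserviceaccount.com" \
  --role="roles/cloudfunctions.serviceAgent"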
In my specific use case, I needed to decode a base64 string into a byte array and then use that to save the image.
var serviceAccount = require("./../serviceAccountKey.json");

import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';

admin.initializeApp({
  projectId: serviceAccount.project_id,
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "https://your_project_id_here.firebaseio.com", // update this
  storageBucket: "your_bucket_name_here.appspot.com" // update this
});

function uploadProfileImage(imageBytes64Str: string): Promise<any> {
  const bucket = admin.storage().bucket();
  const imageBuffer = Buffer.from(imageBytes64Str, 'base64');
  const imageByteArray = new Uint8Array(imageBuffer);
  const file = bucket.file(`images/profile_photo.png`);
  const options = { resumable: false, metadata: { contentType: "image/jpeg" } };
  // options may not be necessary

  return file.save(imageByteArray, options)
    .then(stuff => {
      return file.getSignedUrl({
        action: 'read',
        expires: '03-09-2500'
      });
    })
    .then(urls => {
      const url = urls[0];
      console.log(`Image url = ${url}`);
      return url;
    })
    .catch(err => {
      console.log(`Unable to upload image ${err}`);
    });
}
Then you can call the method like this and chain the calls.
uploadProfileImage(image_bytes_here)
  .then(url => {
    // Do stuff with the url here
  });
Note: You must initialize admin with a service account and specify the default bucket. If you simply do admin.initializeApp(), then your image URLs will expire in 10 days.
Steps to properly use a service account.
Go to Service Accounts and generate a private key
Put the JSON file in your functions folder (next to src and node_modules)
Go to Storage and copy the URL not including the "gs://" in the front. Use this for the storage bucket url when initializing admin.
Use your project ID above for the database URL.
See Introduction to the Admin Cloud Storage API for further details on how to use the Cloud Storage service in the Firebase Admin SDK.
var admin = require("firebase-admin");
var serviceAccount = require("path/to/serviceAccountKey.json");

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  storageBucket: "<BUCKET_NAME>.appspot.com"
});

var bucket = admin.storage().bucket();

// 'bucket' is an object defined in the @google-cloud/storage library.
// See https://googlecloudplatform.github.io/google-cloud-node/#/docs/storage/latest/storage/bucket
// for more details.
Regarding uploading objects, see the Uploading Objects sample code in the Cloud Storage documentation:
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');

// Creates a client
const storage = new Storage();

/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'Local file to upload, e.g. ./local/path/to/file.txt';

// Uploads a local file to the bucket
await storage.bucket(bucketName).upload(filename, {
  // Support for HTTP requests made with `Accept-Encoding: gzip`
  gzip: true,
  metadata: {
    // Enable long-lived HTTP caching headers
    // Use only if the contents of the file will never change
    // (If the contents will change, use cacheControl: 'no-cache')
    cacheControl: 'public, max-age=31536000',
  },
});

console.log(`${filename} uploaded to ${bucketName}.`);
I uploaded a file from my hard drive to Firebase Cloud Storage via Google Cloud Functions. First, I found the documentation for Google Cloud Functions bucket.upload.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();

exports.Storage = functions.firestore.document('Storage_Value').onUpdate((change, context) => {
  const {Storage} = require('@google-cloud/storage');
  const storage = new Storage();
  const bucket = storage.bucket('myapp.appspot.com');
  const options = {
    destination: 'Test_Folder/hello_world.dog'
  };
  bucket.upload('hello_world.ogg', options).then(function(data) {
    const file = data[0];
  });
  return 0;
});
The first three lines are Cloud Functions boilerplate. The next line
exports.Storage = functions.firestore.document('Storage_Value').onUpdate((change, context) => {
creates the Cloud Function and sets the trigger. The next three lines are more Google Cloud boilerplate.
The rest of the code locates the file hello_world.ogg in the functions folder of my project directory, uploads it to the directory Test_Folder in my Firebase Cloud Storage, and renames the file to hello_world.dog. The upload returns a promise, and the line const file = data[0]; is unnecessary unless you want to do something else with the file.
Lastly, we return 0;. This line does nothing except prevent the error message "Function returned undefined, expected Promise or Value".
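A cleaner fix than returning 0 is to return the upload promise itself, so Cloud Functions keeps the instance alive until the upload finishes; a sketch of the same function with that change:

exports.Storage = functions.firestore.document('Storage_Value').onUpdate((change, context) => {
  const {Storage} = require('@google-cloud/storage');
  const storage = new Storage();
  const bucket = storage.bucket('myapp.appspot.com');
  // Returning the promise satisfies the "expected Promise or Value" check
  // and guarantees the upload completes before the function terminates.
  return bucket.upload('hello_world.ogg', {
    destination: 'Test_Folder/hello_world.dog'
  });
});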
if (req.rawBody) {
  busboy.end(req.rawBody);
} else {
  req.pipe(busboy);
}
As described in this issue: https://github.com/GoogleCloudPlatform/cloud-functions-emulator/issues/161#issuecomment-376563784

Cloud Storage API doesn't work when deploy on Google Cloud Functions using Firebase

This works perfectly when serving locally with Firebase:
const gCloudConfig = {
  projectId: 'XXXX-X1234',
  keyFilename: './key.json'
}
const Storage = require('@google-cloud/storage')(gCloudConfig);
const storageBucket = Storage.bucket(bucketUrl);

storageBucket.upload(file.path, {destination: file.name})
  .then(() => {
    //
  });
But this doesn't work when I deploy to Firebase:
const Storage = require('@google-cloud/storage')();
const storageBucket = Storage.bucket(bucketUrl);

storageBucket.upload(file.path, {destination: file.name})
  .then(() => {
    //
  });
I put this line after admin.initializeApp(...), since I saw that it fixed the problem for someone, but it still doesn't work.
I've tried a lot of things:
const gCloudConfig = { projectId: 'XXXX-X1234' };
const gCloudConfig = { key: API_KEY };
const gCloudConfig = { key: API_KEY, projectId: 'XXXX-X1234' };
const gCloudConfig = functions.config().firebase;
I'm kinda lost, please help me!
It's easier if you just initialize the Firebase Admin SDK with its default credentials, then access the Cloud Storage APIs via that. There's no need to initialize Storage on its own.
const admin = require('firebase-admin')
admin.initializeApp()

const bucket = admin.storage().bucket()

bucket.upload(localPath, {
  destination: remotePath
})
Note that the no-argument init of the Admin SDK is available when using firebase-functions@1.0.0 or later (currently 1.0.2).
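A matching dependency entry would look something like this (only the firebase-functions version is taken from the note above; anything else you need goes alongside it):

{
  "dependencies": {
    "firebase-functions": "^1.0.2"
  }
}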

Unzip file at Firebase storage

I am currently learning Firebase.
I have a requirement that from HTML I can transfer a file (.zip) successfully to a Firebase Storage bucket.
My question: is it possible to unzip the file after the upload completes, on the Firebase Storage side?
I can do that using PHP; I just wonder if the same is possible using Firebase without any server code.
I believe you can achieve it by leveraging Cloud Functions.
You can write storage triggers for paths, and once a zip file is uploaded it triggers the Cloud Function, which unzips the file to where you want it. To save space you can also delete the zip file after unzipping it, as sketched below.
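For that cleanup step, removing the archive is a single call on the file handle once extraction has finished; a minimal sketch (the full extraction example further down does not include it):

// Hypothetical helper: call after all zip entries have been written.
async function deleteArchive(bucket, objectName) {
  await bucket.file(objectName).delete();
  console.log(`Deleted ${objectName}`);
}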
No, the file you upload from the client gets stored as exactly the same file in your storage bucket. There is not currently a way to automatically change that stored file after it's been uploaded.
There is a way to perform it with Firebase Functions. We can modify the code in Aeyrium's answer to this Stack Overflow question to suit our requirements, as follows:
const functions = require('firebase-functions');
const admin = require("firebase-admin");
const path = require('path');
const fs = require('fs');
const os = require('os');
const unzip = require('unzipper');

var serviceAccount = require("./serviceAccountKey.json");

const firebaseConfig = {
  apiKey: "*",
  authDomain: "*",
  databaseURL: "*.firebaseio.com",
  projectId: "*",
  storageBucket: "p*.appspot.com",
  messagingSenderId: "*",
  appId: "*",
  measurementId: "*",
  credential: admin.credential.cert(serviceAccount)
};

admin.initializeApp(firebaseConfig);

const storage = admin.storage();

const runtimeOpts = {
  timeoutSeconds: 540,
  memory: '256MB'
};

exports.unzip = functions.runWith(runtimeOpts).storage.object().onFinalize((object) => {
  return new Promise((resolve, reject) => {
    //console.log("object is:", object)
    if (object.contentType !== 'application/x-zip') {
      reject();
    } else {
      //const bucket = admin.storage.bucket(object.bucket)
      const bucket = admin.storage().bucket();
      const remoteFile = bucket.file(object.name);
      const remoteDir = object.name.replace('.zip', '');

      console.log(`Downloading ${remoteFile}`);

      remoteFile.createReadStream()
        .on('error', err => {
          console.error(err);
          reject(err);
        })
        .on('response', response => {
          // Server connected and responded with the specified status and headers.
          //console.log(response)
        })
        .on('end', () => {
          // The file is fully downloaded.
          console.log("Finished downloading.");
          resolve();
        })
        .pipe(unzip.Parse())
        .on('entry', entry => {
          const file = bucket.file(`${remoteDir}/${entry.path}`);
          entry.pipe(file.createWriteStream())
            .on('error', err => {
              console.log(err);
              reject(err);
            })
            .on('finish', () => {
              console.log(`Finished extracting ${remoteDir}/${entry.path}`);
            });
          //entry.autodrain();
        });
    }
  });
});
Also, there is a short tutorial about that here in TypeScript.
