I have a problem: when I upload an image to Cloud Storage it returns this error:
{
"error": {
"code": 400,
"message": "Invalid HTTP method/URL pair."
}
}
I'm using Expo with Image Picker to get the image from my device:
const elegirImagen = async () => {
let res = await imagePicker.launchImageLibraryAsync({
mediaTypes: imagePicker.MediaTypeOptions.Images,
allowsEditing:true,
aspect:[4,3],
quality:1,
});
if(!res.cancelled){
const source = {uri:res.uri}
console.log('image picker if', source)
setImagen(source)
}
}
This saves the image, and the following function uploads it:
const uploadImage = async()=>{
if(!imagen) return
const metadata = {
contentType: 'image/jpeg',
};
console.log('imagen uri upload', imagen.uri)
const res = await fetch(imagen.uri);
const blob = await res.blob();
const filename = imagen.uri.substring(imagen.uri.lastIndexOf('/')+1)
// Upload file and metadata to the object 'images/mountains.jpg'
const storageRef = ref(storage, `./images/${filename}` + filename);
const uploadTask = uploadBytesResumable(storageRef, blob, metadata);
// Listen for state changes, errors, and completion of the upload.
uploadTask.on('state_changed',
(snapshot) => {
// Get task progress, including the number of bytes uploaded and the total number of bytes to be uploaded
const progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
setProgreso(progress)
switch (snapshot.state) {
case 'paused':
console.log('Upload is paused');
break;
case 'running':
console.log('Upload is running');
break;
}
},
(error) => {
// A full list of error codes is available at
// https://firebase.google.com/docs/storage/web/handle-errors
switch (error.code) {
case 'storage/unauthorized':
// User doesn't have permission to access the object
break;
case 'storage/canceled':
// User canceled the upload
break;
// ...
case 'storage/unknown':
// Unknown error occurred, inspect error.serverResponse
break;
}
},
() => {
// Upload completed successfully, now we can get the download URL
getDownloadURL(uploadTask.snapshot.ref).then((downloadURL) => {
console.log('File available at', downloadURL);
});
}
)}
This uploads the image as the Firebase docs describe, but when I check the database I always get the error shown above.
Finally solved it. The solution was to change the path in the storage reference:
const storageRef = ref(storage, `images/` + filename);
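For context, a minimal sketch of the corrected reference inside the upload function above; it uses the same modular SDK calls (ref, uploadBytesResumable) and derives the filename the same way:

// Corrected path: 'images/<filename>' rather than './images/<filename>' + filename
const filename = imagen.uri.substring(imagen.uri.lastIndexOf('/') + 1);
const storageRef = ref(storage, 'images/' + filename);
const uploadTask = uploadBytesResumable(storageRef, blob, metadata);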
Related
I'm able to successfully generate a signed URL for the thumbnails I am creating, but after about a week they no longer work.
I am not getting any errors or information as to why they are expiring; they just are.
I have been using Firebase to develop my app and now all of a sudden I have to deal with all this Google Cloud Storage permission handling and whatnot. I really have no idea what's going on... It's got to be some sort of permission issue?
I have tried generating a new service account from the Firebase console, but no luck. I am tired of waiting weeks to see whether they are going to expire again or not. I hope someone can guide me to a solution for this; it seems like it's a problem for many people. We can't afford to go live and have gray thumbnails all over the app because they expire.
Here is how we are generating the signed URL with Firebase Cloud Functions:
export const generateThumbs = functions.storage
.object()
.onFinalize(async object => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const fileName = filePath.split('/').pop();
const userUid = filePath.split('/')[2];
const sizes = [150, 256];
const bucketDir = dirname(filePath);
if (!filePath.startsWith('categories/')) {
console.log('This is not in the categories directory.');
return false;
}
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
const bucket = gcs.bucket(fileBucket);
const tempFilePath = path.join(tmpdir(), fileName);
return bucket.file(filePath).download({
destination: tempFilePath
}).then(() => {
sizes.map(size => {
const newFileName = `thumb#${size}_${fileName}`
const newFileTemp = path.join(tmpdir(), newFileName);
const newFilePath = `thumbs/${newFileName}`
return sharp(tempFilePath)
.resize(size, size)
.toFile(newFileTemp, () => {
return bucket.upload(newFileTemp, {
destination: join(bucketDir, newFilePath),
metadata: {
contentType: 'image/jpeg'
}
}).then((data) => {
const file = data[0]
file.getSignedUrl({
action: 'read',
expires: '03-17-2100'
}, function(err, url) {
if (err) {
console.error(err);
return;
}
if (size === 150) {
return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
} else if (size === 256) {
return admin.database().ref('categories').child(fileName).child('thumb').set(url)
.then(() => {
admin.database().ref('categories').child(fileName).child('tempThumb').remove();
})
}
})
})
})
})
}).catch(error =>{
console.log(error);
});
})
After setting the expiration date to 03-17-2100 we don't expect this type of behaviour, but like I said, I feel like it's something to do with GCS permissions. I tried to contact them, but after about a week I am still waiting for their response.
I appreciate all the feedback!
I've been having trouble viewing the image files I've uploaded to Firebase and just noticed the issue is with the file type in Firebase.
There are two files in my Firebase Storage console: one uploaded from my iOS simulator (octet-stream) and the other uploaded directly into the console from the browser, which uploads properly and is viewable.
Here are my select and upload functions:
_selectPhoto = async () => {
const status = await getPermission(Permissions.CAMERA_ROLL);
if (status) {
let imageName = "pic"
const result = await ImagePicker.launchImageLibraryAsync(options);
if (!result.cancelled) {
Animated.timing(this.animatedWidth, {
toValue: 600,
duration: 15000
}).start()
this.uploadImage(result.uri, imageName)
.then(() => {
this.props.navigation.navigate('Profile')
})
.catch((error) => {
Alert.alert('Must Sign In');
this.props.navigation.navigate('Login')
console.log(error);
})
}
}
};
uploadImage = async (uri, imageName) => {
const user = firebase.auth().currentUser;
const response = await fetch(uri);
const blob = await response.blob();
let storageRef = firebase.storage().ref().child('images/' + user.displayName + '/' + imageName + '.jpg');
const snapshot = await storageRef.put(blob);
blob.close();
snapshot.ref.getDownloadURL().then(function(downloadURL) {
console.log("File available at", downloadURL);
user.updateProfile({
photoURL: downloadURL.toString(),
}).then(function() {
console.log('saved photo')
}).catch(function(error) {
console.log('failed photo')
});
});
}
When I get the link in my console, it also has the media&token:
... .appspot.com/o/profile-pic.jpg?alt=media&token=56eb9c36-b5cd-4dbb-bec1-3ea5c3a74bdd
If I CMD+Click in VS Code I receive an error:
{
error: {
code: 400,
message: "Invalid HTTP method/URL pair."
}
}
So naturally, when I put that link in the browser it downloads a file with that name but says:
The file “pic.jpg” could not be opened.
It may be damaged or use a
file format that Preview doesn’t recognize.
Maybe it could be something with mediaTypes, but I'm not exactly sure how to use it.
mediaTypes : String -- Choose what type of media to pick. Usage: ImagePicker.MediaTypeOptions.<Type>, where <Type> is one of: Images, Videos, All.
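For reference, a minimal sketch of how that option is passed to the same expo-image-picker call used above (Images restricts the picker to photos; Videos and All are the other values):

// Minimal sketch: restrict the picker to photos only
const pickImageOnly = async () => {
  const result = await ImagePicker.launchImageLibraryAsync({
    mediaTypes: ImagePicker.MediaTypeOptions.Images,
  });
  return result;
};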
Thanks!
I've been fighting with this same issue for the past few days. I was finally able to get images to upload and render as expected by following the Firebase upload example in the Expo repo. I don't fully understand why it works, but it seems like Firebase doesn't like the blob that's generated by
const blob = await response.blob();
Try replacing the above with:
const blob = await new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.onload = function() {
resolve(xhr.response);
};
xhr.onerror = function(e) {
console.log(e);
reject(new TypeError('Network request failed'));
};
xhr.responseType = 'blob';
xhr.open('GET', uri, true);
xhr.send(null);
});
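For completeness, here is a sketch of how that XHR-created blob could be wired into the upload from the question. It follows the Expo example's pattern of closing the blob after the upload finishes; the storage path and the explicit image/jpeg contentType are assumptions for illustration:

const uploadWithXhrBlob = async (uri, imageName) => {
  // Build the blob via XMLHttpRequest instead of fetch/response.blob()
  const blob = await new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onload = () => resolve(xhr.response);
    xhr.onerror = () => reject(new TypeError('Network request failed'));
    xhr.responseType = 'blob';
    xhr.open('GET', uri, true);
    xhr.send(null);
  });
  // Same put() call as in the question, with an explicit contentType so the
  // object is stored as image/jpeg instead of application/octet-stream
  const fileRef = firebase.storage().ref().child('images/' + imageName + '.jpg');
  const snapshot = await fileRef.put(blob, { contentType: 'image/jpeg' });
  // Release the blob once the upload is done (as the Expo example does)
  blob.close();
  return snapshot.ref.getDownloadURL();
};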
I am uploading a base64 image to Cloud Storage for Firebase (aka Firebase Storage), using the Firebase web SDK.
The code that I am using for uploading (in React Native) is the following:
const ref = await firebase.storage().ref(user).child(curTime.toString());
await ref.putString(base64String, 'base64', { contentType: 'image/jpeg' })
However, when I try to view the image it cannot be displayed.
The uploaded string is available in my dropbox, here (download to view it).
The file in firebase cloud storage is available here.
I have downloaded this file into my dropbox, you can access it here (download to view it).
Here is the relevant code:
const uploadImageAsync = async(userId, curTime, photoUri) => {
const user = normalizeUserId(userId); // prepare for firebase write
let firebaseImageUrl = null;
let base64String;
try {
base64String = await FileSystem.readAsStringAsync(photoUri,
{ encoding: FileSystem.EncodingTypes.Base64 });
} catch (error) {
console.log(error);
return null;
}
const ref = await firebase.storage().ref(user).child(curTime.toString());
await ref.putString(base64String, 'base64', { contentType: 'image/jpeg' })
.then(async(snapshot) => {
await snapshot.ref.getDownloadURL().then(uploadURL => {
firebaseImageUrl = uploadURL;
}, (error) => { // failed to get uploadedURL location
console.log(error);
return null;
});
}, (error) => { // failed to upload image to firebase storage);
console.log(error);
return null;
});
return firebaseImageUrl;
};
I have to send a file to an API, and therefore I have to use fs.readFileSync(). After uploading the picture to Storage, I call my function to execute the API call, but I cannot get the file from Storage. This is a section of the code, which always comes back empty in the result. I also tried .getFiles() without a parameter and then I got all files, but I don't want to filter them by iteration.
exports.stripe_uploadIDs = functions.https //.region("europe-west1")
.onCall((data, context) => {
const authID = context.auth.uid;
console.log("request is authentificated? :" + authID);
if (!authID) {
throw new functions.https.HttpsError("not authorized", "not authorized");
}
let accountID;
let result_fileUpload;
let tempFile = path.join(os.tmpdir(), "id_front.jpg");
const options_id_front_jpeg = {
prefix: "/user/" + authID + "/id_front.jpg"
};
const storageRef = admin
.storage()
.bucket()
.getFiles(options_id_front_jpeg)
.then(results => {
console.log("JPG" + JSON.stringify(results));
// need to write this file to tempFile
return results;
});
const paymentRef = storageRef.then(() => {
return admin
.database()
.ref("Payment/" + authID)
.child("accountID")
.once("value");
});
const setAccountID = paymentRef.then(snap => {
accountID = snap.val();
return accountID;
});
const fileUpload = setAccountID.then(() => {
return Stripe.fileUploads.create(
{
purpose: "identity_document",
file: {
data: tempFile, // Documentation says I should use fs.readFileSync("filepath")
name: "id_front.jpg",
type: "application/octet-stream"
}
},
{ stripe_account: accountID }
);
});
const fileResult = fileUpload.then(result => {
result_fileUpload = result;
console.log(JSON.stringify(result_fileUpload));
return result_fileUpload;
});
return fileResult;
});
Result is:
JPG[[]]
You need to download your file from the bucket to the function's local context first.
Once your Firebase function starts executing, you can call something like the below.
More or less this should work; just tweak it to your needs and call it within your .onCall context, you get the idea:
import admin from 'firebase-admin';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';
admin.initializeApp();
const { log } = console;
async function tempFile(fileBucket: string, filePath: string) {
const bucket = admin.storage().bucket(fileBucket);
const fileName = 'MyFile.ext';
const tempFilePath = path.join(os.tmpdir(), fileName);
const metadata = {
contentType: 'DONT_FORGET_CONTENT_TYPE'
};
// Download the file to a local temp file
// and do whatever you need with it
await bucket.file(filePath).download({ destination: tempFilePath });
log('File downloaded to', tempFilePath);
// After you are done, if you need to upload the modified file back to your
// bucket, then upload it
// This is optional
await bucket.upload(tempFilePath, {
destination: filePath,
metadata: metadata
});
// Free up disk space by releasing the temp file.
// Otherwise it keeps consuming the function's memory/disk quota
return fs.unlinkSync(tempFilePath);
}
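To connect this back to the original question: once the file has been downloaded to the temp path, it can be read with fs.readFileSync and handed to Stripe. A rough sketch, reusing the fileUploads.create call from the question (the helper name and paths are illustrative):

const fs = require('fs');
const os = require('os');
const path = require('path');

// Hypothetical helper: read the previously downloaded temp file and send it to Stripe
async function sendIdToStripe(stripe, accountID) {
  const tempFilePath = path.join(os.tmpdir(), 'id_front.jpg');
  const fileData = fs.readFileSync(tempFilePath); // Buffer with the image bytes
  const upload = await stripe.fileUploads.create(
    {
      purpose: 'identity_document',
      file: {
        data: fileData,
        name: 'id_front.jpg',
        type: 'application/octet-stream'
      }
    },
    { stripe_account: accountID }
  );
  // Remove the temp file once Stripe has received the bytes
  fs.unlinkSync(tempFilePath);
  return upload;
}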
I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
name: abc123,
raw: download url abc123
}
-
firebase thumb generator example (storage triggered):
exports.generateThumbnail = functions.storage.object().onChange(event => {
const object = event.data; // The Storage object.
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
return;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a thumbnail.
if (fileName.startsWith('thumb_')) {
console.log('Already a Thumbnail.');
return;
}
// Exit if this is a move or deletion event.
if (resourceState === 'not_exists') {
console.log('This is a deletion event.');
return;
}
// Exit if file exists but is not new and is only being triggered
// because of a metadata change.
if (resourceState === 'exists' && metageneration > 1) {
console.log('This is a metadata change event.');
return;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const metadata = {
contentType: contentType
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
});
});
-
How I want it to work (database triggered):
The database trigger event does not have a storage object. How can I access the storage object?
When a thumbnail is created successfully, I want to add the downloadUrl of the thumb image to the database.
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
const object = event.data; // This Storage object does not work when using a database trigger so how do I access the same storage object, I have the image name?
//thumb generator code...
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
//add thumb image download url back to the database
});
});
-
image: {
name: abc123,
raw: download url abc123
thumb: download url thumb_abc123 //added after thumb is created
}
This is how I got it to work.
Add Cloud Storage to your project.
If you want to retrieve the image's signed URL like I do, add projectId and keyFilename to your Cloud Storage reference:
const gcs = require('@google-cloud/storage')({
projectId: 'name-1234',
keyFilename: './keyfile.json'
});
generateThumbnail function
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(image => {
const dataID = image.params.dataID
const childDataID = image.params.childDataID
const fileName = image.data.val()
//get your project storage bucket id
const storageBucket = functions.config().firebase.storageBucket
//path to image
const imagePath = `${dataID}/${childDataID}`
//open bucket
const bucket = gcs.bucket(storageBucket)
//location of the image in the bucket
const object = bucket.file(imagePath)
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const metadata = {
contentType: 'image/jpeg'
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(400, 400)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
var today = new Date();
var dd = today.getDate();
var mm = today.getMonth() + 1; //January is 0!
const yyyy = today.getFullYear() + 5; // add a few years
if (dd < 10) {
dd = '0' + dd
}
if (mm < 10) {
mm = '0' + mm
}
today = mm + '-' + dd + '-' + yyyy;
bucket.file(filePath).getSignedUrl({
action: 'read',
expires: today
}, function(err, url) {
if (err) {
console.error(err);
return;
}
//add thumb image url to message (in database)
return admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
thumb: url
})
});
});
})
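As a side note, the date block above just builds the 'MM-DD-YYYY' string that getSignedUrl expects for expires, a few years out. A more compact sketch of the same idea (not part of the original answer):

// Build an 'MM-DD-YYYY' expiry string five years from now
const pad = (n) => String(n).padStart(2, '0');
const now = new Date();
const expires = `${pad(now.getMonth() + 1)}-${pad(now.getDate())}-${now.getFullYear() + 5}`;
// e.g. '03-17-2030'; pass this as { action: 'read', expires } to getSignedUrl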