I am using cors-anywhere within the firebase functions platform and it works perfectly for my needs, until i need to upload a file via the proxy to an external address.
From what I have gathered over struggling with this for a few days this is due to the way that firebase parses the request body, specifically the multipart/form-data. I have seen examples of people using busboy or multer to extract the buffers to store in firebase storage or similar, but not any examples that would allow the function to reverse proxy the file through to the targetted address.
I am quite desperate to come up with a solution for this and am at my wits end.
for my firebase function (working great for everything except posting multipart/form data):
const { app } = require("firebase-admin");
const functions = require("firebase-functions");
const cors_proxy = require('cors-anywhere').createServer({
originWhitelist: [], // Allow all origins temporarily whilst in development
requireHeader: [],
removeHeaders: [],
redirectSameOrigin: true,
httpProxyOptions: {
xfwd: false,
},
});
exports.app = functions.https.onRequest(function(req, res) {
functions.logger.info('method', req.method);
functions.logger.info('raw body', req.rawBody);
functions.logger.info('body', req.body);
req.url = '/https:/' + req.url;
functions.logger.info(req.url);
cors_proxy.emit('request', req, res);
});
and the method i am using to send from my client application:
static UploadItems$ = (reviewId, files, artist, description) => {
const progressSubscriber = new Subject();
const request$ = of(files).pipe(
concatAll(),
switchMap(f => {
const formdata = new FormData();
console.log(f.name, f);
formdata.append("reviewFile", f, f.name);
formdata.append("artist", artist);
formdata.append("description", description);
return ajax({
url: SyncsketchPosts.UploadFile(reviewId),
method: 'POST',
headers: UploadHeaders,
body: formdata,
crossDomain: true
//progressSubscriber,
})
}),
tap(console.log),
).subscribe(res => console.log(res))
}
Related
I have an .onCreate cloud function, that once triggered, creates a task and runs it.
If I simply run admin.messaging().sendToDevice(payload.tokens, payload.message); inside the exports.createScheduledNotification function, then it runs fine, but I wanted to utilise Cloud Tasks so that I can schedule the task a little further into the future.
However, even if the entire payload is inside the scheduledNotification function, it still doesn't run. I have a feeling that it's actually not being called at all. However, I've checked in the Google Cloud Tasks console, and it shows that it has run, but I get no response from the Firebase Messaging at all, so from that I can deduce it's not running.
Is there something wrong with how I'm sending the task request? Is the cloud function that receives the call written incorrectly?
Cloud Functions & Tasks Set up and Imports
'use strict';
const functions = require('firebase-functions');
var admin = require("firebase-admin");
const { CloudTasksClient } = require('#google-cloud/tasks')
var serviceAccount = require("./secret_file.json");
admin.initializeApp({
credential: admin.credential.cert(placeholder_for_account_credentials)
});
Function that is meant to create and send a Google Task, which is meant to call an .onRequest function, to send a Firebase Message
exports.createScheduledNotification = functions.firestore.document('/followers/{followedUid}')
.onCreate(async (snapshot) => {
const data = snapshot.data();
const tasksClient = new CloudTasksClient();
const queuePath = tasksClient.queuePath(project, location, queue);
const url = `https://{placeholder_for_location}-{placeholder_for_project_id}.cloudfunctions.net/scheduledNotification`;
const docPath = snapshot.ref.path;
const task = {
httpRequest: {
httpMethod: 'POST',
url,
body: Buffer.from(JSON.stringify({})).toString('base64'),
headers: {
'Content-Type': 'application/json',
},
},
scheduleTime: {
seconds: 10 + Date.now() / 1000
}
};
try {
await tasksClient.createTask({ parent: queuePath, task });
} catch (error) {
console.log(error);
}
});
Function to accept a call from Google Tasks, and then sends a message to a selected device_id
exports.scheduledNotification = functions.https.onRequest((req, res) => {
const payload = {
message: {
notification: {
title: 'You have a new follower!',
body: `Moe is now following you.`,
sound: 'default'
},
},
tokens: ["placeholder_for_device_id"]
};
admin.messaging().sendToDevice(payload.tokens, payload.message);
});
I would also like to mention that I saw in another question I needed to allow the principal firebase account to be able to create tasks, and I've added Cloud Task Admin and Cloud Task Enqeuer to every single account, and that the task are showing up in the Cloud Tasks Console.
I'm using Firebase v9.6.2 in a NextJS project. I would need to read JSON files from Firebase Storage. I've created a Flutter App that does that, and this app stores files in Storage, and now I need to read these files from a web project.
I have a simple page in which I implement the following function:
export async function getStaticProps() {
const revalidate = 3;
const storage = getStorage();
const dataPath = "data/";
const reference = ref(storage, dataPath);
const listResult = await listAll(reference);
let listOfFiles = [];
for (let item of listResult.items) {
if (item.name.endsWith(".json")) {
await storage
.getDownloadURL(skillsPath + item.name)
.then(async (url) => {
let content = await fetch(url, {
method: "GET",
headers: {
"Content-Type": "application/json",
},
});
// the error happens before this line so I comment it out:
// content = await content.json();
listOfFiles.push(content);
})
}
}
console.log(listOfFiles);
return { props: { listOfFiles }, revalidate };
}
listOfFiles is this:
[
Response {
size: 0,
timeout: 0,
[Symbol(Body internals)]: { body: [PassThrough], disturbed: false, error: null },
[Symbol(Response internals)]: {
url: 'https://firebasestorage.googleapis.com/v0/b/blablablabla...',
status: 403,
statusText: 'Forbidden',
headers: [Headers],
counter: 0
}
},
...
]
Notes:
The issue does not come from the rules
The user is authenticated
I have enabled Firebase Storage and there are my files, I get the correct URL.
Basically it looks like the example on the official documentation here, however it does not work with NextJS (with XMLHttpRequest either: XMLHttpRequest is not defined).
If this problem is too complicated, or impossible, maybe I can use Firestore instead, however my JSON files are pretty big and I don't know if there is a limitation on the size of the data's keys.
I hope this is clear, please tell me if not.
I am generating an excel file and want it to be downloaded for the client by triggering an API route using the Next.js framework. I am having trouble triggering the download by using fetch. The download can be triggered by window.open(//urlhere, '_self') but the API call using fetch gives this response on request:
API resolved without sending a response for /api/download?Students= this may result in stalled requests.
The excel4node documentation says we can send an excel document through an API like this:
// sends Excel file to web client requesting the / route
// server will respond with 500 error if excel workbook cannot be generated
var express = require('express');
var app = express();
app.get('/', function(req, res) {
wb.write('ExcelFile.xlsx', res);
});
app.listen(3000, function() {
console.log('Example app listening on port 3000!');
});
Here is my backend download.js which lives in pages/api/:
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import Link from "next/link";
import getPartners from "../../components/main";
import excel from "excel4node";
export default function handler(req, res) {
const students = req.query.Students.split(",");
const partners = JSON.stringify(getPartners(students));
let workbook = createExcelList(partners);
workbook.write("PartnerList.xlsx", res);
}
const createExcelList = (partnersJSON) => {
const workbook = new excel.Workbook();
const partnersObject = JSON.parse(partnersJSON);
/* Using excel4node a workbook is generated formatted the way I want */
return workbook;
};
export const config = {
api: {
bodyParser: true,
},
};
And here is the function that is triggered on a button press in the front end.
const makePartners = async () => {
let queryStudents = studentList.join(",");
const url = "http://localhost:3000/api/download?Students=" + queryStudents;
if (studentList.length !== 0) {
try {
const res = await fetch(url, {
headers: {
"Content-Type":
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
},
});
console.log(res);
} catch (e) {
console.log(e);
}
}
};
Which does not trigger the download. But using window.open(url, '_self) does. So, I can trigger the download by changing the function to the following. However I don't think this is the correct way of doing things and would like to be able to understand how to use fetch correctly.
const makePartners = () => {
let queryStudents = studentList.join(",");
const url = "http://localhost:3000/api/download?Students=" + queryStudents;
if (studentList.length !== 0) {
window.open(url, "_Self");
}
};
I am not sure if this is a Next.js issue or not. Does anyone have any insight? Any help would be appreciated.
I am using firestore-store with an express-session to store a cookie for an oauth process. I am adding the session as a middleware layer on the express instance. But I am noticing that a new cookie is generated and stored at every function invocation rather than reading and updating one cookie.
Maybe I am misunderstanding how this should all work: shouldn't there only be one cookie? If each invocation creates a new cookie, how can I retrieve information properly from a cookie in a future invocation?
I have been running this currently with firebase serve --only functions from the terminal and running a local instance. I am not emulating anything but having the sessions stored on the production firebase.
Here is my setup:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const express = require("express");
const app = express();
const session = require('express-session')
const FirestoreStore = require('firestore-store')(session);
admin.initializeApp({
credential: admin.credential.cert(<json>),
databaseURL: 'https://<appUrl>.firebaseio.com'
});
app.set('trust proxy', true)
app.use(cors())
app.use(session({
store: new FirestoreStore({
database: admin.firestore(),
}),
secret: 'My secret',
name: '__session',
resave: true,
saveUninitialized: true,
cookie: {maxAge: 60000, secure: false, httpOnly: false}
}))
var client = etsyjs.client({
key: process.env.ETSY_KEY,
secret: process.env.ESTY_SECRET,
callbackURL: `http://localhost:5000/${process.env.BACKEND_ADDRESS}/authorize`
});
// Etsy oauth
app.get('/register', (req, res) => {
res.setHeader('Cache-Control', 'private');
return client.requestToken((err, response) => {
console.log(response)
if (err) {
return console.log(err);
}
req.session.cookie.token = response.token;
req.session.cookie.sec = response.tokenSecret;
res.status('200').send(response)
});
});
app.get('/authorize', (req, res) => {
console.log(req.session) // does not include any information from the registration
...
}
I can find the information being stored on Cloud Firestore, but I've noticed the session name ('__session') is not being stored. Is this an indication that something else isn't working properly?
Here is a sample document:
session: {"cookie":{"originalMaxAge":60000,"expires":"2020-08-26T20:10:04.337Z","secure":false,"httpOnly":false,"path":"/"}}
Okay this is now all working. There were a few things that made it work.
I included credentials in the backend call so that now looks like this:
return await fetch(url, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
credentials: 'include',
}).then(...)
I added credentials to the cors settings server-side. So that now looks like this:
var corsOptions = {
credentials: true,
origin: true
}
app.use(cors(corsOptions))
These still hadn't fully fixed the issue. What did ultimately solve the problem is that I cleared my cache from my browser. I believe this is the thing that really solved the issue! Definitely painful how long it took to try this simple thing.
I'm trying to set up a testing pattern for a new Hapi app. I've used Mocha and Chai in the past with Express, but I'm trying to use Lab and Code to stay in the Hapi ecosystem. I'm also using Bookshelf and Knex to handle database interaction.
So I have a simple health endpoint I'd like to test.
'use strict';
const controller = require('../controller/healthController');
module.exports = [
{
method: 'GET',
path: '/health',
config: {
handler: controller.health,
description: 'The health endpoint returns 200',
tags: ['api', 'health']
}
}
];
In the handler it just does a quick query to make sure it can connect to the DB.
'use strict';
const bookshelf = require('../config/db');
const knex = bookshelf.knex;
module.exports = {
health: function (request, reply) {
knex.raw('SELECT version()').then(() => {
reply('service is running');
}).catch((err) => {
reply({err: err, code: 500});
});
}
};
As far as I understand it, requiring the server and then using server.inject doesn't actually start the server so I don't believe I should have a db connection, which would mean I should have to mock it out the db call. What is odd to me is that this test passes:
'use strict';
const Code = require('code');
const Lab = require('lab');
const lab = exports.lab = Lab.script();
const describe = lab.describe;
const it = lab.test;
const expect = Code.expect;
const before = lab.before;
let server;
describe('health controller', () => {
before((done) => {
server = require('../../server');
done();
});
it('health check replies 200 when successful call to db', (done) => {
const options = {
method: 'GET',
url: '/health'
};
server.inject(options, (res) => {
expect(res.payload).to.include('is running');
expect(res.statusCode).to.equal(200);
done();
});
});
});
So I have two problems. First, I feel like the test above shouldn't really pass. Unless it's loading everything up and thus connecting to the db I suppose. Maybe I should be testing just the controller/handler method? But I haven't found any examples of that.
Second, I've tried to stub out the knex.raw call anyway and when I try to do it like below I get a 500 error.
'use strict';
const Code = require('code');
const Lab = require('lab');
const Sinon = require('sinon');
const lab = exports.lab = Lab.script();
const describe = lab.describe;
const it = lab.test;
const expect = Code.expect;
const before = lab.before;
let server;
let knex = require('../../app/config/db').knex;
describe('health controller', () => {
before((done) => {
server = require('../../server');
done();
});
it('health check replies 200 when successful call to db', (done) => {
const stub = Sinon.stub(knex, 'raw').returns({});
const options = {
method: 'GET',
url: '/health'
};
server.inject(options, (res) => {
expect(res.payload).to.include('is running');
expect(res.statusCode).to.equal(200);
expect(stub.calledOnce).to.be.true();
done();
});
});
});
I'm not really sure why that's happening.
I think Sinon.stub(knex, 'raw').resolves({}); is a better solution
server.inject works exactly as if you made a request to a real server. So if your database is up when the test are run, the endpoint will return data from the database just like it does when you start the server manually.
Sinon.stub(knex, 'raw').returns({}); won't work. knex.raw(…) is expected to return a Promise, not an empty object. Please try the following:
Sinon.stub(knex, 'raw').returns(Promise.resolve({}));
Just a side note: Remember to call server.stop() after each test in order to ensure there's no state persisted between tests. In general, I think you can take a look at example test files in Hapi University repository.