Telemetry with Next.js and OpenTelemetry

Is there any way to deploy OpenTelemetry in Next.js without having to create a custom server?
Everything I found boils down to the following code:
tracing.js:
'use strict'

const opentelemetry = require('@opentelemetry/sdk-node');
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node');
const { OTLPTraceExporter } = require('@opentelemetry/exporter-otlp-grpc');
const { Resource } = require('@opentelemetry/resources');
const { SemanticResourceAttributes } = require('@opentelemetry/semantic-conventions');
// custom nextjs server
const { startServer } = require('./server');

// configure the SDK to export telemetry data to the console
// enable all auto-instrumentations from the meta package
const exporterOptions = {
  url: 'http://localhost:4317',
};

const traceExporter = new OTLPTraceExporter(exporterOptions);
const sdk = new opentelemetry.NodeSDK({
  resource: new Resource({
    [SemanticResourceAttributes.SERVICE_NAME]: 'SigNoz-Nextjs-Example',
  }),
  traceExporter,
  instrumentations: [getNodeAutoInstrumentations()],
});

// initialize the SDK and register with the OpenTelemetry API
// this enables the API to record telemetry
sdk.start()
  .then(() => console.log('Tracing initialized'))
  .then(() => startServer())
  .catch((error) => console.log('Error initializing tracing', error));

// gracefully shut down the SDK on process exit
process.on('SIGTERM', () => {
  sdk.shutdown()
    .then(() => console.log('Tracing terminated'))
    .catch((error) => console.log('Error terminating tracing', error))
    .finally(() => process.exit(0));
});

module.exports = sdk;
server.js:
const { createServer } = require("http")
const { parse } = require("url")
const next = require("next")

const dev = process.env.NODE_ENV !== "production"
const app = next({ dev })
const handle = app.getRequestHandler()

module.exports = {
  startServer: async function startServer() {
    return app.prepare().then(() => {
      createServer((req, res) => {
        const parsedUrl = parse(req.url, true)
        handle(req, res, parsedUrl)
      }).listen(8080, (err) => {
        if (err) throw err
        console.log("> Ready on http://localhost:8080")
      })
    })
  },
}
This comes from this link.
I don't think it's the right, "clean" approach. How do I implement this in a less "invasive" way?
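One less invasive option, not from the thread above but a minimal sketch of a common pattern: keep tracing.js self-contained (drop the require('./server') and startServer() call) and preload it into the stock Next.js server with Node's --require flag, so no custom server is needed. In package.json:
"scripts": {
  "start": "NODE_OPTIONS='--require ./tracing.js' next start"
}
Newer Next.js releases (13.2 and later) also ship an instrumentation.ts hook, behind the experimental.instrumentationHook flag, that is intended for exactly this kind of setup code.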

Related

What's the proper way to return a response using Formidable in a Next.js API route?

I'm sending an uploaded file to a Next.js API route using FormData. The file is then processed on the API route using formidable and passed to the Sanity client in order to upload the asset, but I can't return the data to the client... I get this message in the console:
API resolved without sending a response for /api/posts/uploadImage, this may result in stalled requests.
When console-logging the document inside the API route everything is there; I just can't send that response back to the client side. Here's my client upload function:
const addPostImage = (e) => {
  const selectedFile = e.target.files[0];
  if (
    selectedFile.type === "image/jpeg" ||
    selectedFile.type === "image/png" ||
    selectedFile.type === "image/svg" ||
    selectedFile.type === "image/gif" ||
    selectedFile.type === "image/tiff"
  ) {
    const form = new FormData();
    form.append("uploadedFile", selectedFile);
    axios
      .post("/api/posts/uploadImage", form, {
        headers: { "Content-Type": "multipart/form-data" },
      })
      .then((image) => {
        setPostImage(image);
        toast.success("Image uploaded!");
      })
      .catch((error) => {
        toast.error(`Error uploading image ${error.message}`);
      });
  } else {
    setWrongImageType(true);
  }
};
This is my API:
import { client } from "../../../client/client";
import formidable from "formidable";
import { createReadStream } from "fs";

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const form = new formidable.IncomingForm();
  form.keepExtensions = true;
  form.parse(req, async (err, fields, files) => {
    const file = files.uploadedFile;
    const document = await client.assets.upload(
      "image",
      createReadStream(file.filepath),
      {
        contentType: file.mimetype,
        filename: file.originalFilename,
      }
    );
    console.log(document);
    res.status(200).json(document);
  });
};
Solution:
As stated in the comments by @juliomalves, I had to promisify the form-parsing function and await its result, like so:
import { client } from "../../../client/client";
import formidable from "formidable";
import { createReadStream } from "fs";

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const form = new formidable.IncomingForm();
  form.keepExtensions = true;
  const formPromise = await new Promise((resolve, reject) => {
    form.parse(req, async (err, fields, files) => {
      if (err) reject(err);
      const file = files.uploadedFile;
      const document = await client.assets.upload(
        "image",
        createReadStream(file.filepath),
        {
          contentType: file.mimetype,
          filename: file.originalFilename,
        }
      );
      resolve(document);
    });
  });
  res.json(formPromise);
};
Then I checked the response's status on the client side.
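For reference, the client-side status check could look something like this (a sketch, reusing the axios call from the question):
.then((response) => {
  if (response.status === 200) {
    setPostImage(response.data); // the uploaded Sanity asset document
    toast.success("Image uploaded!");
  }
})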
Your code is not working because, by default, formidable saves files to disk, which is not available on Vercel. This works:
import formidable from "formidable";
import { Writable } from "stream"; // needed for the in-memory write stream

const chunks = [];
let buffer;

const form = formidable({
  fileWriteStreamHandler: (/* file */) => {
    const writable = new Writable();
    // eslint-disable-next-line no-underscore-dangle
    writable._write = (chunk, enc, next) => {
      chunks.push(chunk);
      next();
    };
    return writable;
  },
});

form.parse(req, (err, fields) => {
  if (err) {
    res.end(String(err));
    return;
  }
  buffer = Buffer.concat(chunks);
  res.end();
});
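To tie this back to the question above — a sketch, assuming the same Sanity client, with placeholder content type and filename you would read from the parsed file metadata — the assembled buffer can be passed straight to client.assets.upload, so nothing ever touches disk:
form.parse(req, async (err, fields, files) => {
  if (err) {
    res.end(String(err));
    return;
  }
  const buffer = Buffer.concat(chunks);
  const document = await client.assets.upload("image", buffer, {
    contentType: "image/jpeg", // placeholder: use the real MIME type from files
    filename: "upload.jpg", // placeholder: use the original filename from files
  });
  res.status(200).json(document);
});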

Jest useFakeTimers() blocks when used with Firebase emulators

I'm trying to use jest.useFakeTimers() to make my Firebase + Jest tests stable when working with dates and timeouts. However, if I try to read anything from the emulated database, the test times out and throws Exceeded timeout of 5000 ms for a test. Using jest.setTimeout(30000) doesn't help.
Github repro: https://github.com/vojdan/jest-fake-timers-firebase
Here's the test:
import * as admin from "firebase-admin";

// jest.setTimeout(30000);

describe("emulated firebase test", () => {
  beforeAll(() => {
    jest.useFakeTimers();
    jest.setSystemTime(1349852318000);
    admin.initializeApp({
      projectId: "test-project",
      databaseURL: "http://localhost:9000?ns=test-project",
    });
  });
  afterAll(() => {
    jest.useRealTimers();
  });
  it("only passes with fake timers", () => {
    expect.assertions(1);
    expect(new Date().valueOf()).toBe(1349852318000);
  });
  it("hangs when fake timers are used", async () => {
    expect.assertions(1);
    try {
      const firestore = admin.firestore().doc("anything/really");
      const realtime = admin.database().ref("anyhting/really");
      console.log("this runs:", new Date().valueOf());
      // ----- EVERYTHING RUNS UP TO HERE -----
      // ----- any of the following commands will block when used with jest.useFakeTimers(); -----
      // await firestore.get();
      const ref = await realtime.once("value");
      const value = ref.val();
      console.log("this doesn't run:", value);
      expect(1).toBe(1);
    } catch (error) {
      console.log("THERE WAS AN ERROR:", error);
    }
  });
});
Since useFakeTimers isn't working for me, I'm using this as a workaround:
jest.mock('date-fns', () => {
  // Require the original module to not be mocked...
  const originalModule = jest.requireActual('date-fns');
  return {
    ...originalModule,
    getTime: jest.fn(() => mockedTimestamp),
    getUnixTime: jest.fn(() => mockedTimestampUnix),
    format: jest.fn(input => 'mockedTimeString'),
  };
});
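Another option worth trying — a sketch of my own, assuming Jest 28 or later — is to keep the fake clock but leave the scheduling primitives the Firebase SDK depends on un-faked, so network reads can still complete:
beforeAll(() => {
  jest.useFakeTimers({
    now: 1349852318000,
    // leave these real so the SDK's internal scheduling keeps running
    doNotFake: ['nextTick', 'setImmediate', 'setTimeout', 'setInterval'],
  });
  // ...initializeApp as before
});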

Next.js API routes share database

I want to use the Next.js API routes as a backend service and serve database results in JSON format. As far as I can tell, there is no way to keep a database connection up and running, since everything located at pages/api/ is ephemeral.
Below is my code:
import { models } from "../models/index"

export default async function handler(req, res) {
  const User = models.User
  try {
    const result = await User.findAll()
    return res.json({ result })
  } catch (err) {
    console.error("Error occurred ", err)
    return res.json({ result: [] })
  }
}
Has anyone encountered this problem?
The only way I found is to run a Node.js server and attach the database models to the request object. By doing this, we pass the database connection/models through to the API routes.
My Node.js server:
const express = require("express")
const { sequelize } = require("./models/index")
const next = require("next")

const dev = process.env.NODE_ENV !== "production"
const app = next({ dev })
const handle = app.getRequestHandler()
const appExpress = express()

app.prepare().then(() => {
  appExpress.use(express.json())
  appExpress.get("*", (req, res) => {
    req.db = sequelize
    handle(req, res)
  })
  appExpress.listen(5000, () => console.log("> Ready on http://localhost:5000"))
}).catch((ex) => {
  console.error(ex.stack)
  process.exit(1)
})
My API route file changed to:
export default async function handler(req, res) {
  const User = req.db.models.User
  try {
    const result = await User.findAll()
    return res.json({ result })
  } catch (err) {
    console.error("Error occurred ", err)
    return res.json({ result: [] })
  }
}
With these changes the database connection is always up and running and shared by all API route files.
I tested it and it works like a charm.
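For what it's worth, a custom server isn't the only way to share a connection. A common alternative — a sketch of my own, reusing the Sequelize instance from the question — is to cache it on globalThis in a small helper module, so every API route imports the same instance and it survives dev-mode hot reloads:
// db.js — hypothetical helper; API routes call getDb() instead of reading req.db
const { sequelize } = require("./models/index")

function getDb() {
  // reuse the cached instance if one was already created
  if (!globalThis._db) {
    globalThis._db = sequelize
  }
  return globalThis._db
}

module.exports = { getDb }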

Next.js multiple public folders

I'm using a Next.js server and I need to have two different public asset folders.
I'm already using the regular /public folder to serve some assets, but now I need to add a second public folder that lives somewhere else on the server.
My code:
const express = require('express');
const next = require('next');
const spdy = require('spdy'); // missing from the original snippet
const expressApp = express();
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();
// httpsOptions and port are assumed to be defined elsewhere

app.prepare()
  .then(() => {
    expressApp.all('*', (req, res) => {
      return handle(req, res);
    });
    expressApp.use(express.static('/usr/local/somewhereelse/assets', { caseSensitive: true }));
    spdy.createServer(httpsOptions, expressApp)
      .listen(port, (err) => {
        if (err) {
          throw err;
        }
        console.log('Listening @:' + port);
      });
  });
What am I missing here?
Thanks in advance :)
Everyone, rookie mistake: I needed to register the static middleware before the wildcard route, since Express matches middleware in registration order.
app.prepare()
  .then(() => {
    expressApp.use(express.static('/usr/local/somewhereelse/assets', { caseSensitive: true }));
    expressApp.all('*', (req, res) => {
      return handle(req, res);
    });
    spdy.createServer(httpsOptions, expressApp)
      .listen(port, (err) => {
        if (err) {
          throw err;
        }
        console.log('Listening @:' + port);
      });
  });

Handling multipart/form-data POST with Express in Cloud Functions

I've been trying to handle POSTs (multipart/form-data) with a Firebase function and Express, but it just doesn't work. I tried this on a local server and it works just fine. Everything's the same except it's not contained in a Firebase function.
Besides screwing up the request object it seems it also screws up the way busboy works.
I've tried different solutions presented here but they just don't work. As one user mentions, the callbacks passed to busboy (to be called when a 'field' is found or when it finishes going through the data) are never called and the function just hangs.
Any ideas?
Here's my function's code for reference:
const functions = require('firebase-functions');
const express = require('express');
const getRawBody = require('raw-body');
const contentType = require('content-type');
const Busboy = require('busboy');

const app = express();

const logging = (req, res, next) => {
  console.log(`> request body: ${req.body}`);
  next();
};

const toRawBody = (req, res, next) => {
  const options = {
    length: req.headers['content-length'],
    limit: '1mb',
    encoding: contentType.parse(req).parameters.charset,
  };
  getRawBody(req, options)
    .then(rawBody => {
      req.rawBody = rawBody;
      next();
    })
    .catch(error => {
      return next(error);
    });
};

const handlePostWithBusboy = (req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  const formData = {};
  busboy.on('field', (fieldname, value) => {
    formData[fieldname] = value;
  });
  busboy.on('finish', () => {
    console.log(`> form data: ${JSON.stringify(formData)}`);
    res.status(200).send(formData);
  });
  busboy.end(req.rawBody);
};

app.post('/', logging, toRawBody, handlePostWithBusboy);

const exchange = functions.https.onRequest((req, res) => {
  if (!req.path) {
    req.url = `/${req.url}`;
  }
  return app(req, res);
});

module.exports = {
  exchange,
};
The documentation Doug referred to in his answer is good. An important caveat, though, is that rawBody does not work in the emulator. The workaround is to do:
if (req.rawBody) {
  busboy.end(req.rawBody);
} else {
  req.pipe(busboy);
}
As described in this issue:
https://github.com/GoogleCloudPlatform/cloud-functions-emulator/issues/161#issuecomment-376563784
Please read the documentation for handling multipart form uploads.
... if you want your Cloud Function to process multipart/form-data, you can use the rawBody property of the request.
Because of the way Cloud Functions pre-processes some requests, you can expect that some Express middleware will not work, and that's what you're running into.
I've combined the previous two answers into an easy-to-use async function.
const Busboy = require('busboy');
const os = require('os');
const path = require('path');
const fs = require('fs');

module.exports = function extractMultipartFormData(req) {
  return new Promise((resolve, reject) => {
    if (req.method != 'POST') {
      return reject(405);
    } else {
      const busboy = new Busboy({ headers: req.headers });
      const tmpdir = os.tmpdir();
      const fields = {};
      const fileWrites = [];
      const uploads = {};

      busboy.on('field', (fieldname, val) => (fields[fieldname] = val));

      busboy.on('file', (fieldname, file, filename) => {
        const filepath = path.join(tmpdir, filename);
        const writeStream = fs.createWriteStream(filepath);
        uploads[fieldname] = filepath;
        file.pipe(writeStream);
        const promise = new Promise((resolve, reject) => {
          file.on('end', () => {
            writeStream.end();
          });
          writeStream.on('finish', resolve);
          writeStream.on('error', reject);
        });
        fileWrites.push(promise);
      });

      busboy.on('finish', async () => {
        const result = { fields, uploads: {} };
        await Promise.all(fileWrites);
        for (const file in uploads) {
          const filename = uploads[file];
          result.uploads[file] = fs.readFileSync(filename);
          fs.unlinkSync(filename);
        }
        resolve(result);
      });

      busboy.on('error', reject);

      if (req.rawBody) {
        busboy.end(req.rawBody);
      } else {
        req.pipe(busboy);
      }
    }
  });
};
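For completeness, a minimal usage sketch (my addition; it assumes the module above is saved as extractMultipartFormData.js alongside the function):
const functions = require('firebase-functions');
const extractMultipartFormData = require('./extractMultipartFormData');

exports.upload = functions.https.onRequest(async (req, res) => {
  try {
    // fields is a plain object of text inputs; uploads maps field names to Buffers
    const { fields, uploads } = await extractMultipartFormData(req);
    res.status(200).json({ fields, files: Object.keys(uploads) });
  } catch (code) {
    // the helper rejects with 405 for non-POST requests
    res.status(typeof code === 'number' ? code : 500).end();
  }
});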
