PWA - cached video will not play in Mobile Safari (11.4)

I'm struggling to create a simple POC for an iOS PWA with a small video.
https://test-service-worker.azurewebsites.net/
I have a simple service worker registration and I cache a small (700 kB) video. When I'm online the page works just fine. When I turn on airplane mode and go offline, the page still reloads but the video will not play.
This POC is based on the Google Chrome example:
https://googlechrome.github.io/samples/service-worker/prefetch-video/
The video from that example certainly won't work offline on iOS, because iOS only caches up to 50 MB. Mine is only 700 kB, well below the limit.
My POC works just fine in Chrome, but it won't work in the latest Mobile Safari (iOS 11.4).
What do I need to change in order to make this work on iOS 11.4+? Is this a bug in Safari?

It turns out Safari is just quite strict. I'm leaving the question here - hopefully it will save someone some time.
What's happening:
Safari requests only part of the video - first it sends a request with the header 'Range: bytes=0-1'. It expects an HTTP 206 response, which reveals the size of the file.
Based on that response it learns the length of the video, and then it asks for individual byte ranges of the file (for example Range: bytes=0-20000, etc.).
If your response is longer than what was requested, Safari immediately stops processing subsequent requests.
This is exactly what happens in the Google Chrome example and what was happening in my POC. If you handle fetch like this, it works both online and offline:
// This code is based on https://googlechrome.github.io/samples/service-worker/prefetch-video/
self.addEventListener('fetch', function(event) {
  var headersLog = [];
  for (var pair of event.request.headers.entries()) {
    console.log(pair[0] + ': ' + pair[1]);
    headersLog.push(pair[0] + ': ' + pair[1]);
  }
  console.log('Handling fetch event for', event.request.url, JSON.stringify(headersLog));

  if (event.request.headers.get('range')) {
    console.log('Range request for', event.request.url);
    var rangeHeader = event.request.headers.get('range');
    var rangeMatch = rangeHeader.match(/^bytes\=(\d+)\-(\d+)?/);
    var pos = Number(rangeMatch[1]);
    var pos2 = rangeMatch[2];
    if (pos2) { pos2 = Number(pos2); }
    console.log('Range request for ' + event.request.url, 'Range: ' + rangeHeader, 'Parsed as: ' + pos + '-' + pos2);

    event.respondWith(
      caches.open(CURRENT_CACHES.prefetch)
        .then(function(cache) {
          return cache.match(event.request.url);
        }).then(function(res) {
          if (!res) {
            console.log('Not found in cache - fetching from network');
            return fetch(event.request)
              .then(res => {
                console.log('Fetch done - returning response ', res);
                return res.arrayBuffer();
              });
          }
          console.log('Found in cache - returning cached response');
          return res.arrayBuffer();
        }).then(function(ab) {
          console.log('Processing response');
          let responseHeaders = {
            status: 206,
            statusText: 'Partial Content',
            headers: [
              ['Content-Type', 'video/mp4'],
              ['Content-Range', 'bytes ' + pos + '-' +
                (pos2 || (ab.byteLength - 1)) + '/' + ab.byteLength]]
          };
          console.log('Response: ', JSON.stringify(responseHeaders));
          var abSliced = {};
          if (pos2 > 0) {
            abSliced = ab.slice(pos, pos2 + 1);
          } else {
            abSliced = ab.slice(pos);
          }
          console.log('Response length: ', abSliced.byteLength);
          return new Response(abSliced, responseHeaders);
        }));
  } else {
    console.log('Non-range request for', event.request.url);
    event.respondWith(
      // caches.match() will look for a cache entry in all of the caches available to the service worker.
      // It's an alternative to first opening a specific named cache and then matching on that.
      caches.match(event.request).then(function(response) {
        if (response) {
          console.log('Found response in cache:', response);
          return response;
        }
        console.log('No response found in cache. About to fetch from network...');
        // event.request will always have the proper mode set ('cors', 'no-cors', etc.) so we don't
        // have to hardcode 'no-cors' like we do when fetch()ing in the install handler.
        return fetch(event.request).then(function(response) {
          console.log('Response from network is:', response);
          return response;
        }).catch(function(error) {
          // This catch() will handle exceptions thrown from the fetch() operation.
          // Note that an HTTP error response (e.g. 404) will NOT trigger an exception.
          // It will return a normal response object that has the appropriate error code set.
          console.error('Fetching failed:', error);
          throw error;
        });
      })
    );
  }
});
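For context, the fetch handler above assumes the video was already added to CURRENT_CACHES.prefetch during install, roughly as in the Google sample the POC is based on. A minimal sketch of that install step follows; the cache name and video path are placeholders, not the POC's actual values:
// Minimal install-time prefetch the fetch handler above relies on
// (modeled on the Google prefetch-video sample; URL and cache name are placeholders).
var CURRENT_CACHES = {
  prefetch: 'prefetch-cache-v1'
};

self.addEventListener('install', function(event) {
  var urlsToPrefetch = ['small-video.mp4']; // placeholder: the ~700 kB same-origin video to serve offline
  event.waitUntil(
    caches.open(CURRENT_CACHES.prefetch).then(function(cache) {
      return cache.addAll(urlsToPrefetch);
    }).then(function() {
      console.log('All resources have been fetched and cached.');
    })
  );
});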

Related

Am I doing Firestore Transactions correctly?

I've followed the Firestore documentation on transactions, and I think I have it all sorted correctly, but in testing I am noticing issues with my documents not always getting updated properly. It is possible that multiple versions of the document could be submitted to the function in a very short interval, but I am only interested in ever keeping the most recent version.
My general logic is this:
New/Updated document is sent to cloud function
Check if document already exists in Firestore, and if not, add it.
If it does exist, check whether it is "newer" than the instance in Firestore; if it is, update it.
Otherwise, don't do anything.
Here is the code from my function that attempts to accomplish this. I would love some feedback on whether this is the correct/best way to do it:
const ocsFlight = req.body;
const procFlight = processOcsFlightEvent(ocsFlight);

try {
  const ocsFlightRef = db.collection(collection).doc(procFlight.fltId);
  const originalFlight = await ocsFlightRef.get();

  if (!originalFlight.exists) {
    const response = await ocsFlightRef.set(procFlight);
    console.log("Record Added: ", JSON.stringify(procFlight));
    res.status(201).json(response); // 201 - Created
    return;
  }

  await db.runTransaction(async (t) => {
    const doc = await t.get(ocsFlightRef);
    const flightDoc = doc.data();

    if (flightDoc.recordModified <= procFlight.recordModified) {
      t.update(ocsFlightRef, procFlight);
      console.log("Record Updated: ", JSON.stringify(procFlight));
      res.status(200).json("Record Updated");
      return;
    }

    console.log("Record isn't newer, nothing changed.");
    console.log("Same Flight:", JSON.stringify(procFlight));
    res.status(200).json("Record isn't newer, nothing done.");
    return;
  });
} catch (error) {
  console.log("Error:", JSON.stringify(error));
  res.status(500).json(error.message);
}
The Bugs
First, you are trusting the value of req.body to be of the correct shape. If you don't already have type assertions that mirror your security rules for /collection/someFlightId in processOcsFlightEvent, you should add them. This is important because any database operations from the Admin SDKs will bypass your security rules.
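As an illustration only (the post doesn't show processOcsFlightEvent, and the real document shape isn't known), such assertions could sit at the top of that function; everything beyond fltId and recordModified is hypothetical:
// Hypothetical sketch of shape checks inside processOcsFlightEvent.
// Only fltId and recordModified appear in the post; mirror whatever your
// security rules for /collection/{flightId} actually require.
function processOcsFlightEvent(ocsFlight) {
  if (typeof ocsFlight !== "object" || ocsFlight === null) {
    throw new Error("Request body must be a JSON object");
  }
  if (typeof ocsFlight.fltId !== "string" || ocsFlight.fltId.length === 0) {
    throw new Error("'fltId' must be a non-empty string");
  }
  if (ocsFlight.recordModified === undefined) {
    throw new Error("'recordModified' is required");
  }
  // ...normalize the event as before and return only the validated fields
  return {
    fltId: ocsFlight.fltId,
    recordModified: ocsFlight.recordModified
    // plus whatever other validated fields belong on the document
  };
}
In a fuller version you would probably catch these validation errors separately and respond with 400 Bad Request instead of letting them fall through to the 500 handler.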
The next bug is sending a response from your function inside the transaction. Once you send a response back to the client, your function is marked inactive - resources are severely throttled and any network requests may not complete or may crash. As a transaction may be retried a handful of times if a database collision is detected, you should make sure to respond to the client only once the transaction has properly completed.
You use set() to write the new flight to Firestore; this can lead to trouble when working with transactions, as a set operation will cancel all pending transactions at that location. If two function instances are fighting over the same flight ID, this can lead to the wrong data being written to the database.
In your current code, you return the result of the ocsFlightRef.set() operation to the client as the body of the HTTP 201 Created response. As the result of DocumentReference#set() is a WriteResult object, you'll need to properly serialize it if you want to return it to the client, and even then I don't think it will be useful, as you don't use it for the other response types. Instead, an HTTP 201 Created response normally includes where the resource was written to as the Location header, with no body, but here we'll pass the path in the body. If you start using multiple database instances, including the relevant database may also be useful.
Fixing
The correct way to achieve the desired result is to do the entire read -> check -> write process inside a transaction, and respond to the client only once the transaction has completed.
So that we can send the appropriate response to the client, we use the return value of the transaction to pass data out of it. We'll pass the type of the change we made ("created" | "updated" | "aborted") and the recordModified value of what was stored in the database. We'll return these along with the resource's path and an appropriate message.
In the case of an error, we'll return a message to show the user as message and the error's Firebase error code (if available) or general message as the error property.
// if not using express to wrangle requests, assert the correct method
if (req.method !== "POST") {
  console.log(`Denied ${req.method} request`);
  res.status(405) // 405 - Method Not Allowed
    .set("Allow", "POST")
    .end();
  return;
}

const ocsFlight = req.body;

try {
  // process AND type check `ocsFlight`
  const procFlight = processOcsFlightEvent(ocsFlight);

  const ocsFlightRef = db.collection(collection).doc(procFlight.fltId);

  const { changeType, recordModified } = await db.runTransaction(async (t) => {
    const flightDoc = await t.get(ocsFlightRef);

    if (!flightDoc.exists) {
      t.set(ocsFlightRef, procFlight);
      return {
        changeType: "created",
        recordModified: procFlight.recordModified
      };
    }

    // only parse the field we need rather than everything
    const storedRecordModified = flightDoc.get('recordModified');

    if (storedRecordModified <= procFlight.recordModified) {
      t.update(ocsFlightRef, procFlight);
      return {
        changeType: "updated",
        recordModified: procFlight.recordModified
      };
    }

    return {
      changeType: "aborted",
      recordModified: storedRecordModified
    };
  });

  switch (changeType) {
    case "updated":
      console.log("Record updated: ", JSON.stringify(procFlight));
      res.status(200).json({ // 200 - OK
        path: ocsFlightRef.path,
        message: "Updated",
        recordModified,
        changeType
      });
      return;
    case "created":
      console.log("Record added: ", JSON.stringify(procFlight));
      res.status(201).json({ // 201 - Created
        path: ocsFlightRef.path,
        message: "Created",
        recordModified,
        changeType
      });
      return;
    case "aborted":
      console.log("Outdated record discarded: ", JSON.stringify(procFlight));
      res.status(200).json({ // 200 - OK
        path: ocsFlightRef.path,
        message: "Record isn't newer, nothing done.",
        recordModified,
        changeType
      });
      return;
    default:
      throw new Error("Unexpected value for 'changeType': " + changeType);
  }
} catch (error) {
  console.log("Error:", JSON.stringify(error));
  res.status(500) // 500 - Internal Server Error
    .json({
      message: "Something went wrong",
      // if available, prefer a Firebase error code
      error: error.code || error.message
    });
}
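For completeness, here is a rough sketch of how a client might call this function and branch on the changeType it returns; the function URL and name are placeholders, and the call is assumed to run inside an async function:
// Hypothetical client-side usage; the URL is a placeholder and the response
// body shape matches the handler above ({ path, message, recordModified, changeType }).
const response = await fetch("https://<region>-<project>.cloudfunctions.net/ocsFlight", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(ocsFlight)
});
const body = await response.json();

if (!response.ok) {
  console.error("Request failed:", body.error || body.message);
} else if (body.changeType === "aborted") {
  console.log("Server already has a newer record:", body.recordModified);
} else {
  console.log("Flight " + body.changeType + " at " + body.path);
}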
References
Cloud Firestore Transactions
Cloud Firestore Node SDK Reference
HTTP Event Cloud Functions

Angular HttpClient calls are missing query string and Authorization header

When Angular makes a GET call using HttpClient, the query parameters and Authorization header are missing on the request in our QA environment. When running Angular locally, pointed to the QA APIs, it sends them both as expected.
Here's how the query parameters are set:
const params = new HttpParams().set('schedulingOnly', schedulingOnly ? 'true' : 'false');
return this.httpClient.get<any>(this.getBaseUrl() + '/domain/getAll', { params });
Here's how the Authorization header is set (interceptor):
intercept(request: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
  if (environment.useHttpMockRequestInterceptor) {
    return this.useMockData(request);
  } else {
    request = this.AddAuthenticationHeader(request);
    return next.handle(request);
  }
}

private AddAuthenticationHeader(request: HttpRequest<any>) {
  const authRequest = request.clone({
    headers: request.headers
      .set('Authorization', 'Bearer ' + sessionStorage.getItem('access_token'))
  });
  return authRequest;
}
Chrome dev tools confirms the request goes out without the query string and without the Authorization header.
That's all the basic information, but below is additional information about things I've tried without success.
Is this a CORS issue? - While searching for others with this issue, I came across a lot of CORS issues. I do not believe that's the case here because Angular and the APIs are on the same domain and I can run Angular locally and hit the APIs no problem.
Do query params get sent if I hardcode them into the url? - Yes. The following worked for the query params: return this.httpClient.get(this.getBaseUrl() + '/domain/getAll?schedulingOnly=true');
Is this something wrong with the interceptor? - I don't believe so. Console.log() statements show all the expected points in the code being hit, and inspecting the request object after the interceptor adds the auth header shows the header is present.
I also tried setting the header and params directly, without the interceptor, but no luck.
const obj = {
  headers: { 'Authorization': 'Bearer ' + sessionStorage.getItem('access_token') },
  params: { 'schedulingOnly': schedulingOnly ? 'true' : 'false' }
};
return this.httpClient.get<any>(this.getBaseUrl() + '/domain/getAll', obj);
There are no js errors in the console except the 401 error
QA web server is IIS
APIs are ASP.NET Core
Angular is embedded within an ASP.NET Web Forms project (due to migrating that legacy code into Angular incrementally)
The issue was that PrototypeJs was interfering with Angular. It caused this problem with no warnings or errors, so it was silently breaking the requests. PrototypeJs is used in the containing ASP.NET Web Forms app that Angular is embedded into. The reason this worked locally but not in QA is that I already had functionality to not load PrototypeJs on Angular pages (because I had noticed other issues before), but that check wasn't working in QA, since the site starts on a subpath rather than directly on the host, so PrototypeJs was still being loaded.
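For illustration only (the post doesn't include the loader code), the conditional loading needs a path check that tolerates the QA subpath; all names and paths below are hypothetical:
// Hypothetical sketch of conditionally loading PrototypeJS in the Web Forms shell.
// A check anchored to the start of the path (indexOf('/angular') === 0) breaks when
// QA hosts the site under a subpath like /qa/angular/..., so match anywhere instead.
var path = window.location.pathname.toLowerCase();
var isAngularPage = path.indexOf('/angular') !== -1; // tolerates a leading subpath

if (!isAngularPage) {
  // only legacy Web Forms pages get PrototypeJS
  var script = document.createElement('script');
  script.src = '/Scripts/prototype.js'; // hypothetical path
  document.head.appendChild(script);
}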
Have you tried the shorter version of adding the header in your interceptor?
const authRequest = request.clone({
  setHeaders: { 'Authorization': 'Bearer ' + sessionStorage.getItem('access_token') }
});
Interceptor
intercept(request: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
  if (environment.useHttpMockRequestInterceptor) {
    return this.useMockData(request);
  } else {
    request = this.AddAuthenticationHeader(request);
    return next.handle(request);
  }
}

private AddAuthenticationHeader(request: HttpRequest<any>) {
  return request.clone({
    setHeaders: {
      Authorization: `Bearer ${sessionStorage.getItem('access_token')}`
    }
  });
}

PWA opens Safari on POST. Is there a way to make it not?

I'm working on converting an existing app to a PWA. Part of that PWA deals with POST from a login form with a redirect to another page. In Safari, if you open the app from the home screen and try to log in, it opens a new tab in Safari. I would love this not to happen.
I've tried just return-ing on a POST and caching the redirected page. It seemed to work for a bit but then would return to its old ways. I've debugged through the code and it seems to switch once it gets a response back from the fetch. Here's my fetch handler:
if ((e.request.cache === 'only-if-cached' && e.request.mode !== 'same-origin') || (e.request.method === 'POST')) {
  return;
}

e.respondWith(
  caches.match(e.request)
    .then(function (response) {
      // Cache hit - note that `response` is not actually used below;
      // this handler always goes to the network and updates the cache.
      // IMPORTANT: Clone the request. A request is a stream and
      // can only be consumed once. Since we are consuming this
      // once by the cache and once by the browser for fetch, we need
      // to clone the request.
      var fetchRequest = e.request.clone();
      return fetch(fetchRequest).then(
        function (response) {
          // Check to see if the response is valid
          if (!response || response.status !== 200 || response.type !== 'basic') {
            return response;
          }
          // IMPORTANT: Clone the response. A response is a stream
          // and because we want the browser to consume the response
          // as well as the cache consuming the response, we need
          // to clone it so we have two streams.
          var responseToCache = response.clone();
          caches.open(CACHE_NAME)
            .then(function (cache) {
              cache.put(e.request, responseToCache);
            });
          return response;
        });
    }));
I expected that Safari wouldn't open up but it does and I'm all out of ideas on how to fix this.
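Not from the original post, but one direction that is commonly suggested for this situation: submit the login form with fetch() from page script and navigate with JavaScript afterwards, so the POST and the redirect never become a full-page navigation. A rough sketch, assuming a form with id "login-form" posting to /login and redirecting to /home (all hypothetical names):
// Hypothetical page-side workaround sketch (not from the original post).
document.getElementById('login-form').addEventListener('submit', function (e) {
  e.preventDefault(); // stop the full-page POST
  fetch('/login', {
    method: 'POST',
    body: new FormData(e.target),
    credentials: 'same-origin'
  }).then(function (response) {
    if (response.ok) {
      // client-side navigation keeps the user inside the standalone PWA
      window.location.href = '/home';
    }
  });
});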

Jquery Ajax, not working in Internet explorer

I'm trying to do some jQuery Ajax and it works in Firefox and Chrome, but not in Internet Explorer 9.
The final code will have to go across sub-domains, and this doesn't work in IE with the default transport.
So I'm trying to create a custom transport to use in Internet Explorer.
Method 1
$.ajaxTransport("+*", function (options, originalOptions, jqXHR) {
  if (jQuery.browser.msie && window.XDomainRequest) {
    var xdr;
    return {
      send: function (headers, completeCallback) {
        // Use Microsoft XDR
        xdr = new XDomainRequest();
        xdr.open("get", options.url);
        xdr.onload = function () {
          if (this.contentType.match(/\/xml/)) {
            var dom = new ActiveXObject("Microsoft.XMLDOM");
            dom.async = false;
            dom.loadXML(this.responseText);
            completeCallback(200, "success", [dom]);
          } else {
            completeCallback(200, "success", [this.responseText]);
          }
        };
        xdr.ontimeout = function () {
          completeCallback(408, "error", ["The request timed out."]);
        };
        xdr.onerror = function () {
          completeCallback(404, "error", ["The requested resource could not be found."]);
        };
        xdr.send();
      },
      abort: function () {
        if (xdr) xdr.abort();
      }
    };
  }
});
I've created a simple sample page to demonstrate the first technique here:
http://services.whygo.net/sendAjax.htm
Please note that if you use the custom transport, the normal transport will then fail unless you refresh.
The idea comes from here:
http://forum.jquery.com/topic/cross-domain-ajax-and-ie#14737000002203097
This gives no error message other than 'error' inside the 'error' callback passed to $.ajax when it fails. I do get a 405 Method Not Allowed on the 'Network' tab of IE dev tools, but the server-side code does execute.
Method 2
I have also tried another method as described here:
Cross-subdomain AJAX works in Chrome, not IE
if ('XDomainRequest' in window && window.XDomainRequest !== null) {
  // override default jQuery transport
  jQuery.ajaxSettings.xhr = function() {
    try { return new XDomainRequest(); }
    catch(e) { }
  };
}
This can be found here:
http://www.whygo.net/sendAjax2.html
On this one I actually get 200 codes on the 'Network' tab of IE dev tools, but it doesn't call the 'error' or 'success' callback of $.ajax.
If I put a timeout on this second one, then it returns to the 'error' function with a message of 'timeout'.
Here's the solution I went with after about a day of struggling with this inconsistency...
// new method so as not to overwrite jQuery's defaults
var cors = (window.XDomainRequest) ? function(url, callback) {
  var xdr = new XDomainRequest();
  xdr.open('get', url);
  xdr.onload = function() { callback(xdr.responseText); };
  xdr.send();
} : $.get; // else, use jQuery's method
Use...
cors(url, function(msg) { alert(msg); }); // pretty well same as $.get
Copy and paste, this of course doesn't serve all purposes, but it's a start and it works.
On the http://services.whygo.net/sendAjax2.html page, I see that you've set the expected dataType of the AJAX response from the server to JSON, but the response actually comes back as a plain text string ("Email successfully sent.").
Perhaps you could try commenting out dataType and let jQuery figure out what type of response comes back.
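For illustration (the exact $.ajax call isn't shown in the post), that suggestion would look roughly like this; the endpoint is a placeholder:
// Hypothetical sketch: let jQuery infer the response type instead of forcing JSON.
$.ajax({
  url: 'http://services.whygo.net/someEndpoint', // placeholder endpoint
  type: 'GET',
  // dataType: 'json', // commented out so jQuery infers the type from the response
  success: function (msg) { alert(msg); }, // msg will be the plain-text string
  error: function (xhr, status) { console.log('failed: ' + status); }
});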

Express / NodeJS Can't send headers after they are sent caused by http requests

First time working with NodeJS (yes, it's awesome) and also using Express. I've got the web app / service working great, but I run into problems when trying to make more than one HTTP request. Here's a video of how the app makes two HTTP requests - http://screencast.com/t/yFKdIajs0XD - as you can see, I click on 'articles' and it loads an RSS feed, then click 'videos' and it loads a YouTube feed. Both work just fine, but after the second call is made it throws an exception. I get the following when I attempt two separate HTTP requests using Node's http module:
http.js:527
throw new Error("Can't set headers after they are sent.");
^
Error: Can't set headers after they are sent.
at ServerResponse.<anonymous> (http.js:527:11)
at ServerResponse.setHeader (/Users/rickblalock/node/auti_node/node_modules/express/node_modules/connect/lib/patch.js:47:22)
at /Users/rickblalock/node/auti_node/node_modules/express/node_modules/connect/lib/middleware/errorHandler.js:72:19
at [object Object].<anonymous> (fs.js:107:5)
at [object Object].emit (events.js:61:17)
at afterRead (fs.js:878:12)
at wrapper (fs.js:245:17)
Sample code provided here:
Using my controller module to render the request - http://pastie.org/2317698
One of the tabs (article tab) - the video code is identical minus referencing a video feed: http://pastie.org/2317731
Try using the "end" event, not "data", like this:
var data = "";

app.get('/', function(req, res){
  var options = {
    host: 'www.engadget.com',
    path: '/rss.xml',
    method: 'GET'
  };
  if (data === "") {
    var myReq = http.request(options, function(myRes) {
      myRes.setEncoding('utf8');
      myRes.on('data', function(chunk) {
        console.log("request on data");
        data += chunk;
      });
      myRes.on('end', function () {
        console.log("request on END");
        res.render('index', {
          title: 'Express',
          data: data
        });
      });
    });
    myReq.write('data\n');
    myReq.end();
  }
  else {
    res.render('index', {
      title: 'Express',
      data: data
    });
  }
});
old answer
I also think that this is the culprit:
var req = http.request(options, function(res) {
  res.setEncoding('utf8');
  res.on('data', function(chunk) {
    parseArticle(chunk);
  });
});
req.write('data\n');
req.end();
The first line is async, so everything inside the callback is called after you do req.write() and req.end().
Put those two lines into the callback.
