It seems like:
ga('send', 'pageview');
doesn't know how to handle a large payload (over 8 KB): when we send a large transaction with more than 100 products, the pageview hit tries to send all the items in a single beacon POST.
products.forEach(product => ga('ec:addProduct', ...) ) // 100 products
ga('ec:setAction', 'purchase', ...)
ga('send', 'pageview');
Which results in
raven.js:80 Payload size is too large (11352). Max allowed is 8192.
We are just following the documentation on: enhanced-ecommerce#measuring-transactions
The limit for an HTTP request to the Google Analytics endpoint is 8 KB (more exactly, 8192 bytes).
There is an excellent blog here discussing how to manage this overflow.
The idea is if the number of objects (products) in the array is larger than your defined number, let’s say 35, and a visitor has selected to show 100 products, the solution is to automatically send the data in 3 hits to avoid hitting the 8Kb limit.
<script>
// Split large impression/promo payloads across multiple dataLayer hits so
// each resulting GA request stays under the 8192-byte payload limit.
if (product.length > 0 || promo.length > 0) {
  var batchSize = 35; // Max objects that will be sent with 1 hit.
  var ecomm = product.concat(promo); // Merged product & promo array, reused by the add-to-cart & click tracking.
  // Drain both arrays, emitting at most `batchSize` objects of each kind per hit.
  while (product.length || promo.length) {
    var impressionBatch = product.splice(0, batchSize); // next slice of products
    var promoBatch = promo.splice(0, batchSize);        // next slice of promos
    dataLayer.push({
      'ecommerce': {
        'promoView': {
          'promotions': promoBatch
        },
        'impressions': impressionBatch
      },
      'event': 'impression', // GTM Event for Impression tracking
      'eventCategory': 'Ecommerce',
      'eventAction': 'Impression'
    });
  }
}
</script>
After a couple of tests it seems we found the solution: we broke the transaction into batches of 20 items, and at the end we send the transaction's global data (like tax and shipping). Each batch is connected to the transaction by sending the transaction ID.
// Break the transaction into batches of 20 items so that no single GA hit
// exceeds the payload limit; every batch carries the same transaction id.
var idBatches = splitArray(Object.keys(cart.lines), 20);
var transaction = { id: order.id };
angular.forEach(idBatches, function (lineIds) {
  angular.forEach(lineIds, function (lineId) {
    // Map the cart line into the GA enhanced-ecommerce product shape.
    ga('ec:addProduct', analyticsCart(cart.lines[lineId]));
  });
  // connect the batch to the transaction
  ga('ec:setAction', 'purchase', transaction);
  ga('send', 'event', 'Checkout', 'Purchase', 'items batch');
});
// Finally send the transaction-level totals in their own hit.
var fullTransaction = {
  id: order.id,
  tax: cart.tax,
  shipping: cart.deliveryCost
};
ga('ec:setAction', 'purchase', fullTransaction);
ga('send', 'event', 'Checkout', 'Purchase', 'transaction details');
Related
We have three custom dimensions defined in Google Analytics:
ClientId (dimension1): Session-scoped
SessionId (dimension2): Session-scoped
Hit Timestamp (dimension3): Hit-scoped
And these are being fed from an on-page script:
$(document).ready(function() {
  // Standard analytics.js loader snippet: installs the `ga` command-queue
  // stub and async-loads the real library.
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');

  // Returns the clientId of the tracker for this property, or 'false' if the
  // tracker cannot be found. NOTE: ga.getAll only exists once analytics.js
  // has finished loading, so this must only be called from a ga() callback.
  function getClientId() {
    try {
      var trackers = ga.getAll();
      var i, len;
      for (i = 0, len = trackers.length; i < len; i += 1) {
        if (trackers[i].get('trackingId') === 'UA-XXXXXXXX-1') {
          return trackers[i].get('clientId');
        }
      }
    }
    catch (e) {
      // fall through to the sentinel below
    }
    return 'false';
  }
  // Session id: epoch millis plus a short random base-36 suffix.
  function getSessionId() {
    return new Date().getTime() + '.' + Math.random().toString(36).substring(5);
  }
  // Hit timestamp in epoch milliseconds.
  function getTimeStamp() {
    return new Date().getTime();
  }
  ga('create', 'UA-XXXXXXXX-1', 'auto');
  ga('require', 'displayfeatures');
  ga('require', 'linkid', 'linkid.js');
  var sessionId = getSessionId();
  var timeStamp = getTimeStamp();
  // BUG FIX: previously getClientId() was called synchronously right after
  // ga('create', ...). At that point `ga` is still the stub queue and has no
  // getAll() method, so the try/catch swallowed a TypeError and dimension1
  // was sent as 'false' on almost every hit. Queueing a function callback
  // defers execution until analytics.js has loaded, when getAll() exists.
  ga(function() {
    ga('send', 'pageview', {
      'dimension1' : getClientId(),
      'dimension2' : sessionId,
      'dimension3' : timeStamp
    });
  });
});
</script>
Now, the marketing team tells us that ClientId is not getting captured. They shared data where we had some 24,000 rows, out of which only two had valid client ids. By contrast, Session ID and Hit Timestamp are being captured perfectly.
When I do a quick check on the website (by assigning the code for getClientId() to another temporary function and calling it), I get the ClientId.
I'm really not sure what's causing this to be missed on the live website. Can someone point out something that might be awry?
You should consider loading the GA library as recommended in the <head> section of your site rather than on $(document).ready. This is so that the analytics tracking has the soonest possibility to start tracking user engagements. In addition, if you load the analytics after DOM is ready or after the page has completely loaded, then there is the chance you miss some metrics if, for example, a user lands on your page, and then navigates away before the analytics library has a chance to capture their data.
My eCommerce tracking is working with one exception. All transactions are being grouped as though they came from the same source/medium 'direct/none'.
This is my code;
<script>
// Standard analytics.js loader snippet.
(function (i, s, o, g, r, a, m) {
    i['GoogleAnalyticsObject'] = r; i[r] = i[r] || function () {
        (i[r].q = i[r].q || []).push(arguments)
    }, i[r].l = 1 * new Date(); a = s.createElement(o),
    m = s.getElementsByTagName(o)[0]; a.async = 1; a.src = g; m.parentNode.insertBefore(a, m)
})(window, document, 'script', 'https://www.google-analytics.com/analytics.js', 'ga');
// allowLinker lets this tracker accept the _ga linker parameter arriving
// from the other domain, keeping one session across both sites.
ga('create', 'UA-12345678-2', 'auto', 'ecommerceTracker', { 'allowLinker': true });
ga('ecommerceTracker.require', 'linker');
// BUG FIX: the hop to securedomain.com happens via a form POST, but by
// default autoLink only decorates <a> links. Pass the 4th argument
// decorateForms=true (3rd argument useAnchor=false) so the _ga linker
// parameter is added to forms as well; without it the session breaks at
// the checkout hop and transactions attribute to direct/none.
ga('ecommerceTracker.linker:autoLink', ['securedomain.com'], false, true);
ga('ecommerceTracker.send', 'pageview');
ga('ecommerceTracker.require', 'ecommerce');
ga('ecommerceTracker.ecommerce:addTransaction', {
    id: '123456', // Transaction ID - this is normally generated by your system.
    affiliation: '1', // Affiliation or store name
    revenue: '99.99', // Grand Total
    shipping: '0' , // Shipping cost
    tax: '0' }); // Tax.
ga('ecommerceTracker.ecommerce:send');
</script>
I know that at least one of the transactions should have been listed under 'trivago / cpc', as I created this transaction myself.
The only other point to note, is that this transaction process is being carried out across 2 domains.
This is a sample path a user will take;
Visit 3rd party site, in this case Trivago
Click on link, redirecting to our domainname.com
Navigate through our domainname.com pages
Find rooms you want to book
Once you have selected the rooms, navigate to the checkout page
The checkout page is hosted on the domain securedomain.com
From our domainname.com we are redirected to securedomain.com, which contains the same Google Analytics code.
The reservation is saved, and transaction recorded in Google Analytics, but is being shown as 'direct/none'.
One other point to add is that the source domain is http and the destination domain is https. In order to get from the http domain to the https domain, there is a form post.
It may be because the _ga identifier isn't carried through the booking process, so that on the point of conversion the dynamic URL identifier is removed and therefore Google Analytics is referencing the conversion as if it came from a direct source.
I have the same problem and found that this was the case.
See here: https://groups.google.com/forum/#!topic/google-analytics-analyticsjs/kZ8W4iMxAQQ
I'm following along with the Discover Meteor.js book and creating the link sharing app but want to do pagination based based on week instead of by post recency.
Currently the code is structured to show a certain number of posts based on the URL: http://localhost:3000/<#ofDisplayedPosts>
But I want to display every post submitted in the most recent week, followed by the previous week, etc.
Here's the publication of the posts mongo collection:
// Publish a page of posts; the client supplies the sort order and the page
// size through `options`.
Meteor.publish('posts', function (options) {
  // Validate the shape of the client-supplied options before querying.
  check(options, { sort: Object, limit: Number });
  return Posts.find({}, options);
});
And here is how the router passes the data to the client
// Iron Router controller that paginates posts by growing a `postsLimit`
// URL parameter in steps of `increment`.
PostsListController = RouteController.extend({
  template: 'postsList',
  increment: 5, // how many additional posts each "next" page reveals
  // Current page size from the URL, falling back to `increment`.
  postsLimit: function () {
    // FIX: always pass a radix to parseInt; without it some inputs can be
    // misparsed (e.g. legacy octal interpretation of leading zeros).
    return parseInt(this.params.postsLimit, 10) || this.increment;
  },
  // Shared Mongo options for both the subscription and the local query,
  // so the client query matches exactly what the server published.
  findOptions: function () {
    return { sort: { submitted: -1 }, limit: this.postsLimit() };
  },
  subscriptions: function () {
    this.postsSub = Meteor.subscribe('posts', this.findOptions());
  },
  posts: function () {
    return Posts.find({}, this.findOptions());
  },
  data: function () {
    // If we received exactly `postsLimit` posts, assume there are more.
    var hasMore = this.posts().count() === this.postsLimit();
    var nextPath = this.route.path({ postsLimit: this.postsLimit() + this.increment });
    return {
      posts: this.posts(),
      ready: this.postsSub.ready,
      nextPath: hasMore ? nextPath : null
    };
  }
});
Similar to Product Hunt, How can I group posts by week, encode that information into the URL & create a link at the bottom of the page to view the previous week's posts?
Thanks!
Basic process:
Replace your route parameter :numberOfDisplayedPosts with :startdate so that your route knows what date to use as a starting point. For even more flexibility you could use two route parameters, startdate and enddate and then you could look at weeks, days, months or whatever.
Remove all references to limit as you're only going to be using date ranges.
Compute start and end datetimes based on your route parameter(s). Note that mongodb stores datetimes in UTC.
Use those datetimes in your query to select within a date range.
Compute the parameter(s) required for the next/previous page routes.
You also have to essentially run the next/previous page queries to see if there's any data there because there's no way to tell if you've reached the end or not. Alternatively you can use the min/max datetimes to figure it out.
Do sorts on the client, not the server, unless you're trying to get the min or max of the whole collection.
I understand that a subscription is a way to flow records into a client-side collection, from this post, and others...
However, per this post, You can have multiple subscriptions that flow into the same collection.
// server
Meteor.publish('posts-current-user', function publishFunction() {
  // this.userId is provided by Meteor - http://docs.meteor.com/#publish_userId
  return BlogPosts.find({author: this.userId}, {sort: {date: -1}, limit: 10});
}); // FIX: the Meteor.publish(...) call was never closed — was "}" only
// NOTE(review): the first query selects on `author`, the second on
// `authorId` — confirm which field name the schema actually uses.
Meteor.publish('posts-by-user', function publishFunction(who) {
  return BlogPosts.find({authorId: who._id}, {sort: {date: -1}, limit: 10});
}); // FIX: missing ");" here as well
// client
Meteor.subscribe('posts-current-user');
Meteor.subscribe('posts-by-user', someUser);
Now - I obtained my records via two different subscriptions, can I use the subscription to get to the records that it pulled back? Or must I requery my collection? What is the best practice for sharing that query between client and server?
I hope I'm not missing something obvious here, but executing the Meteor.subscribe function only for its side-effects seems to be losing a very useful piece of information - namely which subscription a record came from. Presumably the names of publications and subscriptions are chosen to be meaningful - it would be nice if I could get to records associated with that name.
What you seem to want to do is maintain two separate collections of records, where each collection is populated by a different publication. If you read the DDP specification, you'll see that the server tells the client which collection (not publication) each record belongs to, and multiple publications can actually provide different fields to the same record.
However, Meteor actually lets you send records to any arbitrary collection name, and the client will see if it has that collection. For example:
if (Meteor.isServer) {
  Posts = new Mongo.Collection('posts');
}
if (Meteor.isClient) {
  // FIX: was "new MongoCollection(...)" — there is no such global; the
  // constructor is Mongo.Collection.
  MyPosts = new Mongo.Collection('my-posts');
  OtherPosts = new Mongo.Collection('other-posts');
}
if (Meteor.isServer) {
  // Publish the current user's posts under the client collection name
  // 'my-posts' instead of the server collection name 'posts'.
  Meteor.publish('my-posts', function() {
    if (!this.userId) throw new Meteor.Error();
    Mongo.Collection._publishCursor(Posts.find({
      // FIX: was this.UserId (capital U), which is always undefined and
      // would have matched nothing.
      userId: this.userId
    }), this, 'my-posts');
    this.ready();
  });
  // Everyone else's posts go to the 'other-posts' client collection.
  Meteor.publish('other-posts', function() {
    Mongo.Collection._publishCursor(Posts.find({
      userId: {
        $ne: this.userId
      }
    }), this, 'other-posts');
    this.ready();
  });
}
if (Meteor.isClient) {
  Meteor.subscribe('my-posts', function() {
    console.log(MyPosts.find().count());
  });
  Meteor.subscribe('other-posts', function() {
    console.log(OtherPosts.find().count());
  });
}
This is what's happening:
Say that your server-side BlogPosts Mongo collection contains 500 posts from 10 different users. You then subscribe to two different subscriptions on the client:
Meteor.subscribe('posts-current-user'); // say that this has 50 documents
Meteor.subscribe('posts-by-user', someUser); // say that this has 100 documents
Meteor will see Meteor.subscribe('posts-current-user'); and proceed to download the posts of the current user to the client-side Mini-Mongo's BlogPosts collection.
Meteor will then see Meteor.subscribe('posts-by-user', someUser); and proceed to download the posts of someuser to the client-side Mini-Mongo's BlogPosts collection.
So now the client-side Mini-Mongo BlogPosts collection has 150 documents, which is a subset of the 500 total documents in the server-side BlogPosts collection.
So if you did BlogPosts.find().count() in your client (Chrome Console) the result would be 150 (note that .fetch() returns a plain array, which has .length, not .count).
Of course! It just depends on where you write your subscriptions. In a lot of cases you might be using Iron Router, in which case you would have a given route subscribe to just the data that you need. Then from within that route template's helper you can only query documents within that subscription.
But the general idea is that you hook up a particular subscription to a particular template.
Template.onePost.helpers({
post: function() {
Meteor.subscribe('just-one-post', <id of post>);
return Posts.findOne();
}
});
Template.allPosts.helpers({
posts: function() {
Meteor.subscribe('all-posts');
return Posts.find();
}
));
We are using two trackers on our website GaTracker and KISSmetrics on our website. We are using Segment.io as an even abstraction layer. Now we want to extend the functionality of our script that is for some events I want to call GaTracker and for some events I want to call KISSmetrics, how can I do that ?
Firstly, I'd like to mention that Segment.io uses analytics.js as the javascript client, which is open source and you can check out the source code anytime.
If you enable GA and KM on Segment.io, analytics.js will bring the GA and KM snippets onto the page, so you can still interact with the "_gaq" or "_kmq" globals as you have done in the past. You'll want to wrap your calls to the globals using the analytics.ready(..) method, like so:
// Defer the legacy ga.js ecommerce call until Segment's analytics.js has
// loaded the destination snippets and the _gaq global exists.
analytics.ready(function () {
  // Classic-GA transaction payload; field order is fixed by the _addTrans API.
  var addTransCommand = ['_addTrans',
    '1234',          // transaction ID - required
    'Acme Clothing', // affiliation or store name
    '11.99',         // total - required
    '1.29',          // tax
    '5',             // shipping
    'San Jose',      // city
    'California',    // state or province
    'USA'            // country
  ];
  _gaq.push(addTransCommand);
});
analytics.ready(..) will call the callback function when the _gaq and _kmq variables have been loaded (think of it like the jquery $.ready document ready handler).
You can set the integrations key in the options argument for alias, group, identify, page and track
// Per-call destination routing: the `integrations` object in the options
// (third) argument turns individual destinations on or off for this call.
analytics.identify('019mr8mf4r', {
  // FIX: '#' was a copy artifact for '@' in the example email address.
  email: 'achilles@segment.com',
  plan: 'Premium'
}, {
  integrations: {
    'All': false,              // default every destination to off...
    'Mixpanel': true,          // ...then explicitly opt these two in
    'KISSMetrics': true,
    'Google Analytics': false
  }
});
https://segment.com/docs/libraries/analytics.js/#selecting-integrations