I'm trying to add a polynomial regression trendline to my Google Trends live graph, but it does not work properly:
As you can see, the trendline does not fit the data points: it is drawn as a straight line rather than a polynomial curve.
My code is:
google.charts.load("current", {packages: ["corechart"]});
google.charts.setOnLoadCallback(drawChart);

function drawChart() {
  var data = new google.visualization.DataTable();
  data.addColumn('datetime', 'Time of Day');
  data.addColumn('number', 'Depth');

  var options = {
    title: 'graph',
    legend: 'none',
    crosshair: { trigger: 'both', orientation: 'both' },
    trendlines: {
      0: {
        type: 'polynomial',
        degree: 3,
        visibleInLegend: true,
      }
    },
    hAxis: {
      viewWindow: {
        min: new Date(),
      }
    },
    vAxis: {
      viewWindow: {
        min: 0,
        max: 100
      }
    }
  };

  var chart = new google.visualization.ScatterChart(document.getElementById('chart'));
  chart.draw(data, options);

  var interval = setInterval(function() {
    var time = new Date();
    data.addRow([time, newValue]); // newValue comes from the live data feed (defined elsewhere)
    chart.draw(data, options);
  }, 10);
}
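For comparison, below is a minimal self-contained sketch of a cubic trendline on a ScatterChart with numeric x values. The sample points are made up and are only meant as a baseline for checking the trendline options in isolation, not as the live-graph code above:

google.charts.load('current', {packages: ['corechart']});
google.charts.setOnLoadCallback(function () {
  // Made-up sample data roughly following a cubic shape.
  var data = google.visualization.arrayToDataTable([
    ['x', 'Depth'],
    [0, 2], [1, 5], [2, 20], [3, 48], [4, 95], [5, 160]
  ]);

  var options = {
    legend: 'none',
    trendlines: {
      0: { type: 'polynomial', degree: 3, visibleInLegend: true }
    }
  };

  var chart = new google.visualization.ScatterChart(document.getElementById('chart'));
  chart.draw(data, options);
});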
I want to generate an NDWI chart, but I get this error:
Error generating chart: No features contain non-null values of "system:time_start".
var L8 = ee.ImageCollection("LANDSAT/LC08/C02/T1_RT_TOA")
  .filterDate('2021-01-01', '2021-12-31')
  .filterBounds(VolRn)
  .filterMetadata('CLOUD_COVER', 'less_than', 1.5)
  .mean()
  .clip(VolRn);

var green = L8.select('B3');
var nir = L8.select('B5');
var ndwi = green.subtract(nir).divide(green.add(nir)).rename('NDWI');

var ndwiParams = {
  min: -1,
  max: 1,
  palette: ['green', 'white', 'blue']
};

var ndwiMasked = ndwi.updateMask(ndwi.gte(0.001));

Map.setCenter(44.5858, 48.8047, 7);
Map.addLayer(ndwi, ndwiParams, 'NDWI image');

var options = {
  title: 'Landsat Spectral Indexes',
  hAxis: {title: 'Date'},
  vAxis: {title: 'Value'},
  lineWidth: 1,
  maxPixels: 1e9,
  series: {
    0: {color: 'blue'}  // NDWI
  }
};

print(Chart.image.series(ndwi, VolRn, ee.Reducer.mean(), 200).setOptions(options));
Can you please explain what this error means and how to fix it?
I am completely new to GEE and can't find any information about this error.
I would be very grateful.
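For context, Chart.image.series expects an ImageCollection whose images carry a system:time_start property, and reducing the collection with .mean() produces a single image without it. Below is a minimal sketch of the per-image approach; VolRn and the band choices come from the code above, the rest is an assumed rewrite rather than the original author's code:

// Keep the collection (no .mean()) so each image retains system:time_start.
var L8col = ee.ImageCollection("LANDSAT/LC08/C02/T1_RT_TOA")
  .filterDate('2021-01-01', '2021-12-31')
  .filterBounds(VolRn)
  .filterMetadata('CLOUD_COVER', 'less_than', 1.5);

// Compute NDWI for every image and copy the time property forward.
var ndwiCol = L8col.map(function(img) {
  var ndwi = img.normalizedDifference(['B3', 'B5']).rename('NDWI');
  return ndwi.copyProperties(img, ['system:time_start']);
});

// Chart the collection, not a single reduced image.
print(ui.Chart.image.series(ndwiCol, VolRn, ee.Reducer.mean(), 200)
  .setOptions({title: 'NDWI over time', hAxis: {title: 'Date'}, vAxis: {title: 'NDWI'}}));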
I'm using vis.js to build an application. When I generate the first search the edges don't overlap, but after expanding a node they do.
I have a loop that fills the nodes and edges using nodes.update and edges.update.
Graph:
var container = document.getElementById("graph-container");
var data = {
  nodes: nodes,
  edges: edges,
};
var options = {
  interaction: {
    dragNodes: true,
    dragView: true,
    hover: true,
    hoverConnectedEdges: true,
    selectConnectedEdges: true,
    zoomView: true
  },
  layout: {
    randomSeed: 2,
    improvedLayout: true,
    clusterThreshold: 150,
  },
  physics: {
    barnesHut: {
      avoidOverlap: 0.5
    }
  },
  nodes: {
    physics: false,
    borderWidth: 3,
    size: 14,
    font: {color: '#000000'}
  },
  edges: {
    length: 200,
    smooth: {
      roundness: 0.2,
      type: 'dynamic'
    },
    font: {
      size: 9,
    }
  }
};

if (!isExpandNode) {   // first search ("is not expand node"): build the network
  network = new vis.Network(container, data, options);
} else {               // node expansion: nodes/edges were updated in place
  network.fit();
  network.redraw();
}
What am I doing wrong? I think it's not reading my options = {...} on the expansion.
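For what it's worth, the options given to the vis.Network constructor stay attached to that network instance; updating the DataSets does not rebuild the layout on its own. The vis-network API does provide setOptions, stabilize, and fit, so a hedged sketch of the expansion branch could look like this (isExpandNode is the placeholder flag from the code above; note that nodes.physics: false keeps existing nodes out of the simulation):

if (!isExpandNode) {
  network = new vis.Network(container, data, options);
} else {
  network.setOptions(options);  // re-apply the options to the existing network
  network.stabilize();          // re-run the physics simulation (only moves nodes with physics enabled)
  network.fit();                // zoom/pan so the whole graph is visible
}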
I want to show my custom time format (Day-Month-Year Hour:Min:Sec, e.g. 01-05-2019 14:06:47 PM) on the x-axis of a Chart.js chart.
My time format is timeFormat = 'DD/MM/YYYY h:mm:ss a', but the chart only shows month, day, and year.
This is my code (live example: https://codepen.io/sibche2013/pen/XQWWbb):
var timeFormat = 'DD/MM/YYYY h:mm:ss a';
var config = {
  type: 'line',
  data: {
    datasets: [
      {
        label: "UK Dates",
        data: [{
          x: "01-04-2014 02:15:50", y: 175
        }, {
          x: "12-04-2014 12:19:27", y: 177
        }, {
          x: "23-04-2014 22:25:47", y: 178
        }, {
          x: "28-04-2014 14:46:40", y: 182
        }],
        fill: false,
        borderColor: 'blue'
      }
    ]
  },
  options: {
    responsive: true,
    title: {
      display: true,
      text: "Chart.js Time Scale"
    },
    scales: {
      xAxes: [{
        type: "time",
        time: {
          format: timeFormat,
          tooltipFormat: 'll'
        },
        scaleLabel: {
          display: true,
          labelString: 'Date'
        }
      }],
      yAxes: [{
        scaleLabel: {
          display: true,
          labelString: 'value'
        }
      }]
    }
  }
};

window.onload = function () {
  var ctx = document.getElementById("canvas").getContext("2d");
  window.myLine = new Chart(ctx, config);
};
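In Chart.js 2.x the string that parses the incoming x values is time.parser, and the tick labels come from time.displayFormats; format alone does not change how the ticks are rendered. Below is a minimal sketch with a parser matching the "01-04-2014 02:15:50" values above; it is not the original CodePen code and assumes Moment.js is loaded as usual with Chart.js 2:

var timeFormat = 'DD-MM-YYYY HH:mm:ss';   // matches x values like "01-04-2014 02:15:50"

var config = {
  type: 'line',
  data: {
    datasets: [{
      label: 'UK Dates',
      data: [
        { x: '01-04-2014 02:15:50', y: 175 },
        { x: '12-04-2014 12:19:27', y: 177 }
      ],
      fill: false,
      borderColor: 'blue'
    }]
  },
  options: {
    scales: {
      xAxes: [{
        type: 'time',
        time: {
          parser: timeFormat,                    // how the dataset x strings are parsed
          tooltipFormat: 'DD-MM-YYYY HH:mm:ss',  // format shown in tooltips
          unit: 'day',
          displayFormats: {
            day: 'DD-MM-YYYY HH:mm:ss'           // format used for the axis tick labels
          }
        },
        scaleLabel: { display: true, labelString: 'Date' }
      }],
      yAxes: [{ scaleLabel: { display: true, labelString: 'value' } }]
    }
  }
};

window.onload = function () {
  var ctx = document.getElementById('canvas').getContext('2d');
  window.myLine = new Chart(ctx, config);
};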
I have the latitude and longitude of a point. How do I convert them to the OpenLayers 3 map coordinate system?
My code is:
...requires...
var coord = [55.7522200, 37.61556005];
//coord = ol.proj.transform(coord, 'EPSG:4326', 'EPSG:3857');
var vectorSource = new ol.source.GeoJSON({
  object: {
    'type': 'FeatureCollection',
    'crs': {
      'type': 'name',
      'properties': {
        'name': 'EPSG:3857'
      }
    },
    'features': [
      {
        'type': 'Feature',
        'geometry': {
          'type': 'Point',
          'coordinates': coord
        }
      }
    ]
  }
});
...
var map = ...
Could you give me an example of converting the coord variable in JavaScript?
You can see this example of code at http://openlayers.org/en/v3.0.0/examples/geojson.html
If you have a coordinate [lon, lat] (in that order) you can transform it using:
var newCoord = ol.proj.transform([lon, lat], 'EPSG:4326', 'EPSG:3857');
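Note that ol.proj.transform takes [lon, lat] order, while the coord in the question looks like [lat, lon] (Moscow is roughly lat 55.75, lon 37.62), so the values need to be swapped. A minimal sketch of transforming the point and putting it in a vector layer follows; the feature/layer setup here is illustrative, not the original code:

var lonLat = [37.61556005, 55.7522200];  // [lon, lat] in EPSG:4326
var mapCoord = ol.proj.transform(lonLat, 'EPSG:4326', 'EPSG:3857');

var vectorSource = new ol.source.Vector({
  features: [new ol.Feature(new ol.geom.Point(mapCoord))]
});
var vectorLayer = new ol.layer.Vector({ source: vectorSource });
// map.addLayer(vectorLayer);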
I am trying to create a point cluster layer based on JSON objects I obtained from a database. Here is my JavaScript to plot the point cluster layer:
function addClusters() {
  $.ajax({
    url: "index.aspx/getBusCommuter",
    type: "POST",
    data: "",
    contentType: "application/json; charset=utf-8",
    dataType: "json",
    success: function (data) {
      var parsed = JSON.parse(data.d);
      $.each(parsed, function (i, jsondata) {
        var coordXicon = jsondata.BusStopX;
        var coordYicon = jsondata.BusStopY;
        var commuterAmt = jsondata.CommuterAmt;
        var latlng = new esri.geometry.Point({ "x": coordXicon, "y": coordYicon, "spatialReference": { "wkid": 4326 } });

        // cluster layer that uses OpenLayers style clustering
        clusterLayer = new ClusterLayer({
          "data": commuterAmt,
          "distance": 100,
          "id": "clusters",
          "labelColor": "#fff",
          "labelOffset": 10,
          "resolution": map.extent.getWidth() / map.width,
          "singleColor": "#888"
        });

        var defaultSym = new SimpleMarkerSymbol().setSize(4);
        var renderer = new ClassBreaksRenderer(defaultSym, "clusterCount");

        var picBaseUrl = "http://static.arcgis.com/images/Symbols/Shapes/";
        var blue = new PictureMarkerSymbol(picBaseUrl + "BluePin1LargeB.png", 32, 32).setOffset(0, 15);
        var green = new PictureMarkerSymbol(picBaseUrl + "GreenPin1LargeB.png", 64, 64).setOffset(0, 15);
        var red = new PictureMarkerSymbol(picBaseUrl + "RedPin1LargeB.png", 72, 72).setOffset(0, 15);

        renderer.addBreak(0, 2, blue);
        renderer.addBreak(2, 200, green);
        renderer.addBreak(200, 1001, red);

        clusterLayer.setRenderer(renderer);
        map.addLayer(clusterLayer);
      });
    },
    error: function (request, state, errors) {
    }
  });
}
However, when I run it, I get an error saying clusterLayer is not defined. I wonder which part I have missed and whether I am doing this the correct way.
Also, is it possible/correct to set the commuterAmt I obtained as data, so that each point on the map is attached to the correct amount?
I got the reference from: ArcGIS Documentation
Thanks in advance.
Try downloading the sample code from the ArcGIS Documentation.
Include ClusterLayer.js from the extras directory:
var dojoConfig = {
  paths: {
    extras: location.pathname.replace(/\/[^/]+$/, "") + "/extras"
  }
};
Then, in your code, declare the module with
dojo.provide("extras.ClusterLayer");
and call:
clusterLayer = new extras.ClusterLayer({
  "data": commuterAmt,
  "distance": 100,
  "id": "clusters",
  "labelColor": "#fff",
  "labelOffset": 10,
  "resolution": map.extent.getWidth() / map.width,
  "singleColor": "#888"
});
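If the modules are loaded with AMD require() instead of the legacy dojo.provide style, the same setup might look like the sketch below. It assumes the dojoConfig paths above, that map and the parsed array from the question's success callback are in scope, and that each data item follows the {x, y, attributes} shape used in the Esri clustering sample; it is a sketch, not tested code:

require([
  "extras/ClusterLayer",   // resolved through the "extras" path in dojoConfig
  "dojo/domReady!"
], function (ClusterLayer) {
  // Build one data array from the parsed JSON, then create a single cluster layer
  // instead of one layer per point inside $.each.
  var clusterData = parsed.map(function (jsondata) {
    return {
      x: jsondata.BusStopX,
      y: jsondata.BusStopY,
      attributes: { commuterAmt: jsondata.CommuterAmt }
    };
  });

  var clusterLayer = new ClusterLayer({
    data: clusterData,
    distance: 100,
    id: "clusters",
    labelColor: "#fff",
    labelOffset: 10,
    resolution: map.extent.getWidth() / map.width,
    singleColor: "#888"
  });
  map.addLayer(clusterLayer);
});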