Does anyone know of any inherent reductions that nvd3 does to y-values? Data seems to be correct after a JSON.parse, and the dates on the x-values are correct, but the y-values are charted much lower than they were input. Examples: the first y-value of 4.152 becomes 0 and 5.557 is graphed as 0.273, so it's not just subtracting the first y-value to start the chart at the origin. Any insight is greatly appreciated.
The Chart code (in a separate .js file):
// Bootstrap the nvd3 charts once the DOM is ready.
$(function () {
  tisa_nvd3_charts.cumulativeLine();
});
// nvd3 charts
// Namespace object holding the chart bootstrappers for this page.
// (Declared with `var` so it is an explicit global rather than an implicit one.)
var tisa_nvd3_charts = {
  /**
   * Renders the cumulative line chart into #nvd3_cumulativeLine, if the
   * container exists on the current page.
   *
   * NOTE(review): nv.models.cumulativeLineChart() re-indexes every series
   * relative to the index point — it plots (y - y0) / y0, not the raw
   * y-values — so the first point always becomes 0 and later points are
   * fractional changes. That presumably explains the "reduced" y-values in
   * the question; if absolute prices are wanted, nv.models.lineChart() is
   * likely the right model — confirm against the nvd3 docs.
   */
  cumulativeLine: function () {
    if ($('#nvd3_cumulativeLine').length) {
      nv.addGraph(function () {
        var chart = nv.models.cumulativeLineChart()
          .useInteractiveGuideline(true)
          .x(function (d) { return d[0]; })   // epoch-ms timestamp
          .y(function (d) { return d[1]; })   // closing price
          .color(d3.scale.category20().range())
          .transitionDuration(500)
          .clipVoronoi(false);

        // Format x ticks as mm/dd/yy dates built from the epoch-ms values.
        chart.xAxis.tickFormat(function (d) {
          return d3.time.format('%m/%d/%y')(new Date(d));
        });
        chart.yAxis.tickFormat(d3.format('$,.3f'));

        d3.select('#nvd3_cumulativeLine svg')
          .datum(cumulativeTestData())
          .call(chart);

        nv.utils.windowResize(chart.update);
        return chart;
      });
    }
  }
};
The .datum function in the View:
<script type="text/javascript">
function cumulativeTestData() {
var closes = JSON.parse('@Html.Raw(Json.Encode(Model.Coordinates))')
return [
{
key: "Closing Prices",
mean: 60,
values: closes
},
];
}
</script>
For reference, here is the string passed to JSON.parse, which parses correctly:
var closes = JSON.parse('[[1367341200000,4.152],[1369933200000,4.148],[1375203600000,3.459],[1377882000000,3.567], etc.]')
EDIT: Here is a Plunker that reproduces the issue, showing the y-values of the data being graphed at incorrect y-axis points: plnkr.co/edit/enR6tKQmpKWxroVHDVOQ?p=preview