summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFreeyorp <TheFreeYorp@NOSPAM.G.m.a.i.l.replace>2013-05-13 02:53:37 +1200
committerFreeyorp <TheFreeYorp@NOSPAM.G.m.a.i.l.replace>2013-05-13 02:59:23 +1200
commit699368ab864f95bbb1cd3f3906f225b395ecfec0 (patch)
tree28e44b70cfb02626917e3ed73e41c80304a8245a
parent14618ab57d45ea94604b632f2ffb40b68657ac49 (diff)
downloadmanavis-699368ab864f95bbb1cd3f3906f225b395ecfec0.tar.gz
manavis-699368ab864f95bbb1cd3f3906f225b395ecfec0.tar.bz2
manavis-699368ab864f95bbb1cd3f3906f225b395ecfec0.tar.xz
manavis-699368ab864f95bbb1cd3f3906f225b395ecfec0.zip
Remove pc chart, postprocess records, make blob download available
The PC chart is removed, as records being linked to specific characters could never be released publicly. The records are being postprocessed, as information about stats for unknown records could have been logged after the record (i.e. STAT on LOGOUT). The processed records are available for download in JSON format. Currently, this is only accessible by calling parser.createBlobLink() from the console. The process can be quite resource intensive. The file should be saved with "scrubbed" appearing somewhere in the filename. The loader now properly informs its each callback of the current file index and the total number of files. Files with "scrubbed" in the filename are interpreted as scrubbed logfiles and will be loaded in as JSON directly.
-rw-r--r--public/index.html3
-rw-r--r--public/js/mv/chart.js3
-rw-r--r--public/js/mv/heap.js1
-rw-r--r--public/js/mv/load.js2
-rw-r--r--public/js/mv/main.js9
-rw-r--r--public/js/mv/parse.js66
6 files changed, 75 insertions, 9 deletions
diff --git a/public/index.html b/public/index.html
index 97acd4b..1aa232a 100644
--- a/public/index.html
+++ b/public/index.html
@@ -47,9 +47,6 @@
<div id="date-chart">
<h3>Instance breakdown by Date <a class="reset" style="display: none;" href="javascript:mv.charts.date.filterAll();dc.redrawAll();">clear</a></h3>
</div>
- <div id="player-chart">
- <h3>Instance breakdown by Character ID <span class="help" title="Older to newer characters appear left to right, respectively.">[?]</span> <a class="reset" style="display: none;" href="javascript:mv.charts.pc.filterAll();dc.redrawAll();">clear</a></h3>
- </div>
</div>
</div>
<div id="mask"><noscript><h1>Javascript is required for this website.</h1></noscript>
diff --git a/public/js/mv/chart.js b/public/js/mv/chart.js
index d06d40e..e5fbcbb 100644
--- a/public/js/mv/chart.js
+++ b/public/js/mv/chart.js
@@ -13,9 +13,6 @@ var mv = function(mv) {
.xUnits(d3.time.hours)
.xAxisPadding(2)
;
- mv.charts.pc = bar(monoGroup(wide(dc.barChart("#player-chart")), "pc"))
- .x(d3.scale.linear().domain([mv.heap.pc.dim.bottom(1)[0].pc, mv.heap.pc.dim.top(1)[0].pc]).nice())
- ;
mv.charts.blvl = bar(monoGroup(med(dc.barChart("#blvl-chart")), "blvl"))
.x(d3.scale.linear().domain([0, mv.heap.blvl.dim.top(1)[0].pcstat.blvl]))
;
diff --git a/public/js/mv/heap.js b/public/js/mv/heap.js
index 03f3c00..c5a0a12 100644
--- a/public/js/mv/heap.js
+++ b/public/js/mv/heap.js
@@ -10,7 +10,6 @@ var mv = function(mv) {
heap.cfdata = crossfilter(mv.parser.records);
heap.all = heap.cfdata.groupAll().reduce(ea, es, ez);
monoGroup("date", function(d) { return d3.time.hour.round(d.date); });
- monoGroup("pc", function(d) { return d.pc; });
monoGroup("map", function(d) { return d.map; }).reduce(ea, es, ez);
monoGroup("blvl", function(d) { return d.pcstat ? d.pcstat.blvl : 0; });
monoGroup("type", function(d) { return d.type; });
diff --git a/public/js/mv/load.js b/public/js/mv/load.js
index 39dd391..2cb7936 100644
--- a/public/js/mv/load.js
+++ b/public/js/mv/load.js
@@ -40,7 +40,7 @@ var mv = function(mv) {
reader.onabort = function() { loader.onabort.apply(null, arguments) };
reader.onloadstart = function() { loader.onloadstart.apply(null, arguments) };
reader.onload = function(evt) {
- each(reader.result);
+ each(reader.result, curfile, numfiles);
++curfile;
if (curfile == numfiles) {
after();
diff --git a/public/js/mv/main.js b/public/js/mv/main.js
index b7fcb1e..f36e866 100644
--- a/public/js/mv/main.js
+++ b/public/js/mv/main.js
@@ -27,7 +27,13 @@ var mv = function(mv) {
mv.loader.init(handleFile, postLoading);
function handleFile(data, curFileNum, numFiles) {
loadbar.complete();
- mv.parser.parseRecords(data);
+ if (mv.loader.filenames()[curFileNum].indexOf("scrubbed") != -1) {
+ /* Scrubbed data! */
+ mv.parser.parseScrubbed(data);
+ } else {
+ /* Raw logs. */
+ mv.parser.parseRecords(data);
+ }
}
function postLoading() {
filesbar.complete();
@@ -35,6 +41,7 @@ var mv = function(mv) {
setTimeout(function() {
loadbar.hide();
}, 2000);
+ mv.parser.postProcessing();
mv.heap.init();
setTimeout(function() {
filesbar.hide();
diff --git a/public/js/mv/parse.js b/public/js/mv/parse.js
index 8094bc5..f8c1fd7 100644
--- a/public/js/mv/parse.js
+++ b/public/js/mv/parse.js
@@ -1,7 +1,16 @@
var mv = function(mv) {
mv.parser = function() {
var parser = {};
+ /* The most recent information of a pc's stat */
var pcstat = {};
+ /*
+ * The first recorded state of a pc's stat.
+ * This is saved for a second pass, in which instances unknown at the time can have the pc's stat applied.
+ */
+ var firstpcstat = {};
+ /*
+ * The time stamp of the last unknown instance.
+ */
var fullyDefinedCutoff = 0;
parser.records = [];
parser.fullyDefinedCutoff = function() { return fullyDefinedCutoff; };
@@ -93,16 +102,73 @@ var mv = function(mv) {
s.int = Math.floor(s.int / 10);
s.dex = Math.floor(s.dex / 10);
s.luk = Math.floor(s.luk / 10);
+ if (!(d[1] in firstpcstat)) {
+ firstpcstat = s;
+ }
pcstat[d[1]] = s;
return;
}
});
};
+ parser.postProcessing = function() {
+ /* Scrub reference to pc id, and scan up until the fully defined cutoff line, assigning the pcstat from those that logged off */
+ var i = 0;
+ /* This name has way too many warts; suggestions for a replacement welcome! */
+ var postProcessedfullyDefinedCutoff = 0;
+ for (; i != parser.records.length && parser.records[i].date <= fullyDefinedCutoff; ++i) {
+ /* See if we've found out what the stats were from information logged after the record. */
+ if (parser.records[i].pc in firstpcstat) {
+ parser.records[i].pcstat = firstpcstat[parser.records[i].pc];
+ } else {
+ /* If not, adjust the fully defined cutoff. */
+ postProcessedfullyDefinedCutoff = parser.records[i].date;
+ }
+ /* Remove references to pc from these records. */
+ delete parser.records[i].pc;
+ }
+ /* Remove references to pc from the remaining records. */
+ for (; i != parser.records.length; ++i) {
+ delete parser.records[i].pc;
+ }
+ fullyDefinedCutoff = postProcessedfullyDefinedCutoff;
+ }
function softAssert(expr, msg) {
if (!expr) {
console.error("SOFTASSERT FAILURE: " + msg);
}
}
+ parser.createBlobLink = function() {
+ /* Make the scrubbed data available for download as a blob. */
+ var blob = new Blob(JSON.stringify(parser.records));
+ var a = d3.select('body').append('a');
+ a
+ .text("Scrubbed records")
+ .attr("download", "map.scrubbed")
+ .attr("href", window.URL.createObjectURL(blob))
+ ;
+ }
+ parser.parseScrubbed = function(scrubbedRecords) {
+ scrubbedRecords = JSON.parse(scrubbedRecords);
+ console.log(scrubbedRecords, scrubbedRecords.length);
+ /*
+ * The work is mostly all done for us. Just scan through to see if there
+ * are any undefined records, and update the pointer if so.
+ */
+ /*
+ * Note that because we do not have the IDs, we cannot do a second pass
+ * to see if there is any information outside of the file that would
+ * tell us what the stats are, because we do not have that information.
+ * We can only get as good as what we were given!
+ */
+ for (var i = 0; i != scrubbedRecords.length; ++i) {
+ scrubbedRecords[i].date = new Date(scrubbedRecords[i].date);
+ if (scrubbedRecords[i].pcstat == undefined && (!fullyDefinedCutoff || scrubbedRecords[i].date > fullyDefinedCutoff)) {
+ fullyDefinedCutoff = scrubbedRecords[i].date;
+ }
+ }
+ /* It's simple when everything's already been done. */
+ parser.records = parser.records.concat(scrubbedRecords);
+ }
return parser;
}();
return mv;