Fix SerializedObject append for excessively large bytes length

wltsmrz
2015-05-20 20:17:53 -07:00
parent 6ea07139dc
commit e93f1ab6f4
3 changed files with 24 additions and 15 deletions


@@ -157,13 +157,18 @@ SerializedObject.check_fields = function(typedef, obj) {
 SerializedObject.prototype.append = function(bytes_) {
   const bytes = bytes_ instanceof SerializedObject ? bytes_.buffer : bytes_;
-  // Make sure both buffer and bytes are Array. Either could potentially be a
-  // Buffer.
+  // Make sure both buffer and bytes are Array. Either could be a Buffer.
   if (Array.isArray(this.buffer) && Array.isArray(bytes)) {
-    // Array::concat is horribly slow where buffer length is 100 kbytes+. One
-    // transaction with 1100 affected nodes took around 23 seconds to convert
-    // from json to bytes.
-    Array.prototype.push.apply(this.buffer, bytes);
+    // `this.buffer = this.buffer.concat(bytes)` can be unbearably slow for
+    // large bytes length, and acceptable bytes length is limited for
+    // `Array.prototype.push.apply(this.buffer, bytes)` as every element in the
+    // bytes array is pushed onto the stack, potentially causing a RangeError
+    // exception. Both of these solutions are known to be problematic for
+    // ledger 7501326. KISS instead.
+    for (let i = 0; i < bytes.length; i++) {
+      this.buffer.push(bytes[i]);
+    }
   } else {
     this.buffer = this.buffer.concat(bytes);
   }
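
For context (an editor's note, not part of the commit): the new comment says `concat` is slow because it copies the accumulated buffer on every append, while `Array.prototype.push.apply` passes every byte as a separate call argument and can overflow the engine's argument limit. A minimal sketch of both failure modes, assuming Node.js and an illustrative 200000-element array (the exact limit for `Function.prototype.apply` is engine-dependent):

'use strict';

// A large plain Array of bytes, standing in for a big serialized ledger.
const bytes = new Array(200000).fill(0);

// Approach 1: concat copies the entire accumulated buffer each time, so
// repeated appends to a growing buffer degrade toward quadratic time.
let buffer = [];
buffer = buffer.concat(bytes); // correct, but slow when repeated at scale

// Approach 2: push.apply spreads every element of `bytes` onto the call
// stack as a separate argument; past an engine-dependent count this throws
// "RangeError: Maximum call stack size exceeded".
try {
  Array.prototype.push.apply([], bytes);
} catch (e) {
  console.log(e.name); // RangeError, if the engine's limit is below 200000
}

// The commit's fix: a plain loop is linear per append and never touches
// the argument limit.
const out = [];
for (let i = 0; i < bytes.length; i++) {
  out.push(bytes[i]);
}
console.log(out.length); // 200000

ES2015 spread (`out.push(...bytes)`) is subject to the same argument-count limit, so the plain loop remains the safe choice here.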

File diff suppressed because one or more lines are too long


@@ -1,4 +1,4 @@
-/* eslint-disable max-len */
+/* eslint-disable max-len, valid-jsdoc */
 'use strict';
 const assert = require('assert');
@@ -10,22 +10,23 @@ const Ledger = require('ripple-lib').Ledger;
  * @param ledger_index {Number}
  * Expects a corresponding ledger dump in $repo/test/fixtures/ folder
  */
 function create_ledger_test(ledger_index) {
   describe(String(ledger_index), function() {
     const path = __dirname + '/fixtures/ledger-full-' + ledger_index + '.json';
     const ledger_raw = fs.readFileSync(path);
     const ledger_json = JSON.parse(ledger_raw);
     const ledger = Ledger.from_json(ledger_json);
-    it('has account_hash of ' + ledger_json.account_hash, function() {
-      assert.equal(ledger_json.account_hash,
-        ledger.calc_account_hash({
-          sanity_test: true
-        }).to_hex());
-    });
+    const hasAccounts = Array.isArray(ledger_json.accountState)
+      && ledger_json.accountState.length > 0;
+    if (hasAccounts) {
+      it('has account_hash of ' + ledger_json.account_hash, function() {
+        assert.equal(ledger_json.account_hash,
+          ledger.calc_account_hash({sanity_test: true}).to_hex());
+      });
+    }
     it('has transaction_hash of ' + ledger_json.transaction_hash, function() {
       assert.equal(ledger_json.transaction_hash,
         ledger.calc_tx_hash().to_hex());
@@ -37,6 +38,8 @@ describe('Ledger', function() {
   create_ledger_test(38129);
   // Because, why not.
   create_ledger_test(40000);
+  // 1311 AffectedNodes, no accounts
+  create_ledger_test(7501326);
   describe('#calcAccountRootEntryHash', function() {
     it('will calculate the AccountRoot entry hash for rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh', function() {