( function ( msgpackr , chai ) {
'use strict' ;
chai = chai && Object . prototype . hasOwnProperty . call ( chai , 'default' ) ? chai [ 'default' ] : chai ;
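// Fixture for the round-trip and performance tests below: clinical-trial
// metadata, group/outcome records and scores are assembled into `sampleData`
// to exercise nested objects, arrays, nulls, floats and long strings.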
var metadata = {
Designs : [
"Randomized Controlled Trial"
] ,
Types : [
] ,
BriefSummary : "To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator." ,
Abstract : "To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator." ,
Acronym : null ,
ArticleId : "Qy3gwKWSoaWRmbmFEQA" ,
Authors : null ,
CochraneID : null ,
Confidential : false ,
CorporateAuthor : null ,
Country : "Bulgaria, Canada, Hungary, Israel, Norway, Slovakia, United Kingdom, United States" ,
CustomData : null ,
DatabaseType : "ClinicalTrials.gov" ,
DOI : null ,
EmbaseAccessionNumber : null ,
Emtree : null ,
ErrataText : null ,
FullTextURL : null ,
Institution : null ,
ISSN : null ,
Issue : null ,
JournalTitle : null ,
MedlineID : null ,
MeSH : "Hypercholesterolemia|Antibodies, Monoclonal" ,
Pages : null ,
ParentChildStatus : null ,
ParentID : null ,
PublicationDate : "March 21, 2017" ,
PublicationYear : 2017 ,
PubType : null ,
ReferenceStudy : null ,
SecondarySourceID : null ,
Source : "Regeneron Pharmaceuticals" ,
SourceReferenceId : "NCT01926782" ,
TaStudyDesign : "Randomized" ,
Title : "A Randomized, Double-Blind, Placebo-Controlled Study to Evaluate the Efficacy and Safety of an Every Four Weeks Treatment Regimen of Alirocumab in Patients With Primary Hypercholesterolemia" ,
TrialOutcome : null ,
Volume : null ,
Id : 179246831 ,
Created : "2020-04-10T14:48:20.4384957Z" ,
VersionNo : 2 ,
ExtractData : null ,
Digitized : true ,
IsRapidExtract : false ,
IsUploaded : false
} ;
var design = "Randomized Controlled Trial" ;
var conditions = [
{
label : "Cholesterol Total Increased" ,
id : "SUE_c"
}
] ;
var phase = 3 ;
var name = "NCT01926782" ;
var trialIds = [
"NCT01926782"
] ;
var acronyms = [
] ;
var outcomeCount = 156 ;
var id = 179246831 ;
var groups = [
{
Id : "4r" ,
RefId : "B5|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin" ,
OriginalName : "Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin" ,
N : 37 ,
age : 59.3 ,
ageSD : 11.3 ,
male : 37.83783783783784 ,
Interventions : [
{
termIds : [
[
"SUBYEL" ,
"SUB_Oc"
] ,
[
"SUNUVb"
]
]
}
] ,
analyzeAs : "Alirocumab" ,
analyzableScore : 1.0717734625362931 ,
matchingScore : 0
} ,
{
Id : "zB" ,
RefId : "B6|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W Without Concomitant Statin" ,
OriginalName : "Alirocumab 300 mg Q4W/Up 150 mg Q2W Without Concomitant Statin" ,
N : 146 ,
age : 59.2 ,
ageSD : 10.8 ,
male : 45.205479452054796 ,
Interventions : [
{
termIds : [
[
"SUBYEL" ,
"SUB_Oc"
]
]
}
] ,
analyzeAs : "Statins" ,
analyzableScore : 1.0717734625362931 ,
matchingScore : 0
} ,
{
Id : "3!" ,
RefId : "B4|O1~Placebo Q2W Without Concomitant Statin" ,
OriginalName : "Placebo Q2W Without Concomitant Statin" ,
N : 73 ,
age : 59.4 ,
ageSD : 10.2 ,
male : 54.794520547945204 ,
Interventions : [
{
termIds : [
[
"SUGeLS"
] ,
[
"SUBYEL" ,
"SUB_Oc"
]
]
}
] ,
analyzeAs : "Control" ,
analyzableScore : 1.2020833333333334 ,
matchingScore : 0
} ,
{
Id : "tv" ,
RefId : "E3" ,
OriginalName : "Alirocumab 300 mg Q4W/Up 150 mg Q2W" ,
Interventions : [
{
termIds : [
[
"SUCO54" ,
"SUNUVb"
]
]
}
]
} ,
{
Id : "jt" ,
RefId : "B3|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin" ,
OriginalName : "Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin" ,
N : 312 ,
age : 61.6 ,
ageSD : 10 ,
male : 60.8974358974359 ,
Interventions : [
{
termIds : [
[
"SUBYEL" ,
"SUB_Oc"
]
]
}
]
} ,
{
Id : "5!" ,
RefId : "E2" ,
OriginalName : "Alirocumab 75 mg Q2W/Up 150 mg Q2W" ,
Interventions : [
{
termIds : [
[
"SUNUVb"
]
]
}
]
} ,
{
Id : "4E" ,
RefId : "B2|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin" ,
OriginalName : "Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin" ,
N : 78 ,
age : 60.7 ,
ageSD : 9.1 ,
male : 65.38461538461539 ,
Interventions : [
{
termIds : [
[
"SUBYEL" ,
"SUB_Oc"
] ,
[
"SUNUVb"
]
]
}
]
} ,
{
Id : "i4" ,
Interventions : [
{
Id : "Ya" ,
Name : 178613599 ,
Treatments : [
{
Id : "((" ,
Phase : "k)"
}
] ,
Type : "Drug" ,
termIds : [
[
"SUGeLS"
] ,
[
"SUNUVb"
]
] ,
terms : [
[
"Placebo"
] ,
[
"Alirocumab"
]
]
} ,
{
Id : "o)" ,
Name : 2159990 ,
Treatments : [
{
Id : "1$" ,
Phase : "k)"
}
] ,
Type : "Drug" ,
termIds : [
[
"SUBYEL"
]
] ,
terms : [
[
"Statins"
]
]
}
] ,
RefId : "E1|Placebo Q2W" ,
OriginalName : "Placebo Q2W"
} ,
{
Id : "Ls" ,
RefId : "B1|O1~Placebo Q2W With Concomitant Statin" ,
OriginalName : "Placebo Q2W With Concomitant Statin" ,
N : 157 ,
age : 61.6 ,
ageSD : 9.7 ,
male : 64.3312101910828 ,
Interventions : [
{
termIds : [
[
"SUGeLS"
] ,
[
"SUBYEL" ,
"SUB_Oc"
]
]
}
]
}
] ;
var hasDocData = true ;
var hasRapidExtract = false ;
var N = 803 ;
var queryScore = 1.4868329805051381 ;
var matchingScore = 7.960635921410255 ;
var score = 22.084654254966498 ;
var outcomes = [
{
id : "179246387" ,
type : "Change" ,
unit : "%" ,
termIds : [
[
"SUF0R" ,
"SUBskP"
]
] ,
quantifiers : [
] ,
name : "Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis" ,
cells : [
{
number : - 0.4 ,
unit : "%" ,
group : "3!" ,
varType : "se" ,
N : 70 ,
se : 2 ,
sd : 16.73
} ,
{
number : - 54.6 ,
unit : "%" ,
group : "4r" ,
varType : "se" ,
N : 37 ,
se : 2.8 ,
sd : 17.03
} ,
{
number : - 59.4 ,
unit : "%" ,
group : "zB" ,
varType : "se" ,
N : 141 ,
se : 1.4 ,
sd : 16.62
}
] ,
time : {
Id : 67122072 ,
Low : {
Value : "Baseline"
} ,
High : {
"Number" : 24 ,
Unit : "wk"
} ,
Type : "Total" ,
days : 168 ,
description : "24wk"
} ,
score : 2.08 ,
matchingTerm : "SUF0R" ,
suggestedPositive : false ,
sourceUnit : "%"
} ,
{
id : "179246389" ,
type : "Change" ,
unit : "%" ,
termIds : [
[
"SUF0R" ,
"SUBskP"
]
] ,
quantifiers : [
] ,
name : "Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis" ,
cells : [
{
number : - 0.3 ,
unit : "%" ,
group : "Ls" ,
varType : "se" ,
N : 151 ,
se : 2.1 ,
sd : 25.81
} ,
{
number : - 55.1 ,
unit : "%" ,
group : "4E" ,
varType : "se" ,
N : 75 ,
se : 3 ,
sd : 25.98
} ,
{
number : - 62.3 ,
unit : "%" ,
group : "jt" ,
varType : "se" ,
N : 302 ,
se : 1.5 ,
sd : 26.07
}
] ,
time : {
Id : 67122072 ,
Low : {
Value : "Baseline"
} ,
High : {
"Number" : 24 ,
Unit : "wk"
} ,
Type : "Total" ,
days : 168 ,
description : "24wk"
} ,
score : 2.08 ,
matchingTerm : "SUF0R" ,
suggestedPositive : false ,
sourceUnit : "%"
} ,
{
id : "179246393" ,
type : "Change" ,
unit : "%" ,
termIds : [
[
"SUF0R" ,
"SUBskP"
]
] ,
quantifiers : [
] ,
name : "Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis" ,
cells : [
{
number : - 0.5 ,
unit : "%" ,
group : "3!" ,
varType : "se" ,
N : 70 ,
se : 2 ,
sd : 16.73
} ,
{
number : - 53.9 ,
unit : "%" ,
group : "4r" ,
varType : "se" ,
N : 37 ,
se : 2.7 ,
sd : 16.42
} ,
{
number : - 60 ,
unit : "%" ,
group : "zB" ,
varType : "se" ,
N : 141 ,
se : 1.4 ,
sd : 16.62
}
] ,
time : {
Id : 67122069 ,
Low : {
Value : "Baseline"
} ,
High : {
"Number" : 12 ,
Unit : "wk"
} ,
Type : "Total" ,
days : 84 ,
description : "12wk"
} ,
score : 2.08 ,
matchingTerm : "SUF0R" ,
suggestedPositive : false ,
sourceUnit : "%"
} ,
{
id : "179246394" ,
type : "Change" ,
unit : "%" ,
termIds : [
[
"SUF0R" ,
"SUBskP"
]
] ,
quantifiers : [
] ,
name : "Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis" ,
cells : [
{
number : 1.4 ,
unit : "%" ,
group : "Ls" ,
varType : "se" ,
N : 151 ,
se : 1.9 ,
sd : 23.35
} ,
{
number : - 47.3 ,
unit : "%" ,
group : "4E" ,
varType : "se" ,
N : 75 ,
se : 2.8 ,
sd : 24.25
} ,
{
number : - 58 ,
unit : "%" ,
group : "jt" ,
varType : "se" ,
N : 302 ,
se : 1.4 ,
sd : 24.33
}
] ,
time : {
Id : 67122069 ,
Low : {
Value : "Baseline"
} ,
High : {
"Number" : 12 ,
Unit : "wk"
} ,
Type : "Total" ,
days : 84 ,
description : "12wk"
} ,
score : 2.08 ,
matchingTerm : "SUF0R" ,
suggestedPositive : false ,
sourceUnit : "%"
}
] ;
var characteristics = [
{
id : "179246354" ,
type : "Binary" ,
isCharacteristic : true ,
termIds : [
[
"SUE_c" ,
"SUCbN" ,
"SUyJj"
]
] ,
quantifiers : [
] ,
name : "Patients not having adequate control of their hypercholesterolemia based on their individual level of CVD risk" ,
cells : [
] ,
number : 100
}
] ;
var outcomesScore = 18.97947630112307 ;
var sampleData = {
metadata : metadata ,
design : design ,
conditions : conditions ,
phase : phase ,
name : name ,
trialIds : trialIds ,
acronyms : acronyms ,
outcomeCount : outcomeCount ,
id : id ,
groups : groups ,
hasDocData : hasDocData ,
hasRapidExtract : hasRapidExtract ,
N : N ,
queryScore : queryScore ,
matchingScore : matchingScore ,
score : score ,
outcomes : outcomes ,
characteristics : characteristics ,
outcomesScore : outcomesScore
} ;
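// Loads an optional module, returning an empty object when it is unavailable
// (e.g. `zlib` when these tests run in the browser).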
function tryRequire ( module ) {
try {
return require ( module )
} catch ( error ) {
return { }
}
}
//if (typeof chai === 'undefined') { chai = require('chai') }
var assert = chai . assert ;
//if (typeof msgpackr === 'undefined') { msgpackr = require('..') }
var Packr = msgpackr . Packr ;
var unpack = msgpackr . unpack ;
var unpackMultiple = msgpackr . unpackMultiple ;
var roundFloat32 = msgpackr . roundFloat32 ;
var pack = msgpackr . pack ;
var DECIMAL_FIT = msgpackr . FLOAT32_OPTIONS . DECIMAL_FIT ;
var addExtension = msgpackr . addExtension ;
var zlib = tryRequire ( 'zlib' ) ;
var deflateSync = zlib . deflateSync ;
var inflateSync = zlib . inflateSync ;
var brotliCompressSync = zlib . brotliCompressSync ;
var brotliDecompressSync = zlib . brotliDecompressSync ;
var constants = zlib . constants ;
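// Iteration count shared by the performance suite at the bottom of the file.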
var ITERATIONS = 4000 ;
suite ( 'msgpackr basic tests' , function ( ) {
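// Most tests below pack a value and assert that the unpacked result
// deep-equals the input; individual tests add checks specific to the feature
// they cover.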
test ( 'pack/unpack data' , function ( ) {
var data = {
data : [
{ a : 1 , name : 'one' , type : 'odd' , isOdd : true } ,
{ a : 2 , name : 'two' , type : 'even' } ,
{ a : 3 , name : 'three' , type : 'odd' , isOdd : true } ,
{ a : 4 , name : 'four' , type : 'even' } ,
{ a : 5 , name : 'five' , type : 'odd' , isOdd : true } ,
{ a : 6 , name : 'six' , type : 'even' , isOdd : null }
] ,
description : 'some names' ,
types : [ 'odd' , 'even' ] ,
convertEnumToNum : [
{ prop : 'test' } ,
{ prop : 'test' } ,
{ prop : 'test' } ,
{ prop : 1 } ,
{ prop : 2 } ,
{ prop : [ undefined ] } ,
{ prop : null }
]
} ;
let structures = [ ] ;
let packr = new Packr ( { structures } ) ;
var serialized = packr . pack ( data ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'mixed array' , function ( ) {
var data = [
'one' ,
'two' ,
'one' ,
10 ,
11 ,
null ,
true ,
'three' ,
'three' ,
'one' , [
3 , - 5 , - 50 , - 400 , 1.3 , - 5.3 , true
]
] ;
let structures = [ ] ;
let packr = new Packr ( { structures } ) ;
var serialized = packr . pack ( data ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( '255 chars' , function ( ) {
const data = 'RRZG9A6I7xupPeOZhxcOcioFsuhszGOdyDUcbRf4Zef2kdPIfC9RaLO4jTM5JhuZvTsF09fbRHMGtqk7YAgu3vespeTe9l61ziZ6VrMnYu2CamK96wCkmz0VUXyqaiUoTPgzk414LS9yYrd5uh7w18ksJF5SlC2e91rukWvNqAZJjYN3jpkqHNOFchCwFrhbxq2Lrv1kSJPYCx9blRg2hGmYqTbElLTZHv20iNqwZeQbRMgSBPT6vnbCBPnOh1W' ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . equal ( deserialized , data ) ;
} ) ;
test ( 'pack/unpack sample data' , function ( ) {
var data = sampleData ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'pack/unpack sample data with records' , function ( ) {
var data = sampleData ;
let structures = [ ] ;
let packr = new Packr ( { structures , useRecords : true } ) ;
var serialized = packr . pack ( data ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
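// Node-only: copies two different packed payloads into one shared Buffer and
// unpacks each with its packed length as the second argument, checking both
// decode correctly despite stale bytes further along in the buffer.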
if ( typeof Buffer != 'undefined' )
test ( 'replace data' , function ( ) {
var data1 = {
data : [
{ a : 1 , name : 'one' , type : 'odd' , isOdd : true , a : '13 characters' } ,
{ a : 2 , name : 'two' , type : 'even' , a : '11 characte' } ,
{ a : 3 , name : 'three' , type : 'odd' , isOdd : true , a : '12 character' } ,
{ a : 4 , name : 'four' , type : 'even' , a : '9 charact' } ,
{ a : 5 , name : 'five' , type : 'odd' , isOdd : true , a : '14 characters!' } ,
{ a : 6 , name : 'six' , type : 'even' , isOdd : null }
] ,
} ;
var data2 = {
data : [
{ foo : 7 , name : 'one' , type : 'odd' , isOdd : true } ,
{ foo : 8 , name : 'two' , type : 'even' } ,
{ foo : 9 , name : 'three' , type : 'odd' , isOdd : true } ,
{ foo : 10 , name : 'four' , type : 'even' } ,
{ foo : 11 , name : 'five' , type : 'odd' , isOdd : true } ,
{ foo : 12 , name : 'six' , type : 'even' , isOdd : null }
] ,
} ;
var serialized1 = pack ( data1 ) ;
var serialized2 = pack ( data2 ) ;
var b = Buffer . alloc ( 8000 ) ;
serialized1 . copy ( b ) ;
var deserialized1 = unpack ( b , serialized1 . length ) ;
serialized2 . copy ( b ) ;
var deserialized2 = unpack ( b , serialized2 . length ) ;
assert . deepEqual ( deserialized1 , data1 ) ;
assert . deepEqual ( deserialized2 , data2 ) ;
} ) ;
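// Custom extension using pack/unpack callbacks: instances of Extended are
// serialized as msgpack extension type 11, with the payload itself encoded
// by a nested Packr.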
test ( 'extended class pack/unpack' , function ( ) {
function Extended ( ) {
}
Extended . prototype . getDouble = function ( ) {
return this . value * 2
} ;
var instance = new Extended ( ) ;
instance . value = 4 ;
instance . string = 'decode this: ᾜ' ;
var data = {
prop1 : 'has multi-byte: ᾜ' ,
extendedInstance : instance ,
prop2 : 'more string' ,
num : 3 ,
} ;
let packr = new Packr ( ) ;
addExtension ( {
Class : Extended ,
type : 11 ,
unpack : function ( buffer ) {
let e = new Extended ( ) ;
let data = packr . unpack ( buffer ) ;
e . value = data [ 0 ] ;
e . string = data [ 1 ] ;
return e
} ,
pack : function ( instance ) {
return packr . pack ( [ instance . value , instance . string ] )
}
} ) ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( data , deserialized ) ;
assert . equal ( deserialized . extendedInstance . getDouble ( ) , 8 ) ;
} ) ;
test ( 'extended class pack/unpack custom size' , function ( ) {
function TestClass ( ) {
}
addExtension ( {
Class : TestClass ,
type : 0x01 ,
pack ( ) {
return typeof Buffer != 'undefined' ? Buffer . alloc ( 256 ) : new Uint8Array ( 256 )
} ,
unpack ( data ) {
return data . length
}
} ) ;
let result = unpack ( pack ( new TestClass ( ) ) ) ;
assert . equal ( result , 256 ) ;
} ) ;
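// Same idea with the read/write callback style: the extension exchanges a
// plain array and lets msgpackr handle the actual byte encoding.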
test ( 'extended class read/write' , function ( ) {
function Extended ( ) {
}
Extended . prototype . getDouble = function ( ) {
return this . value * 2
} ;
var instance = new Extended ( ) ;
instance . value = 4 ;
instance . string = 'decode this: ᾜ' ;
var data = {
prop1 : 'has multi-byte: ᾜ' ,
extendedInstance : instance ,
prop2 : 'more string' ,
num : 3 ,
} ;
let packr = new Packr ( ) ;
addExtension ( {
Class : Extended ,
type : 12 ,
read : function ( data ) {
let e = new Extended ( ) ;
e . value = data [ 0 ] ;
e . string = data [ 1 ] ;
return e
} ,
write : function ( instance ) {
return [ instance . value , instance . string ]
}
} ) ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( data , deserialized ) ;
assert . equal ( deserialized . extendedInstance . getDouble ( ) , 8 ) ;
} ) ;
test . skip ( 'text decoder' , function ( ) {
let td = new TextDecoder ( 'ISO-8859-15' ) ;
let b = Buffer . alloc ( 3 ) ;
for ( var i = 0 ; i < 256 ; i ++ ) {
b [ 0 ] = i ;
b [ 1 ] = 0 ;
b [ 2 ] = 0 ;
let s = td . decode ( b ) ;
if ( ! require ( 'msgpackr-extract' ) . isOneByte ( s ) ) {
console . log ( i . toString ( 16 ) , s . length ) ;
}
}
} ) ;
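// With structuredClone enabled, cyclic and repeated references must survive
// a round trip as the same object identities.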
test ( 'structured cloning: self reference' , function ( ) {
let object = {
test : 'string' ,
children : [
{ name : 'child' }
]
} ;
object . self = object ;
object . children [ 1 ] = object ;
object . children [ 2 ] = object . children [ 0 ] ;
object . childrenAgain = object . children ;
let packr = new Packr ( {
structuredClone : true ,
} ) ;
var serialized = packr . pack ( object ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . equal ( deserialized . self , deserialized ) ;
assert . equal ( deserialized . children [ 0 ] . name , 'child' ) ;
assert . equal ( deserialized . children [ 1 ] , deserialized ) ;
assert . equal ( deserialized . children [ 0 ] , deserialized . children [ 2 ] ) ;
assert . equal ( deserialized . children , deserialized . childrenAgain ) ;
} ) ;
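// structuredClone should also preserve Error, Set, RegExp and typed-array
// instances (including a Float32Array view at a non-zero byte offset).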
test ( 'structured cloning: types' , function ( ) {
let b = typeof Buffer != 'undefined' ? Buffer . alloc ( 20 ) : new Uint8Array ( 20 ) ;
let fa = new Float32Array ( b . buffer , 8 , 2 ) ;
fa [ 0 ] = 2.25 ;
fa [ 1 ] = 6 ;
let object = {
error : new Error ( 'test' ) ,
set : new Set ( [ 'a' , 'b' ] ) ,
regexp : /test/gi ,
float32Array : fa ,
uint16Array : new Uint16Array ( [ 3 , 4 ] )
} ;
let packr = new Packr ( {
structuredClone : true ,
} ) ;
var serialized = packr . pack ( object ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( Array . from ( deserialized . set ) , Array . from ( object . set ) ) ;
assert . equal ( deserialized . error . message , object . error . message ) ;
assert . equal ( deserialized . regexp . test ( 'TEST' ) , true ) ;
assert . equal ( deserialized . float32Array . constructor . name , 'Float32Array' ) ;
assert . equal ( deserialized . float32Array [ 0 ] , 2.25 ) ;
assert . equal ( deserialized . float32Array [ 1 ] , 6 ) ;
assert . equal ( deserialized . uint16Array . constructor . name , 'Uint16Array' ) ;
assert . equal ( deserialized . uint16Array [ 0 ] , 3 ) ;
assert . equal ( deserialized . uint16Array [ 1 ] , 4 ) ;
} ) ;
test ( 'object without prototype' , function ( ) {
var data = Object . create ( null ) ;
data . test = 3 ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
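// Packing 200 distinct record shapes: with the default limit only 32 shared
// structures are saved, while maxSharedStructures: 100 lets the saved set
// grow to 100, and payloads packed under the old limit must still decode.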
test ( 'many shared structures' , function ( ) {
let data = [ ] ;
for ( let i = 0 ; i < 200 ; i ++ ) {
data . push ( { [ 'a' + i ] : i } ) ;
}
let structures = [ ] ;
let savedStructures ;
let packr = new Packr ( {
structures ,
saveStructures ( structures ) {
savedStructures = structures ;
}
} ) ;
var serializedWith32 = packr . pack ( data ) ;
assert . equal ( savedStructures . length , 32 ) ;
var deserialized = packr . unpack ( serializedWith32 ) ;
assert . deepEqual ( deserialized , data ) ;
structures = structures . slice ( 0 , 32 ) ;
packr = new Packr ( {
structures ,
maxSharedStructures : 100 ,
saveStructures ( structures ) {
savedStructures = structures ;
}
} ) ;
deserialized = packr . unpack ( serializedWith32 ) ;
assert . deepEqual ( deserialized , data ) ;
structures = structures . slice ( 0 , 32 ) ;
packr = new Packr ( {
structures ,
maxSharedStructures : 100 ,
saveStructures ( structures ) {
savedStructures = structures ;
}
} ) ;
let serialized = packr . pack ( data ) ;
assert . equal ( savedStructures . length , 100 ) ;
deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
deserialized = packr . unpack ( serializedWith32 ) ;
assert . deepEqual ( deserialized , data ) ;
assert . equal ( savedStructures . length , 100 ) ;
deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
assert . equal ( packr . structures . sharedLength , 100 ) ;
} ) ;
test ( 'more shared structures' , function ( ) {
const structures = [ ] ;
for ( let i = 0 ; i < 40 ; i ++ ) {
structures . push ( [ 'a' + i ] ) ;
}
const structures2 = [ ... structures ] ;
const packr = new Packr ( {
getStructures ( ) {
return structures
} ,
saveStructures ( structures ) {
} ,
maxSharedStructures : 100
} ) ;
const packr2 = new Packr ( {
getStructures ( ) {
return structures2
} ,
saveStructures ( structures ) {
} ,
maxSharedStructures : 100
} ) ;
const inputData = { a35 : 35 } ;
const buffer = packr . pack ( inputData ) ;
const outputData = packr2 . decode ( buffer ) ;
assert . deepEqual ( inputData , outputData ) ;
} ) ;
test ( 'big buffer' , function ( ) {
var size = 100000000 ;
var data = new Uint8Array ( size ) . fill ( 1 ) ;
var packed = pack ( data ) ;
var unpacked = unpack ( packed ) ;
assert . equal ( unpacked . length , size ) ;
} ) ;
test ( 'random strings' , function ( ) {
var data = [ ] ;
for ( var i = 0 ; i < 2000 ; i ++ ) {
var str = 'test' ;
while ( Math . random ( ) < 0.7 && str . length < 0x100000 ) {
str = str + String . fromCharCode ( 90 / ( Math . random ( ) + 0.01 ) ) + str ;
}
data . push ( str ) ;
}
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'map/date' , function ( ) {
var map = new Map ( ) ;
map . set ( 4 , 'four' ) ;
map . set ( 'three' , 3 ) ;
var data = {
map : map ,
date : new Date ( 1532219539733 ) ,
farFutureDate : new Date ( 3532219539133 ) ,
ancient : new Date ( - 3532219539133 ) ,
} ;
let packr = new Packr ( ) ;
var serialized = packr . pack ( data ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . equal ( deserialized . map . get ( 4 ) , 'four' ) ;
assert . equal ( deserialized . map . get ( 'three' ) , 3 ) ;
assert . equal ( deserialized . date . getTime ( ) , 1532219539733 ) ;
assert . equal ( deserialized . farFutureDate . getTime ( ) , 3532219539133 ) ;
assert . equal ( deserialized . ancient . getTime ( ) , - 3532219539133 ) ;
} ) ;
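// mapsAsObjects decodes the Map as a plain object, and useTimestamp32 stores
// the date with one-second precision, so milliseconds are dropped on decode.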
test ( 'map/date with options' , function ( ) {
var map = new Map ( ) ;
map . set ( 4 , 'four' ) ;
map . set ( 'three' , 3 ) ;
var data = {
map : map ,
date : new Date ( 1532219539011 ) ,
} ;
let packr = new Packr ( {
mapsAsObjects : true ,
useTimestamp32 : true ,
} ) ;
var serialized = packr . pack ( data ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . equal ( deserialized . map [ 4 ] , 'four' ) ;
assert . equal ( deserialized . map . three , 3 ) ;
assert . equal ( deserialized . date . getTime ( ) , 1532219539000 ) ;
} ) ;
test ( 'key caching' , function ( ) {
var data = {
foo : 2 ,
bar : 'test' ,
four : 4 ,
seven : 7 ,
foz : 3 ,
} ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
// do multiple times to test caching
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'strings' , function ( ) {
var data = [ '' ] ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
// do multiple times
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
data = 'decode this: ᾜ' ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
data = 'decode this that is longer but without any non-latin characters' ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
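// DECIMAL_FIT lets numbers that round-trip through a 32-bit float without
// changing their decimal representation be stored as float32; the expected
// serialized length checks that the smaller encoding was actually used.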
test ( 'decimal float32' , function ( ) {
var data = {
a : 2.526 ,
b : 0.0035235 ,
c : 0.00000000000352501 ,
d : 3252.77 ,
} ;
let packr = new Packr ( {
useFloat32 : DECIMAL_FIT
} ) ;
var serialized = packr . pack ( data ) ;
assert . equal ( serialized . length , 32 ) ;
var deserialized = packr . unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'numbers' , function ( ) {
var data = {
bigEncodable : 48978578104322 ,
dateEpoch : 1530886513200 ,
realBig : 3432235352353255323 ,
decimal : 32.55234 ,
negative : - 34.11 ,
exponential : 0.234e123 ,
tiny : 3.233e-120 ,
zero : 0 ,
//negativeZero: -0,
Infinity : Infinity
} ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
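// BigInts that fit in 64 bits round-trip exactly; larger ones throw unless
// largeBigIntToFloat is set, in which case they come back as approximate
// doubles.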
test ( 'bigint' , function ( ) {
var data = {
bigintSmall : 352n ,
bigintSmallNegative : -333335252n ,
bigintBig : 2n ** 64n - 1n , // biggest possible
bigintBigNegative : - ( 2n ** 63n ) , // most negative
mixedWithNormal : 44 ,
} ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
var tooBigInt = {
tooBig : 2n ** 66n
} ;
assert . throws ( function ( ) { serialized = pack ( tooBigInt ) ; } ) ;
let packr = new Packr ( {
largeBigIntToFloat : true
} ) ;
serialized = packr . pack ( tooBigInt ) ;
deserialized = unpack ( serialized ) ;
assert . isTrue ( deserialized . tooBig > 2n ** 65n ) ;
} ) ;
test ( 'roundFloat32' , function ( ) {
assert . equal ( roundFloat32 ( 0.00333000003 ) , 0.00333 ) ;
assert . equal ( roundFloat32 ( 43.29999999993 ) , 43.3 ) ;
} ) ;
test ( 'buffers' , function ( ) {
var data = {
buffer1 : new Uint8Array ( [ 2 , 3 , 4 ] ) ,
buffer2 : new Uint8Array ( pack ( sampleData ) )
} ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'notepack test' , function ( ) {
const data = {
foo : 1 ,
bar : [ 1 , 2 , 3 , 4 , 'abc' , 'def' ] ,
foobar : {
foo : true ,
bar : - 2147483649 ,
foobar : {
foo : new Uint8Array ( [ 1 , 2 , 3 , 4 , 5 ] ) ,
bar : 1.5 ,
foobar : [ true , false , 'abcdefghijkmonpqrstuvwxyz' ]
}
}
} ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
var deserialized = unpack ( serialized ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
test ( 'utf16 causing expansion' , function ( ) {
this . timeout ( 10000 ) ;
let data = { fixstr : 'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ' , str8 : 'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ' } ;
var serialized = pack ( data ) ;
var deserialized = unpack ( serialized ) ;
assert . deepEqual ( deserialized , data ) ;
} ) ;
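// unpackMultiple decodes a buffer holding several concatenated values,
// either returning them as an array or invoking a callback per value.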
test ( 'unpackMultiple' , ( ) => {
let values = unpackMultiple ( new Uint8Array ( [ 1 , 2 , 3 , 4 ] ) ) ;
assert . deepEqual ( values , [ 1 , 2 , 3 , 4 ] ) ;
values = [ ] ;
unpackMultiple ( new Uint8Array ( [ 1 , 2 , 3 , 4 ] ) , value => values . push ( value ) ) ;
assert . deepEqual ( values , [ 1 , 2 , 3 , 4 ] ) ;
} ) ;
} ) ;
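// Rough timing comparison of JSON.parse/stringify versus msgpackr
// pack/unpack over ITERATIONS runs of the sampleData fixture; serialized
// sizes are logged for reference.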
suite ( 'msgpackr performance tests' , function ( ) {
test ( 'performance JSON.parse' , function ( ) {
var data = sampleData ;
this . timeout ( 10000 ) ;
var serialized = JSON . stringify ( data ) ;
console . log ( 'JSON size' , serialized . length ) ;
for ( var i = 0 ; i < ITERATIONS ; i ++ ) {
var deserialized = JSON . parse ( serialized ) ;
}
} ) ;
test ( 'performance JSON.stringify' , function ( ) {
var data = sampleData ;
this . timeout ( 10000 ) ;
for ( var i = 0 ; i < ITERATIONS ; i ++ ) {
var serialized = JSON . stringify ( data ) ;
}
} ) ;
test ( 'performance unpack' , function ( ) {
var data = sampleData ;
this . timeout ( 10000 ) ;
let structures = [ ] ;
var serialized = pack ( data ) ;
console . log ( 'MessagePack size' , serialized . length ) ;
let packr = new Packr ( { structures } ) ;
var serialized = packr . pack ( data ) ;
console . log ( 'msgpackr w/ record ext size' , serialized . length ) ;
for ( var i = 0 ; i < ITERATIONS ; i ++ ) {
var deserialized = packr . unpack ( serialized ) ;
}
} ) ;
test ( 'performance pack' , function ( ) {
var data = sampleData ;
this . timeout ( 10000 ) ;
let structures = [ ] ;
let packr = new Packr ( { structures } ) ;
let buffer = typeof Buffer != 'undefined' ? Buffer . alloc ( 0x10000 ) : new Uint8Array ( 0x10000 ) ;
for ( var i = 0 ; i < ITERATIONS ; i ++ ) {
//serialized = pack(data, { shared: sharedStructure })
packr . useBuffer ( buffer ) ;
var serialized = packr . pack ( data ) ;
//var serializedGzip = deflateSync(serialized)
}
//console.log('serialized', serialized.length, global.propertyComparisons)
} ) ;
} ) ;
} ( msgpackr , chai ) ) ;