diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..eecf70db --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.vscode/*.json linguist-language=jsonc diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..0f7e6df0 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,108 @@ +name: CI + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +jobs: + nodejs: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - '18' + - '20' + - '22' + - '24' + steps: + - uses: actions/checkout@v6 + - name: Setup Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v6 + with: + cache: npm + node-version: ${{ matrix.node-version }} + - run: npm install -g nyc + - run: npm ci + - run: npm run test:cover + - uses: codecov/codecov-action@v5 + with: + files: coverage/coverage-final.json + token: ${{ secrets.CODECOV_TOKEN }} + + browser: + runs-on: ubuntu-latest + strategy: + matrix: + browser: [ChromeHeadless, FirefoxHeadless] + steps: + - uses: actions/checkout@v6 + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + cache: npm + node-version: '22' + - run: npm install -g npm + - run: npm ci + - run: npm run test:browser -- --browsers ${{ matrix.browser }} + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + cache: npm + node-version: '22' + - run: npm ci + - run: npx tsgo + - run: npm run lint + + deno: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Setup Deno + uses: denoland/setup-deno@v2 + with: + deno-version: "v2.x" + - run: npm ci + - run: npm run test:deno + + bun: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + - run: bun install + - run: npm run test:bun + + node_with_strip_types: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + cache: npm + node-version: '24' + - run: npm ci + - run: npm run test:node_with_strip_types + + timeline: + runs-on: ubuntu-latest + permissions: + actions: read + needs: + - nodejs + - browser + - lint + - deno + - bun + steps: + - uses: Kesin11/actions-timeline@v2 + diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000..685134e5 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,30 @@ +name: "CodeQL" + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + schedule: + - cron: '44 6 * * 6' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + permissions: + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v4 + with: + languages: typescript + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v4 diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml new file mode 100644 index 00000000..1d4dd374 --- /dev/null +++ b/.github/workflows/fuzz.yml @@ -0,0 +1,28 @@ +# https://gitlab.com/gitlab-org/security-products/analyzers/fuzzers/jsfuzz + +name: Fuzz + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +jobs: + fuzzing: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + cache: npm + node-version: "20" + + # npm@9 may fail with https://github.com/npm/cli/issues/6723 + # 
npm@10 may fail with "GitFetcher requires an Arborist constructor to pack a tarball" + - run: npm install -g npm@8 + - run: npm ci + - run: npm run test:fuzz diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..7a381633 --- /dev/null +++ b/.gitignore @@ -0,0 +1,22 @@ +node_modules/ +dist/ +dist.*/ +build/ +.nyc_output/ +coverage/ +benchmark/sandbox.ts + +# v8 profiler logs +isolate-*.log + +# tsimp +.tsimp/ + +# deno +deno.lock + +# flamebearer +flamegraph.html + +# jsfuzz +corpus/ diff --git a/.mocharc.js b/.mocharc.js new file mode 100644 index 00000000..cc57238c --- /dev/null +++ b/.mocharc.js @@ -0,0 +1,10 @@ +'use strict'; + +require("ts-node/register"); + +module.exports = { + diff: true, + extension: ['ts'], + package: '../package.json', + timeout: 10000, +}; diff --git a/.nycrc.json b/.nycrc.json new file mode 100644 index 00000000..89492926 --- /dev/null +++ b/.nycrc.json @@ -0,0 +1,7 @@ +{ + "include": ["src/**/*.ts", "src/**/*.mts"], + "extension": [".ts", ".mtx"], + "reporter": [], + "sourceMap": true, + "instrument": true +} diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..219e8145 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,58 @@ +language: node_js +addons: + firefox: latest +env: + global: + # SAUCE_USERNAME + - secure: J+FOPE/vVK6yzVXHVE7xibFV/hV+Ehc78MBADLlE10YIY7Ag6JkVeomgqRFB9I8zFzj5DALkpzOLGx4iIrFs6iYiNnEcl39fkm8myHl8xIuW+KHt5QOsCtM5qmvfSEZhJV+La0lSzFicjY9VX90VLZvJOHIbiCvIFRoxnwYVw6o= + # SAUCE_ACCESS_KEY + - secure: ay3CSAjya+UQDi0RulLIl6q25oobwLsjLbdkeASgjBq0qN5dXgFgEpBjecBxFqPGrwzzCj9K9fR81NWV80EjLkGdcfN0oGx0wvsOo2C2ulWGHc1dRgKUnMKAA2TL3br14KMfmGn6fmr+fA7Vq+qWajQpExlG0Kuw68C9iNuKIQw= +matrix: + include: + - node_js: 10 + - node_js: 12 + - node_js: 14 + - node_js: lts/* + env: BROWSER=FirefoxHeadless + - node_js: lts/* + env: BROWSER=slChrome + - node_js: lts/* + env: BROWSER=slFirefox + - node_js: lts/* + env: BROWSER=slSafari + - node_js: lts/* + env: BROWSER=slIE + - node_js: lts/* + env: BROWSER=slEdge + - node_js: lts/* + env: BROWSER=slIos + - node_js: lts/* + env: BROWSER=slAndroid + fast_finish: true + allow_failures: + # Because Travis CI does not expose credentials to pull-request builds from forked repositories. + # https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions + - env: BROWSER=slChrome + - env: BROWSER=slFirefox + - env: BROWSER=slSafari + - env: BROWSER=slIE + - env: BROWSER=slEdge + - env: BROWSER=slIos + - env: BROWSER=slAndroid +cache: npm +install: | + npm install -g npm + if [ "${BROWSER}" = "" ] + then npm install -g nyc codecov + fi + npm ci +script: | + if [ "${BROWSER}" = "" ] + then npm run test:cover + else + travis_wait 600 npm run test:browser -- --browsers "$BROWSER" + fi +after_success: | + if [ "${BROWSER}" = "" ] + then codecov -f coverage/*.json + fi diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..2035884f --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,10 @@ +{ + // List of extensions which should be recommended for users of this workspace. + "recommendations": [ + "dbaeumer.vscode-eslint", + "yzhang.markdown-all-in-one" + ], + // List of extensions recommended by VS Code that should not be recommended for users of this workspace. 
+ "unwantedRecommendations": [ + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..fc2d6922 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,35 @@ +// For configurations: +// https://code.visualstudio.com/Docs/editor/debugging +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Run the current Mocha test file", + "type": "node", + "sourceMaps": true, + "request": "launch", + "internalConsoleOptions": "openOnSessionStart", + "runtimeExecutable": "npx", + "program": "mocha", + "args": [ + "--colors", + "${relativeFile}" + ], + "cwd": "${workspaceFolder}" + }, + { + "name": "Run the current TypeScript file", + "type": "node", + "sourceMaps": true, + "request": "launch", + "internalConsoleOptions": "openOnSessionStart", + "args": [ + "--nolazy", + "-r", + "ts-node/register", + "${relativeFile}" + ], + "cwd": "${workspaceFolder}" + }, + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..de744665 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,15 @@ +{ + "typescript.tsdk": "node_modules/typescript/lib", + "files.eol": "\n", + "editor.tabSize": 2, + "editor.codeActionsOnSave": { + "source.fixAll.eslint": "explicit" + }, + "cSpell.words": [ + "instanceof", + "tsdoc", + "typeof", + "whatwg" + ], + "makefile.configureOnOpen": false +} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..b459252b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,425 @@ +# This is the revision history of @msgpack/msgpack + +## 3.1.3 2025-12-26 + +https://github.com/msgpack/msgpack-javascript/compare/v3.1.2...v3.1.3 + +* More specific data types - ArrayBuffer instead of ArrayBufferLike (ts 5.9 compatibility issue) by @joshkel ([#279](https://github.com/msgpack/msgpack-javascript/pull/279)) + +## 3.1.2 2025-05-25 + +https://github.com/msgpack/msgpack-javascript/compare/v3.1.1...v3.1.2 + +* Make sure this library works with `node --experimental-strip-types` + +## 3.1.1 2025-03-12 + +https://github.com/msgpack/msgpack-javascript/compare/v3.1.0...v3.1.1 + +* Stop using `Symbol.dispose`, which is not yet supported in some environments ([#268](https://github.com/msgpack/msgpack-javascript/pull/268) by @rijenkii) + + +## 3.1.0 2025-02-21 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.1...v3.1.0 + +* Added support for nonstandard map keys in the decoder ([#266](https://github.com/msgpack/msgpack-javascript/pull/266) by @PejmanNik) + +## 3.0.1 2025-02-11 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0...v3.0.1 + +* Implement a tiny polyfill to Symbol.dispose ([#261](https://github.com/msgpack/msgpack-javascript/pull/261) to fix #260) + + +## 3.0.0 2025-02-07 + +https://github.com/msgpack/msgpack-javascript/compare/v2.8.0...v3.0.0 + +* Set the compile target to ES2020, dropping support for the dists with the ES5 target +* Fixed a bug that `encode()` and `decode()` were not re-entrant in reusing instances ([#257](https://github.com/msgpack/msgpack-javascript/pull/257)) +* Allowed the data alignment to support zero-copy decoding ([#248](https://github.com/msgpack/msgpack-javascript/pull/248), thanks to @EddiG) +* Added an option `rawStrings: boolean` to decoders ([#235](https://github.com/msgpack/msgpack-javascript/pull/235), thanks to @jasonpaulos) +* Optimized GC load by reusing stack states ([#228](https://github.com/msgpack/msgpack-javascript/pull/228), thanks to @sergeyzenchenko) +* Added an option `useBigInt64` to map JavaScript's BigInt to MessagePack's int64 and 
uint64 ([#223](https://github.com/msgpack/msgpack-javascript/pull/223)) +* Drop IE11 support ([#221](https://github.com/msgpack/msgpack-javascript/pull/221)) + * It also fixes [feature request: option to disable TEXT_ENCODING env check #219](https://github.com/msgpack/msgpack-javascript/issues/219) +* Change the interfaces of `Encoder` and `Decoder`, and describe the interfaces in README.md ([#224](https://github.com/msgpack/msgpack-javascript/pull/224)): + * `new Encoder(options: EncoderOptions)`: it takes the same named-options as `encode()` + * `new Decoder(options: DecoderOptions)`: it takes the same named-options as `decode()` + +## 3.0.0-beta6 2025-02-07 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0-beta5...v3.0.0-beta6 + +* Set the compile target to ES2020, dropping support for the dists with the ES5 target + +## 3.0.0-beta5 2025-02-06 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0-beta4...v3.0.0-beta5 + +* Fixed a bug that `encode()` and `decode()` were not re-entrant in reusing instances ([#257](https://github.com/msgpack/msgpack-javascript/pull/257)) + +## 3.0.0-beta4 2025-02-04 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0-beta3...v3.0.0-beta4 + +* Added Deno test to CI +* Added Bun tests to CI +* Allowed the data alignment to support zero-copy decoding ([#248](https://github.com/msgpack/msgpack-javascript/pull/248), thanks to @EddiG) + +## 3.0.0-beta3 2025-01-26 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0-beta2...v3.0.0-beta3 + +* Added an option `rawStrings: boolean` to decoders ([#235](https://github.com/msgpack/msgpack-javascript/pull/235), thanks to @jasonpaulos) +* Optimized GC load by reusing stack states ([#228](https://github.com/msgpack/msgpack-javascript/pull/228), thanks to @sergeyzenchenko) +* Drop support for Node.js v16 +* Type compatibility with ES2024 / SharedArrayBuffer + +## 3.0.0-beta2 + +https://github.com/msgpack/msgpack-javascript/compare/v3.0.0-beta1...v3.0.0-beta2 + +* Upgrade TypeScript compiler to v5.0 + +## 3.0.0-beta1 + +https://github.com/msgpack/msgpack-javascript/compare/v2.8.0...v3.0.0-beta1 + +* Added an option `useBigInt64` to map JavaScript's BigInt to MessagePack's int64 and uint64 ([#223](https://github.com/msgpack/msgpack-javascript/pull/223)) +* Drop IE11 support ([#221](https://github.com/msgpack/msgpack-javascript/pull/221)) + * It also fixes [feature request: option to disable TEXT_ENCODING env check #219](https://github.com/msgpack/msgpack-javascript/issues/219) +* Change the interfaces of `Encoder` and `Decoder`, and describe the interfaces in README.md ([#224](https://github.com/msgpack/msgpack-javascript/pull/224)): + * `new Encoder(options: EncoderOptions)`: it takes the same named-options as `encode()` + * `new Decoder(options: DecoderOptions)`: it takes the same named-options as `decode()` + +## 2.8.0 2022-09-02 + +https://github.com/msgpack/msgpack-javascript/compare/v2.7.2...v2.8.0 + +* Let `Encoder#encode()` return a copy of the internal buffer, instead of the reference of the buffer (fix #212). + * Introducing `Encoder#encodeSharedRef()` to return the shared reference to the internal buffer. 
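+
+For illustration, here is a minimal sketch of the difference (not part of the original release notes, and assuming the `Encoder` API described in README.md):
+
+```typescript
+import { Encoder } from "@msgpack/msgpack";
+
+const encoder = new Encoder();
+
+// encode() returns a fresh copy of the encoded bytes, safe to keep around.
+const copied = encoder.encode({ foo: "bar" });
+
+// encodeSharedRef() returns a view into the encoder's internal buffer;
+// assume it is only valid until the next encode*() call on the same instance.
+const shared = encoder.encodeSharedRef({ foo: "bar" });
+```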
+ +## 2.7.2 2022/02/08 + +https://github.com/msgpack/msgpack-javascript/compare/v2.7.1...v2.7.2 + +* Fix a build problem in Nuxt3 projects [#200](https://github.com/msgpack/msgpack-javascript/pull/200) reported by (reported as #199 in @masaha03) + +## 2.7.1 2021/09/01 + +https://github.com/msgpack/msgpack-javascript/compare/v2.7.0...v2.7.1 + +* No code changes +* Build with TypeScript 4.4 + +## 2.7.0 2021/05/20 + +https://github.com/msgpack/msgpack-javascript/compare/v2.6.3...v2.7.0 + +* Made sure timestamp decoder to raise DecodeError in errors + * This was found by fuzzing tests using [jsfuzz](https://gitlab.com/gitlab-org/security-products/analyzers/fuzzers/jsfuzz) +* Tiny optimizations and refactoring + +## 2.6.3 2021/05/04 + +https://github.com/msgpack/msgpack-javascript/compare/v2.6.2...v2.6.3 + +* Added `mod.ts` for Deno support + +## 2.6.2 2021/05/04 + +https://github.com/msgpack/msgpack-javascript/compare/v2.6.1...v2.6.2 + +* Improve Deno support (see example/deno-*.ts for details) + +## 2.6.1 2021/05/04 + +https://github.com/msgpack/msgpack-javascript/compare/v2.6.0...v2.6.1 + +* Recover Decoder instance states after `DecodeError` (mitigating [#160](https://github.com/msgpack/msgpack-javascript/issues/160)) + +## 2.6.0 2021/04/21 + +https://github.com/msgpack/msgpack-javascript/compare/v2.5.1...v2.6.0 + +* Revert use of `tslib` (added in 2.5.0) to fix [#169](https://github.com/msgpack/msgpack-javascript/issues/169) + +## v2.5.1 2021/03/21 + +https://github.com/msgpack/msgpack-javascript/compare/v2.5.0...v2.5.1 + +* Fixed the ESM package's dependencies +## v2.5.0 2021/03/21 + +https://github.com/msgpack/msgpack-javascript/compare/v2.4.1...v2.5.0 + +* Throws `DecodeError` in decoding errors +* Rejects `__proto__` as a map key, throwing `DecodeError` + * Thank you to Ninevra Leanne Walden for reporting this issue +* Added `tslib` as a dependency + +## v2.4.1 2021/03/01 + +https://github.com/msgpack/msgpack-javascript/compare/v2.4.0...v2.4.1 + +* Fixed a performance regression that `TextEncoder` and `TextDecoder` were never used even if available ([reported as #157 by @ChALkeR](https://github.com/msgpack/msgpack-javascript/issues/157)) + +## v2.4.0 2021/02/15 + +https://github.com/msgpack/msgpack-javascript/compare/v2.3.1...v2.4.0 + +* Renamed `decodeStream()` to `decodeMultiStream()` + * `decodeStream()` is kept as a deprecated function but will be removed in a future +* Added `decodeMulti()`, a synchronous variant for `decodeMultiStream()` (thanks to @Bilge for the request in [#152](https://github.com/msgpack/msgpack-javascript/issues/152)) +* Improved `decodeAsync()` and its family to accept `BufferSource` (thanks to @rajaybasu for the suggestion in [#152-issuecomment-778712021)](https://github.com/msgpack/msgpack-javascript/issues/152#issuecomment-778712021)) + +## v2.3.1 2021/02/13 + +https://github.com/msgpack/msgpack-javascript/compare/v2.3.0...v2.3.1 + +* Fixed a lot of typos +* Update dev environment: + * Migration to GitHub Actions + * Upgrade Webpack from v4 to v5 + * Enable `noImplicitReturns` and `noUncheckedIndexedAccess` in tsconfig + +## v2.3.0 2020/10/17 + +https://github.com/msgpack/msgpack-javascript/compare/v2.2.1...v2.3.0 + +* Change the extension of ESM files from `.js` to `.mjs` [#144](https://github.com/msgpack/msgpack-javascript/pull/144) +* Make the package work with `strictNullChecks: false` [#139](https://github.com/msgpack/msgpack-javascript/pull/139) by @bananaumai + +## v2.2.1 2020/10/11 + 
+https://github.com/msgpack/msgpack-javascript/compare/v2.2.0...v2.2.1 + +* Fix `package.json` for webpack to use `module` field + +## v2.2.0 2020/10/04 + +https://github.com/msgpack/msgpack-javascript/compare/v2.1.1...v2.2.0 + +* Now `package.json` has a `module` field to support ES modules + +## v2.1.1 2020/10/04 + +https://github.com/msgpack/msgpack-javascript/compare/v2.1.0...v2.1.1 + +* Fixed typos +* Refactored the codebase + +## v2.1.0 2020/09/21 + +https://github.com/msgpack/msgpack-javascript/compare/v2.0.0...v2.1.0 + +* Added `forceIntegerToFloat` option to `EncodeOptions` by @carbotaniuman ([#123](https://github.com/msgpack/msgpack-javascript/pull/123)) + +## v2.0.0 2020/09/06 + +https://github.com/msgpack/msgpack-javascript/compare/v1.12.2...v2.0.0 + +* Officially introduce direct use of `Encoder` and `Decoder` for better performance + * The major version was bumped because it changed the interface to `Encoder` and `Decoder` +* Build with TypeScript 4.0 + +## v1.12.2 2020/05/14 + +https://github.com/msgpack/msgpack-javascript/compare/v1.12.1...v1.12.2 + +* Build with TypeScript 3.9 + +## v1.12.1 2020/04/08 + +https://github.com/msgpack/msgpack-javascript/compare/v1.12.0...v1.12.1 + +* Build with TypeScript 3.8 + +## v1.12.0 2020/03/03 + +https://github.com/msgpack/msgpack-javascript/compare/v1.11.1...v1.12.0 + +* Add `EncodeOptions#ignoreUndefined` [#107](https://github.com/msgpack/msgpack-javascript/pull/107) + * Like `JSON.stringify()`, less payload size, but taking more time to encode + +## v1.11.1 2020/02/26 + +https://github.com/msgpack/msgpack-javascript/compare/v1.11.0...v1.11.1 + +* Fix use of `process.env` for browsers (#104) + +## v1.11.0 2020/01/15 + +https://github.com/msgpack/msgpack-javascript/compare/v1.10.1...v1.11.0 + +* Added support for custom context for keeping track of objects ([#101](https://github.com/msgpack/msgpack-javascript/pull/101) by @grantila) +* Export ``EncodeOptions` and `DecodeOptions` ([#100](https://github.com/msgpack/msgpack-javascript/pull/100)) + +## v1.10.1 2020/01/11 + +https://github.com/msgpack/msgpack-javascript/compare/v1.10.0...v1.10.1 + +* Re-package it with the latest Webpack and Terser + +## v1.10.0 2019/12/27 + +https://github.com/msgpack/msgpack-javascript/compare/v1.9.3...v1.10.0 + +* Remove WebAssembly implementation, which introduced complexity rather than performance ([#95](https://github.com/msgpack/msgpack-javascript/pull/95)) + +## v1.9.3 2019/10/30 + +https://github.com/msgpack/msgpack-javascript/compare/v1.9.2...v1.9.3 + +* Fix a possible crash in decoding long strings (amending #88): [#90](https://github.com/msgpack/msgpack-javascript/pull/90) by @chrisnojima + + +## v1.9.2 2019/10/30 + +https://github.com/msgpack/msgpack-javascript/compare/v1.9.1...v1.9.2 + +* Fix a possible crash in decoding long strings: [#88](https://github.com/msgpack/msgpack-javascript/pull/88) by @chrisnojima + +## v1.9.1 2019/09/20 + +https://github.com/msgpack/msgpack-javascript/compare/v1.9.0...v1.9.1 + +* No code changes from 1.9.0 +* Upgrade dev dependencies + +## v1.9.0 2019/08/31 + +https://github.com/msgpack/msgpack-javascript/compare/v1.8.0...v1.9.0 + +* [Make cachedKeyDecoder configurable by sergeyzenchenko · Pull Request \#85](https://github.com/msgpack/msgpack-javascript/pull/85) +* [Add support for numbers as map keys by sergeyzenchenko · Pull Request \#84](https://github.com/msgpack/msgpack-javascript/pull/84) +* Build with TypeScript 3.6 + +## v1.8.0 2019/08/07 + 
+https://github.com/msgpack/msgpack-javascript/compare/v1.7.0...v1.8.0 + +* Adjust internal cache size according to benchmark results [bc5e681](https://github.com/msgpack/msgpack-javascript/commit/bc5e681e781881ed27efaf97ba4156b484dc7648) +* Internal refactoring [#82](https://github.com/msgpack/msgpack-javascript7/pull/82) + +## v1.7.0 2019/08/2 + +https://github.com/msgpack/msgpack-javascript/compare/v1.6.0...v1.7.0 + +* Introduce cache for map keys, which improves decoding in 1.5x faster for the benchmark (@sergeyzenchenko) [#54](https://github.com/msgpack/msgpack-javascript/pull/54) + * + +## v1.6.0 2019/07/19 + +https://github.com/msgpack/msgpack-javascript/compare/v1.5.0...v1.6.0 + +* Add `EncodeOptions.forceFloat32` to encode non-integer numbers in float32 (default to float64) [#79](https://github.com/msgpack/msgpack-javascript/pull/79) + +## v1.5.0 2019/07/17 + +https://github.com/msgpack/msgpack-javascript/compare/v1.4.6...v1.5.0 + +* Improve `decode()` to handle `ArrayBuffer` [#78](https://github.com/msgpack/msgpack-javascript/pull/78) + +## v1.4.6 2019/07/09 + +https://github.com/msgpack/msgpack-javascript/compare/v1.4.5...v1.4.6 + +* use `TextEncoder` to encode string in UTF-8 for performance [#68](https://github.com/msgpack/msgpack-javascript/pull/68) + +## v1.4.5 2019/06/24 + +https://github.com/msgpack/msgpack-javascript/compare/v1.4.4...v1.4.5 + +* Fix an encoding result of -128 from int16 to int8 [#73](https://github.com/msgpack/msgpack-javascript/pull/73) + +## v1.4.4 2019/06/22 + +https://github.com/msgpack/msgpack-javascript/compare/v1.4.1...v1.4.4 + +* Fix the UMD build setting to correctly setup `MessagePack` module in the global object + +## v1.4.3, v1.4.2 + +Mispackaged. + +## v1.4.1 2019/06/22 + +https://github.com/msgpack/msgpack-javascript/compare/v1.4.0...v1.4.1 + +* Improved entrypoints for browsers: + * Build as UMD + * Minidifed by default + +## v1.4.0 2019/06/12 + +https://github.com/msgpack/msgpack-javascript/compare/v1.3.2...v1.4.0 + +* Added `sortKeys: boolean` option to `encode()` for canonical encoding [#64](https://github.com/msgpack/msgpack-javascript/pull/64) +* Fixed `RangeError` in encoding BLOB [#66](https://github.com/msgpack/msgpack-javascript/pull/66) + +## v1.3.2 2019/06/04 + +https://github.com/msgpack/msgpack-javascript/compare/v1.3.1...v1.3.2 + +* Fix typings for older TypeScript [#55](https://github.com/msgpack/msgpack-javascript/pull/55) + +## v1.3.1 2019/06/01 + +https://github.com/msgpack/msgpack-javascript/compare/v1.3.0...v1.3.1 + +* Fix missing exports of `decodeStream()` + +## v1.3.0 2019/05/29 + +https://github.com/msgpack/msgpack-javascript/compare/v1.2.3...v1.3.0 + +* Add `decodeArrayStream()` to decode an array and returns `AsyncIterable` [#42](https://github.com/msgpack/msgpack-javascript/pull/42) +* Add `decodeStream()` to decode an unlimited data stream [#46](https://github.com/msgpack/msgpack-javascript/pull/46) +* Let `decodeAsync()` and `decodeArrayStream()` to take `ReadalbeStream>` (whatwg-streams) [#43](https://github.com/msgpack/msgpack-javascript/pull/46) + +## v1.2.3 2019/05/29 + +https://github.com/msgpack/msgpack-javascript/compare/v1.2.2...v1.2.3 + +* More optimizations for string decoding performance + +## v1.2.2 2019/05/29 + +https://github.com/msgpack/msgpack-javascript/compare/v1.2.1...v1.2.2 + +* Improved array decoding performance ([#32](https://github.com/msgpack/msgpack-javascript/pull/32) by @sergeyzenchenko) +* Improved string decoding performance with TextDecoder 
([#34](https://github.com/msgpack/msgpack-javascript/pull/34) by @sergeyzenchenko) + +## v1.2.1 2019/05/26 + +https://github.com/msgpack/msgpack-javascript/compare/v1.2.0...v1.2.1 + +* Reduced object allocations in `encode()` + +## v1.2.0 2019/05/25 + +https://github.com/msgpack/msgpack-javascript/compare/v1.1.0...v1.2.0 + +* Shipped with WebAssembly ([#26](https://github.com/msgpack/msgpack-javascript/pull/26)) +* Fix handling strings to keep lone surrogates +* Fix issues in decoding very large string, which caused RangeError + +## v1.1.0 2019/05/19 + +https://github.com/msgpack/msgpack-javascript/compare/v1.0.0...v1.1.0 + +* Add options to `decode()` and `decodeAsync()`: + `maxStrLength`, `maxBinLength`, `maxArrayLength`, `maxMapLength`, and `maxExtLength` to limit max length of each item + +## v1.0.1 2019/05/12 + +https://github.com/msgpack/msgpack-javascript/compare/v1.0.0...v1.0.1 + +* Fix IE11 incompatibility + +## v1.0.0 2019/05/11 + +* Initial stable release diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..f0e7f3e7 --- /dev/null +++ b/LICENSE @@ -0,0 +1,5 @@ +Copyright 2019 The MessagePack Community. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..88c663a8 --- /dev/null +++ b/Makefile @@ -0,0 +1,38 @@ + +test: + npm run test + +test-all: + npm ci + npm publish --dry-run --tag "$(shell node --experimental-strip-types tools/get-release-tag.mjs)" + +publish: validate-git-status + npm publish --tag "$(shell node --experimental-strip-types tools/get-release-tag.mjs)" + git push origin main + git push origin --tags + +validate-git-status: + @ if [ "`git symbolic-ref --short HEAD`" != "main" ] ; \ + then echo "Not on the main branch!\n" ; exit 1 ; \ + fi + @ if ! 
git diff --exit-code --quiet ; \ + then echo "Local differences!\n" ; git status ; exit 1 ; \ + fi + git pull + +profile-encode: + npx rimraf isolate-*.log + node --prof --require ts-node/register -e 'require("./benchmark/profile-encode")' + node --prof-process --preprocess -j isolate-*.log | npx flamebearer + +profile-decode: + npx rimraf isolate-*.log + node --prof --require ts-node/register -e 'require("./benchmark/profile-decode")' + node --prof-process --preprocess -j isolate-*.log | npx flamebearer + +benchmark: + npx node -r ts-node/register benchmark/benchmark-from-msgpack-lite.ts + @echo + node benchmark/msgpack-benchmark.js + +.PHONY: test dist validate-branch benchmark diff --git a/README b/README deleted file mode 100644 index e69de29b..00000000 diff --git a/README.md b/README.md new file mode 100644 index 00000000..c0f4dc06 --- /dev/null +++ b/README.md @@ -0,0 +1,723 @@ +# MessagePack for ECMA-262/JavaScript/TypeScript + +[![npm version](https://img.shields.io/npm/v/@msgpack/msgpack.svg)](https://www.npmjs.com/package/@msgpack/msgpack) ![CI](https://github.com/msgpack/msgpack-javascript/workflows/CI/badge.svg) [![codecov](https://codecov.io/gh/msgpack/msgpack-javascript/branch/master/graphs/badge.svg)](https://codecov.io/gh/msgpack/msgpack-javascript) [![minzip](https://badgen.net/bundlephobia/minzip/@msgpack/msgpack)](https://bundlephobia.com/result?p=@msgpack/msgpack) [![tree-shaking](https://badgen.net/bundlephobia/tree-shaking/@msgpack/msgpack)](https://bundlephobia.com/result?p=@msgpack/msgpack) + +This library is an implementation of **MessagePack** for TypeScript and JavaScript, providing a compact and efficient binary serialization format. Learn more about MessagePack at: + +https://msgpack.org/ + +This library serves as a comprehensive reference implementation of MessagePack for JavaScript with a focus on accuracy, compatibility, interoperability, and performance. + +Additionally, this is also a universal JavaScript library. It is compatible not only with browsers, but with Node.js or other JavaScript engines that implement ES2015+ standards. As it is written in [TypeScript](https://www.typescriptlang.org/), this library bundles up-to-date type definition files (`d.ts`). + +*Note that this is the second edition of "MessagePack for JavaScript". The first edition, which was implemented in ES5 and never released to npmjs.com, is tagged as [`classic`](https://github.com/msgpack/msgpack-javascript/tree/classic). 
+ +## Synopsis + +```typescript +import { deepStrictEqual } from "assert"; +import { encode, decode } from "@msgpack/msgpack"; + +const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), +}; + +const encoded: Uint8Array = encode(object); + +deepStrictEqual(decode(encoded), object); +``` + +## Table of Contents + +- [Synopsis](#synopsis) +- [Table of Contents](#table-of-contents) +- [Install](#install) +- [API](#api) + - [`encode(data: unknown, options?: EncoderOptions): Uint8Array`](#encodedata-unknown-options-encoderoptions-uint8array) + - [`EncoderOptions`](#encoderoptions) + - [`decode(buffer: ArrayLike | BufferSource, options?: DecoderOptions): unknown`](#decodebuffer-arraylikenumber--buffersource-options-decoderoptions-unknown) + - [`DecoderOptions`](#decoderoptions) + - [`decodeMulti(buffer: ArrayLike | BufferSource, options?: DecoderOptions): Generator`](#decodemultibuffer-arraylikenumber--buffersource-options-decoderoptions-generatorunknown-void-unknown) + - [`decodeAsync(stream: ReadableStreamLike | BufferSource>, options?: DecoderOptions): Promise`](#decodeasyncstream-readablestreamlikearraylikenumber--buffersource-options-decoderoptions-promiseunknown) + - [`decodeArrayStream(stream: ReadableStreamLike | BufferSource>, options?: DecoderOptions): AsyncIterable`](#decodearraystreamstream-readablestreamlikearraylikenumber--buffersource-options-decoderoptions-asynciterableunknown) + - [`decodeMultiStream(stream: ReadableStreamLike | BufferSource>, options?: DecoderOptions): AsyncIterable`](#decodemultistreamstream-readablestreamlikearraylikenumber--buffersource-options-decoderoptions-asynciterableunknown) + - [Reusing Encoder and Decoder instances](#reusing-encoder-and-decoder-instances) +- [Extension Types](#extension-types) + - [ExtensionCodec context](#extensioncodec-context) + - [Handling BigInt with ExtensionCodec](#handling-bigint-with-extensioncodec) + - [The temporal module as timestamp extensions](#the-temporal-module-as-timestamp-extensions) +- [Faster way to decode a large array of floating point numbers](#faster-way-to-decode-a-large-array-of-floating-point-numbers) +- [Decoding a Blob](#decoding-a-blob) +- [MessagePack Specification](#messagepack-specification) + - [MessagePack Mapping Table](#messagepack-mapping-table) +- [Prerequisites](#prerequisites) + - [ECMA-262](#ecma-262) + - [NodeJS](#nodejs) + - [TypeScript Compiler / Type Definitions](#typescript-compiler--type-definitions) +- [Benchmark](#benchmark) +- [Distribution](#distribution) + - [NPM / npmjs.com](#npm--npmjscom) + - [CDN / unpkg.com](#cdn--unpkgcom) +- [Deno Support](#deno-support) +- [Bun Support](#bun-support) +- [Maintenance](#maintenance) + - [Testing](#testing) + - [Continuous Integration](#continuous-integration) + - [Release Engineering](#release-engineering) + - [Updating Dependencies](#updating-dependencies) +- [License](#license) + +## Install + +This library is published to `npmjs.com` as [@msgpack/msgpack](https://www.npmjs.com/package/@msgpack/msgpack). + +```shell +npm install @msgpack/msgpack +``` + +## API + +### `encode(data: unknown, options?: EncoderOptions): Uint8Array` + +It encodes `data` into a single MessagePack-encoded object, and returns a byte array as `Uint8Array`. It throws errors if `data` is, or includes, a non-serializable object such as a `function` or a `symbol`. 
+ +for example: + +```typescript +import { encode } from "@msgpack/msgpack"; + +const encoded: Uint8Array = encode({ foo: "bar" }); +console.log(encoded); +``` + +If you'd like to convert an `uint8array` to a NodeJS `Buffer`, use `Buffer.from(arrayBuffer, offset, length)` in order not to copy the underlying `ArrayBuffer`, while `Buffer.from(uint8array)` copies it: + +```typescript +import { encode } from "@msgpack/msgpack"; + +const encoded: Uint8Array = encode({ foo: "bar" }); + +// `buffer` refers the same ArrayBuffer as `encoded`. +const buffer: Buffer = Buffer.from(encoded.buffer, encoded.byteOffset, encoded.byteLength); +console.log(buffer); +``` + +#### `EncoderOptions` + +| Name | Type | Default | +| ------------------- | -------------- | ----------------------------- | +| extensionCodec | ExtensionCodec | `ExtensionCodec.defaultCodec` | +| context | user-defined | - | +| useBigInt64 | boolean | false | +| maxDepth | number | `100` | +| initialBufferSize | number | `2048` | +| sortKeys | boolean | false | +| forceFloat32 | boolean | false | +| forceIntegerToFloat | boolean | false | +| ignoreUndefined | boolean | false | + +### `decode(buffer: ArrayLike | BufferSource, options?: DecoderOptions): unknown` + +It decodes `buffer` that includes a MessagePack-encoded object, and returns the decoded object typed `unknown`. + +`buffer` must be an array of bytes, which is typically `Uint8Array` or `ArrayBuffer`. `BufferSource` is defined as `ArrayBuffer | ArrayBufferView`. + +The `buffer` must include a single encoded object. If the `buffer` includes extra bytes after an object or the `buffer` is empty, it throws `RangeError`. To decode `buffer` that includes multiple encoded objects, use `decodeMulti()` or `decodeMultiStream()` (recommended) instead. + +for example: + +```typescript +import { decode } from "@msgpack/msgpack"; + +const encoded: Uint8Array; +const object = decode(encoded); +console.log(object); +``` + +NodeJS `Buffer` is also acceptable because it is a subclass of `Uint8Array`. + +#### `DecoderOptions` + +| Name | Type | Default | +| --------------- | ------------------- | ---------------------------------------------- | +| extensionCodec | ExtensionCodec | `ExtensionCodec.defaultCodec` | +| context | user-defined | - | +| useBigInt64 | boolean | false | +| rawStrings | boolean | false | +| maxStrLength | number | `4_294_967_295` (UINT32_MAX) | +| maxBinLength | number | `4_294_967_295` (UINT32_MAX) | +| maxArrayLength | number | `4_294_967_295` (UINT32_MAX) | +| maxMapLength | number | `4_294_967_295` (UINT32_MAX) | +| maxExtLength | number | `4_294_967_295` (UINT32_MAX) | +| mapKeyConverter | MapKeyConverterType | throw exception if key is not string or number | + +`MapKeyConverterType` is defined as `(key: unknown) => string | number`. + +To skip UTF-8 decoding of strings, `rawStrings` can be set to `true`. In this case, strings are decoded into `Uint8Array`. + +You can use `max${Type}Length` to limit the length of each type decoded. + +### `decodeMulti(buffer: ArrayLike | BufferSource, options?: DecoderOptions): Generator` + +It decodes `buffer` that includes multiple MessagePack-encoded objects, and returns decoded objects as a generator. See also `decodeMultiStream()`, which is an asynchronous variant of this function. + +This function is not recommended to decode a MessagePack binary via I/O stream including sockets because it's synchronous. Instead, `decodeMultiStream()` decodes a binary stream asynchronously, typically spending less CPU and memory. 
+
+for example:
+
+```typescript
+import { decodeMulti } from "@msgpack/msgpack";
+
+const encoded: Uint8Array;
+
+for (const object of decodeMulti(encoded)) {
+  console.log(object);
+}
+```
+
+### `decodeAsync(stream: ReadableStreamLike<ArrayLike<number> | BufferSource>, options?: DecoderOptions): Promise<unknown>`
+
+It decodes `stream`, an async iterable of byte arrays (where `ReadableStreamLike<T>` is defined as `ReadableStream<T> | AsyncIterable<T>`), and returns the decoded object as `unknown`, wrapped in a `Promise`.
+
+This function works asynchronously, and might use CPU resources more efficiently compared with the synchronous `decode()`, because it doesn't wait for the completion of downloading.
+
+This function is designed to work with whatwg `fetch()` like this:
+
+```typescript
+import { decodeAsync } from "@msgpack/msgpack";
+
+const MSGPACK_TYPE = "application/x-msgpack";
+
+const response = await fetch(url);
+const contentType = response.headers.get("Content-Type");
+if (contentType && contentType.startsWith(MSGPACK_TYPE) && response.body != null) {
+  const object = await decodeAsync(response.body);
+  // do something with object
+} else { /* handle errors */ }
+```
+
+### `decodeArrayStream(stream: ReadableStreamLike<ArrayLike<number> | BufferSource>, options?: DecoderOptions): AsyncIterable<unknown>`
+
+It is similar to `decodeAsync()`, but it only accepts a `stream` that includes an array of items, and emits decoded items one by one.
+
+for example:
+
+```typescript
+import { decodeArrayStream } from "@msgpack/msgpack";
+
+const stream: AsyncIterator<Uint8Array>;
+
+// in an async function:
+for await (const item of decodeArrayStream(stream)) {
+  console.log(item);
+}
+```
+
+### `decodeMultiStream(stream: ReadableStreamLike<ArrayLike<number> | BufferSource>, options?: DecoderOptions): AsyncIterable<unknown>`
+
+It is similar to `decodeAsync()` and `decodeArrayStream()`, but the input `stream` must consist of multiple MessagePack-encoded items. This is an asynchronous variant of `decodeMulti()`.
+
+In other words, it can decode an unlimited stream and emits decoded items one by one.
+
+for example:
+
+```typescript
+import { decodeMultiStream } from "@msgpack/msgpack";
+
+const stream: AsyncIterator<Uint8Array>;
+
+// in an async function:
+for await (const item of decodeMultiStream(stream)) {
+  console.log(item);
+}
+```
+
+This function is available since v2.4.0; it was previously called `decodeStream()`.
+
+### Reusing Encoder and Decoder instances
+
+`Encoder` and `Decoder` classes are provided for better performance by reusing instances:
+
+```typescript
+import { deepStrictEqual } from "assert";
+import { Encoder, Decoder } from "@msgpack/msgpack";
+
+const encoder = new Encoder();
+const decoder = new Decoder();
+
+const encoded: Uint8Array = encoder.encode(object);
+deepStrictEqual(decoder.decode(encoded), object);
+```
+
+According to our benchmark, reusing an `Encoder` instance is about 20% faster
+than the `encode()` function, and reusing a `Decoder` instance is about 2% faster
+than the `decode()` function. Note that the results may vary with environments
+and data structures.
+
+`Encoder` and `Decoder` take the same options as `encode()` and `decode()` respectively.
+
+## Extension Types
+
+To handle [MessagePack Extension Types](https://github.com/msgpack/msgpack/blob/master/spec.md#extension-types), this library provides the `ExtensionCodec` class.
+ +This is an example to setup custom extension types that handles `Map` and `Set` classes in TypeScript: + +```typescript +import { encode, decode, ExtensionCodec } from "@msgpack/msgpack"; + +const extensionCodec = new ExtensionCodec(); + +// Set +const SET_EXT_TYPE = 0 // Any in 0-127 +extensionCodec.register({ + type: SET_EXT_TYPE, + encode: (object: unknown): Uint8Array | null => { + if (object instanceof Set) { + return encode([...object], { extensionCodec }); + } else { + return null; + } + }, + decode: (data: Uint8Array) => { + const array = decode(data, { extensionCodec }) as Array; + return new Set(array); + }, +}); + +// Map +const MAP_EXT_TYPE = 1; // Any in 0-127 +extensionCodec.register({ + type: MAP_EXT_TYPE, + encode: (object: unknown): Uint8Array => { + if (object instanceof Map) { + return encode([...object], { extensionCodec }); + } else { + return null; + } + }, + decode: (data: Uint8Array) => { + const array = decode(data, { extensionCodec }) as Array<[unknown, unknown]>; + return new Map(array); + }, +}); + +const encoded = encode([new Set(), new Map()], { extensionCodec }); +const decoded = decode(encoded, { extensionCodec }); +``` + +Ensure you include your extensionCodec in any recursive encode and decode statements! + +Note that extension types for custom objects must be `[0, 127]`, while `[-1, -128]` is reserved for MessagePack itself. + +### ExtensionCodec context + +When you use an extension codec, it might be necessary to have encoding/decoding state to keep track of which objects got encoded/re-created. To do this, pass a `context` to the `EncoderOptions` and `DecoderOptions`: + +```typescript +import { encode, decode, ExtensionCodec } from "@msgpack/msgpack"; + +class MyContext { + track(object: any) { /*...*/ } +} + +class MyType { /* ... */ } + +const extensionCodec = new ExtensionCodec(); + +// MyType +const MYTYPE_EXT_TYPE = 0 // Any in 0-127 +extensionCodec.register({ + type: MYTYPE_EXT_TYPE, + encode: (object, context) => { + if (object instanceof MyType) { + context.track(object); + return encode(object.toJSON(), { extensionCodec, context }); + } else { + return null; + } + }, + decode: (data, extType, context) => { + const decoded = decode(data, { extensionCodec, context }); + const my = new MyType(decoded); + context.track(my); + return my; + }, +}); + +// and later +import { encode, decode } from "@msgpack/msgpack"; + +const context = new MyContext(); + +const encoded = encode({ myType: new MyType() }, { extensionCodec, context }); +const decoded = decode(encoded, { extensionCodec, context }); +``` + +### Handling BigInt with ExtensionCodec + +This library does not handle BigInt by default, but you have two options to handle it: + +* Set `useBigInt64: true` to map bigint to MessagePack's int64/uint64 +* Define a custom `ExtensionCodec` to map bigint to a MessagePack's extension type + +`useBigInt64: true` is the simplest way to handle bigint, but it has limitations: + +* A bigint is encoded in 8 byte binaries even if it's a small integer +* A bigint must be smaller than the max value of the uint64 and larger than the min value of the int64. Otherwise the behavior is undefined. 
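+
+If those limitations are acceptable, enabling the option is enough; here is a minimal sketch (hypothetical value, using only the documented `useBigInt64` option):
+
+```typescript
+import { deepStrictEqual } from "assert";
+import { encode, decode } from "@msgpack/msgpack";
+
+// With useBigInt64, bigint is mapped to MessagePack int64/uint64 and decoded back as bigint.
+const value = 9007199254740993n; // Number.MAX_SAFE_INTEGER + 2, not exactly representable as a double
+const encoded = encode(value, { useBigInt64: true });
+deepStrictEqual(decode(encoded, { useBigInt64: true }), value);
+```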
+ +So you might want to define a custom codec to handle bigint like this: + +```typescript +import { deepStrictEqual } from "assert"; +import { encode, decode, ExtensionCodec, DecodeError } from "@msgpack/msgpack"; + +// to define a custom codec: +const BIGINT_EXT_TYPE = 0; // Any in 0-127 +const extensionCodec = new ExtensionCodec(); +extensionCodec.register({ + type: BIGINT_EXT_TYPE, + encode(input: unknown): Uint8Array | null { + if (typeof input === "bigint") { + if (input <= Number.MAX_SAFE_INTEGER && input >= Number.MIN_SAFE_INTEGER) { + return encode(Number(input)); + } else { + return encode(String(input)); + } + } else { + return null; + } + }, + decode(data: Uint8Array): bigint { + const val = decode(data); + if (!(typeof val === "string" || typeof val === "number")) { + throw new DecodeError(`unexpected BigInt source: ${val} (${typeof val})`); + } + return BigInt(val); + }, +}); + +// to use it: +const value = BigInt(Number.MAX_SAFE_INTEGER) + BigInt(1); +const encoded = encode(value, { extensionCodec }); +deepStrictEqual(decode(encoded, { extensionCodec }), value); +``` + +### The temporal module as timestamp extensions + +There is a proposal for a new date/time representations in JavaScript: + +* https://github.com/tc39/proposal-temporal + +This library maps `Date` to the MessagePack timestamp extension by default, but you can re-map the temporal module (or [Temporal Polyfill](https://github.com/tc39/proposal-temporal/tree/main/polyfill)) to the timestamp extension like this: + +```typescript +import { Instant } from "@std-proposal/temporal"; +import { deepStrictEqual } from "assert"; +import { + encode, + decode, + ExtensionCodec, + EXT_TIMESTAMP, + encodeTimeSpecToTimestamp, + decodeTimestampToTimeSpec, +} from "@msgpack/msgpack"; + +// to define a custom codec +const extensionCodec = new ExtensionCodec(); +extensionCodec.register({ + type: EXT_TIMESTAMP, // override the default behavior! + encode(input: unknown): Uint8Array | null { + if (input instanceof Instant) { + const sec = input.seconds; + const nsec = Number(input.nanoseconds - BigInt(sec) * BigInt(1e9)); + return encodeTimeSpecToTimestamp({ sec, nsec }); + } else { + return null; + } + }, + decode(data: Uint8Array): Instant { + const timeSpec = decodeTimestampToTimeSpec(data); + const sec = BigInt(timeSpec.sec); + const nsec = BigInt(timeSpec.nsec); + return Instant.fromEpochNanoseconds(sec * BigInt(1e9) + nsec); + }, +}); + +// to use it +const instant = Instant.fromEpochMilliseconds(Date.now()); +const encoded = encode(instant, { extensionCodec }); +const decoded = decode(encoded, { extensionCodec }); +deepStrictEqual(decoded, instant); +``` + +This will become default in this library with major-version increment, if the temporal module is standardized. + +## Faster way to decode a large array of floating point numbers + +If there are large arrays of floating point numbers in your payload, there +is a way to decode it faster: define a custom extension type for `Float#Array` +with alignment. + +An extension type's `encode` method can return a function that takes a parameter +`pos: number`. This parameter can be used to make alignment of the buffer, +resulting decoding it much more performant. 
+ +See an example implementation for `Float32Array`: + +```typescript +const extensionCodec = new ExtensionCodec(); + +const EXT_TYPE_FLOAT32ARRAY = 0; // Any in 0-127 +extensionCodec.register({ + type: EXT_TYPE_FLOAT32ARRAY, + encode: (object: unknown) => { + if (object instanceof Float32Array) { + return (pos: number) => { + const bpe = Float32Array.BYTES_PER_ELEMENT; + const padding = 1 + ((bpe - ((pos + 1) % bpe)) % bpe); + const data = new Uint8Array(object.buffer); + const result = new Uint8Array(padding + data.length); + result[0] = padding; + result.set(data, padding); + return result; + }; + } + return null; + }, + decode: (data: Uint8Array) => { + const padding = data[0]!; + const bpe = Float32Array.BYTES_PER_ELEMENT; + const offset = data.byteOffset + padding; + const length = data.byteLength - padding; + return new Float32Array(data.buffer, offset, length / bpe); + }, +}); +``` + +## Decoding a Blob + +[`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) is a binary data container provided by browsers. To read its contents when it contains a MessagePack binary, you can use `Blob#arrayBuffer()` or `Blob#stream()`. `Blob#stream()` +is recommended if your target platform support it. This is because streaming +decode should be faster for large objects. In both ways, you need to use +asynchronous API. + +```typescript +async function decodeFromBlob(blob: Blob): unknown { + if (blob.stream) { + // Blob#stream(): ReadableStream (recommended) + return await decodeAsync(blob.stream()); + } else { + // Blob#arrayBuffer(): Promise (if stream() is not available) + return decode(await blob.arrayBuffer()); + } +} +``` + +## MessagePack Specification + +This library is compatible with the "August 2017" revision of MessagePack specification at the point where timestamp ext was added: + +* [x] str/bin separation, added at August 2013 +* [x] extension types, added at August 2013 +* [x] timestamp ext type, added at August 2017 + +The living specification is here: + +https://github.com/msgpack/msgpack + +Note that as of June 2019 there're no official "version" on the MessagePack specification. See https://github.com/msgpack/msgpack/issues/195 for the discussions. + +### MessagePack Mapping Table + +The following table shows how JavaScript values are mapped to [MessagePack formats](https://github.com/msgpack/msgpack/blob/master/spec.md) and vice versa. + +The mapping of integers varies on the setting of `useBigInt64`. + +The default, `useBigInt64: false` is: + +| Source Value | MessagePack Format | Value Decoded | +| --------------------- | -------------------- | --------------------- | +| null, undefined | nil | null (*1) | +| boolean (true, false) | bool family | boolean (true, false) | +| number (53-bit int) | int family | number | +| number (64-bit float) | float family | number | +| string | str family | string (*2) | +| ArrayBufferView | bin family | Uint8Array (*3) | +| Array | array family | Array | +| Object | map family | Object (*4) | +| Date | timestamp ext family | Date (*5) | +| bigint | N/A | N/A (*6) | + +* *1 Both `null` and `undefined` are mapped to `nil` (`0xC0`) type, and are decoded into `null` +* *2 If you'd like to skip UTF-8 decoding of strings, set `rawStrings: true`. In this case, strings are decoded into `Uint8Array`. 
+* *3 Any `ArrayBufferView`s including NodeJS's `Buffer` are mapped to `bin` family, and are decoded into `Uint8Array` +* *4 In handling `Object`, it is regarded as `Record` in terms of TypeScript +* *5 MessagePack timestamps may have nanoseconds, which will lost when it is decoded into JavaScript `Date`. This behavior can be overridden by registering `-1` for the extension codec. +* *6 bigint is not supported in `useBigInt64: false` mode, but you can define an extension codec for it. + +If you set `useBigInt64: true`, the following mapping is used: + +| Source Value | MessagePack Format | Value Decoded | +| --------------------------------- | -------------------- | --------------------- | +| null, undefined | nil | null | +| boolean (true, false) | bool family | boolean (true, false) | +| **number (32-bit int)** | int family | number | +| **number (except for the above)** | float family | number | +| **bigint** | int64 / uint64 | bigint (*7) | +| string | str family | string | +| ArrayBufferView | bin family | Uint8Array | +| Array | array family | Array | +| Object | map family | Object | +| Date | timestamp ext family | Date | + + +* *7 If the bigint is larger than the max value of uint64 or smaller than the min value of int64, then the behavior is undefined. + +## Prerequisites + +This is a universal JavaScript library that supports major browsers and NodeJS. + +### ECMA-262 + +* ES2015 language features +* ES2024 standard library, including: + * Typed arrays (ES2015) + * Async iterations (ES2018) + * Features added in ES2015-ES2022 +* whatwg encodings (`TextEncoder` and `TextDecoder`) + +ES2022 standard library used in this library can be polyfilled with [core-js](https://github.com/zloirock/core-js). + +IE11 is no longer supported. If you'd like to use this library in IE11, use v2.x versions. + +### NodeJS + +NodeJS v18 is required. + +### TypeScript Compiler / Type Definitions + +This module requires type definitions of `AsyncIterator`, `ArrayBufferLike`, whatwg streams, and so on. They are provided by `"lib": ["ES2024", "DOM"]` in `tsconfig.json`. + +Regarding the TypeScript compiler version, only the latest TypeScript is tested in development. + +## Benchmark + +Run-time performance is not the only reason to use MessagePack, but it's important to choose MessagePack libraries, so a benchmark suite is provided to monitor the performance of this library. + +V8's built-in JSON has been improved for years, esp. `JSON.parse()` is [significantly improved in V8/7.6](https://v8.dev/blog/v8-release-76), it is the fastest deserializer as of 2019, as the benchmark result bellow suggests. + +However, MessagePack can handles binary data effectively, actual performance depends on situations. Esp. streaming-decoding may be significantly faster than non-streaming decoding if it's effective. You'd better take benchmark on your own use-case if performance matters. 
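+
+As a starting point for such a benchmark, here is a minimal sketch that compares buffered and streaming decoding of a fetched payload (the URL is a placeholder; it only uses the `decode()` and `decodeAsync()` APIs described above):
+
+```typescript
+import { decode, decodeAsync } from "@msgpack/msgpack";
+
+// Buffered: download the whole body first, then decode it synchronously.
+async function decodeBuffered(url: string): Promise<unknown> {
+  const response = await fetch(url);
+  return decode(await response.arrayBuffer());
+}
+
+// Streaming: decode chunks as they arrive, without buffering the whole body.
+async function decodeStreaming(url: string): Promise<unknown> {
+  const response = await fetch(url);
+  return decodeAsync(response.body!);
+}
+```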
+ +Benchmark on NodeJS/v22.13.1 (V8/12.4) + +| operation | op | ms | op/s | +| ------------------------------------------------- | ------: | ---: | -----: | +| buf = Buffer.from(JSON.stringify(obj)); | 1348700 | 5000 | 269740 | +| obj = JSON.parse(buf.toString("utf-8")); | 1700300 | 5000 | 340060 | +| buf = require("msgpack-lite").encode(obj); | 591300 | 5000 | 118260 | +| obj = require("msgpack-lite").decode(buf); | 539500 | 5000 | 107900 | +| buf = require("@msgpack/msgpack").encode(obj); | 1238700 | 5000 | 247740 | +| obj = require("@msgpack/msgpack").decode(buf); | 1402000 | 5000 | 280400 | +| buf = /* @msgpack/msgpack */ encoder.encode(obj); | 1379800 | 5000 | 275960 | +| obj = /* @msgpack/msgpack */ decoder.decode(buf); | 1406100 | 5000 | 281220 | + +Note that `JSON` cases use `Buffer` to emulate I/O where a JavaScript string must be converted into a byte array encoded in UTF-8, whereas MessagePack modules deal with byte arrays. + +## Distribution + +### NPM / npmjs.com + +The NPM package distributed in npmjs.com includes both ES2015+ and ES5 files: + +* `dist/` is compiled into ES2020 with CommomJS, provided for NodeJS v10 +* `dist.umd/` is compiled into ES5 with UMD + * `dist.umd/msgpack.min.js` - the minified file + * `dist.umd/msgpack.js` - the non-minified file +* `dist.esm/` is compiled into ES2020 with ES modules, provided for webpack-like bundlers and NodeJS's ESM-mode + +If you use NodeJS and/or webpack, their module resolvers use the suitable one automatically. + +### CDN / unpkg.com + +This library is available via CDN: + +```html + +``` + +It loads `MessagePack` module to the global object. + + +## Deno Support + +You can use this module on Deno. + +See `example/deno-*.ts` for examples. + +`deno.land/x` is not supported. + +## Bun Support + +You can use this module on Bun. + +## Maintenance + +### Testing + +For simple testing: + +``` +npm run test +``` + +### Continuous Integration + +This library uses GitHub Actions. + +Test matrix: + +* NodeJS + * v18 / v20 / v22 +* Browsers: + * Chrome, Firefox +* Deno +* Bun + +### Release Engineering + +```console +# run tests on NodeJS, Chrome, and Firefox +make test-all + +# edit the changelog +code CHANGELOG.md + +# bump version +npm version patch|minor|major + +# run the publishing task +make publish +``` + +### Updating Dependencies + +```console +npm run update-dependencies +``` + +## License + +Copyright 2019 The MessagePack community. + +This software uses the ISC license: + +https://opensource.org/licenses/ISC + +See [LICENSE](./LICENSE) for details. diff --git a/benchmark/.gitignore b/benchmark/.gitignore new file mode 100644 index 00000000..d8b83df9 --- /dev/null +++ b/benchmark/.gitignore @@ -0,0 +1 @@ +package-lock.json diff --git a/benchmark/benchmark-from-msgpack-lite-data.json b/benchmark/benchmark-from-msgpack-lite-data.json new file mode 100644 index 00000000..874f3b8b --- /dev/null +++ b/benchmark/benchmark-from-msgpack-lite-data.json @@ -0,0 +1,52 @@ +{ + "int0": 0, + "int1": 1, + "int1-": -1, + "int8": 255, + "int8-": -255, + "int16": 256, + "int16-": -256, + "int32": 65536, + "int32-": -65536, + "nil": null, + "true": true, + "false": false, + "float": 0.5, + "float-": -0.5, + "string0": "", + "string1": "A", + "string4": "foobarbaz", + "string8": "Omnes viae Romam ducunt.", + "string16": "L’homme n’est qu’un roseau, le plus faible de la nature ; mais c’est un roseau pensant. Il ne faut pas que l’univers entier s’arme pour l’écraser : une vapeur, une goutte d’eau, suffit pour le tuer. 
Mais, quand l’univers l’écraserait, l’homme serait encore plus noble que ce qui le tue, puisqu’il sait qu’il meurt, et l’avantage que l’univers a sur lui, l’univers n’en sait rien. Toute notre dignité consiste donc en la pensée. C’est de là qu’il faut nous relever et non de l’espace et de la durée, que nous ne saurions remplir. Travaillons donc à bien penser : voilà le principe de la morale.", + "array0": [], + "array1": [ + "foo" + ], + "array8": [ + 1, + 2, + 4, + 8, + 16, + 32, + 64, + 128, + 256, + 512, + 1024, + 2048, + 4096, + 8192, + 16384, + 32768, + 65536, + 131072, + 262144, + 524288, + 1048576 + ], + "map0": {}, + "map1": { + "foo": "bar" + } +} \ No newline at end of file diff --git a/benchmark/benchmark-from-msgpack-lite.ts b/benchmark/benchmark-from-msgpack-lite.ts new file mode 100644 index 00000000..40ac6f9b --- /dev/null +++ b/benchmark/benchmark-from-msgpack-lite.ts @@ -0,0 +1,160 @@ +/* eslint-disable */ +// original: https://raw.githubusercontent.com/kawanet/msgpack-lite/master/lib/benchmark.js + +var msgpack_msgpack = require("../src/index.ts"); + +var msgpack_node = try_require("msgpack"); +var msgpack_lite = try_require("msgpack-lite"); +var msgpack_js = try_require("msgpack-js"); +var msgpackr = try_require("msgpackr"); +var msgpack5 = try_require("msgpack5"); +var notepack = try_require("notepack"); + +msgpack5 = msgpack5 && msgpack5(); + +var pkg = require("../package.json"); +var data = require("./benchmark-from-msgpack-lite-data.json"); +var packed = msgpack_lite.encode(data); +var expected = JSON.stringify(data); + +var argv = Array.prototype.slice.call(process.argv, 2); + +if (argv[0] === "-v") { + console.warn(pkg.name + " " + pkg.version); + process.exit(0); +} + +var limit = 5; +if (argv[0] - 0) limit = argv.shift() - 0; +limit *= 1000; + +var COL1 = 65; +var COL2 = 7; +var COL3 = 5; +var COL4 = 7; + +const v8version = process.versions.v8.split(/\./, 2).join('.'); +console.log(`Benchmark on NodeJS/${process.version} (V8/${v8version})\n`) +console.log(rpad("operation", COL1), "|", " op ", "|", " ms ", "|", " op/s "); +console.log(rpad("", COL1, "-"), "|", lpad(":", COL2, "-"), "|", lpad(":", COL3, "-"), "|", lpad(":", COL4, "-")); + +var buf, obj; + +if (JSON) { + buf = bench('buf = Buffer.from(JSON.stringify(obj));', JSON_stringify, data); + obj = bench('obj = JSON.parse(buf.toString("utf-8"));', JSON_parse, buf); + runTest(obj); +} + +if (msgpack_lite) { + buf = bench('buf = require("msgpack-lite").encode(obj);', msgpack_lite.encode, data); + obj = bench('obj = require("msgpack-lite").decode(buf);', msgpack_lite.decode, packed); + runTest(obj); +} + +if (msgpack_node) { + buf = bench('buf = require("msgpack").pack(obj);', msgpack_node.pack, data); + obj = bench('obj = require("msgpack").unpack(buf);', msgpack_node.unpack, buf); + runTest(obj); +} + +if (msgpack_msgpack) { + buf = bench('buf = require("@msgpack/msgpack").encode(obj);', msgpack_msgpack.encode, data); + obj = bench('obj = require("@msgpack/msgpack").decode(buf);', msgpack_msgpack.decode, buf); + runTest(obj); + + const encoder = new msgpack_msgpack.Encoder(); + const decoder = new msgpack_msgpack.Decoder(); + buf = bench('buf = /* @msgpack/msgpack */ encoder.encode(obj);', (data) => encoder.encode(data), data); + obj = bench('obj = /* @msgpack/msgpack */ decoder.decode(buf);', (buf) => decoder.decode(buf), buf); + runTest(obj); + + if (process.env["CACHE_HIT_RATE"]) { + const {hit, miss} = decoder.keyDecoder; + console.log(`CACHE_HIT_RATE: cache hit rate in CachedKeyDecoder: hit=${hit}, 
miss=${miss}, hit rate=${hit / (hit + miss)}`); + } +} + +if (msgpackr) { + buf = bench('buf = require("msgpackr").pack(obj);', msgpackr.pack, data); + obj = bench('obj = require("msgpackr").unpack(buf);', msgpackr.unpack, buf); + runTest(obj); +} + +if (msgpack_js) { + buf = bench('buf = require("msgpack-js").encode(obj);', msgpack_js.encode, data); + obj = bench('obj = require("msgpack-js").decode(buf);', msgpack_js.decode, buf); + runTest(obj); +} + +if (msgpack5) { + buf = bench('buf = require("msgpack5")().encode(obj);', msgpack5.encode, data); + obj = bench('obj = require("msgpack5")().decode(buf);', msgpack5.decode, buf); + runTest(obj); +} + +if (notepack) { + buf = bench('buf = require("notepack").encode(obj);', notepack.encode, data); + obj = bench('obj = require("notepack").decode(buf);', notepack.decode, buf); + runTest(obj); +} + +function JSON_stringify(src: any): Buffer { + return Buffer.from(JSON.stringify(src)); +} + +function JSON_parse(json: Buffer): any { + return JSON.parse(json.toString("utf-8")); +} + +function bench(name: string, func: (...args: any[]) => any, src: any) { + if (argv.length) { + var match = argv.filter(function(grep) { + return (name.indexOf(grep) > -1); + }); + if (!match.length) return SKIP; + } + // warm up + func(src); + + var ret, duration = 0; + var start = Date.now(); + var count = 0; + while (1) { + var end = Date.now(); + duration = end - start; + if (duration >= limit) break; + while ((++count) % 100) ret = func(src); + } + name = rpad(name, COL1); + var score = Math.floor(count / duration! * 1000); + console.log(name, "|", lpad(`${count}`, COL2), "|", lpad(`${duration}`, COL3), "|", lpad(`${score}`, COL4)); + return ret; +} + +function rpad(str: string, len: number, chr = " ") { + return str.padEnd(len, chr); +} + +function lpad(str: string, len: number, chr = " ") { + return str.padStart(len, chr); +} + +function runTest(actual: any) { + if (actual === SKIP) return; + actual = JSON.stringify(actual); + if (actual === expected) return; + console.warn("expected: " + expected); + console.warn("actual: " + actual); +} + +function SKIP() { +} + +function try_require(name: string) { + try { + return require(name); + } catch (e) { + // ignore + } +} diff --git a/benchmark/decode-string.ts b/benchmark/decode-string.ts new file mode 100644 index 00000000..3e6bfbb1 --- /dev/null +++ b/benchmark/decode-string.ts @@ -0,0 +1,38 @@ +/* eslint-disable no-console */ +import { utf8EncodeJs, utf8Count, utf8DecodeJs, utf8DecodeTD } from "../src/utils/utf8.ts"; + +// @ts-ignore +import Benchmark from "benchmark"; + +for (const baseStr of ["A", "あ", "🌏"]) { + const dataSet = [10, 100, 500, 1_000].map((n) => { + return baseStr.repeat(n); + }); + + for (const str of dataSet) { + const byteLength = utf8Count(str); + const bytes = new Uint8Array(new ArrayBuffer(byteLength)); + utf8EncodeJs(str, bytes, 0); + + console.log(`\n## string "${baseStr}" (strLength=${str.length}, byteLength=${byteLength})\n`); + + const suite = new Benchmark.Suite(); + + suite.add("utf8DecodeJs", () => { + if (utf8DecodeJs(bytes, 0, byteLength) !== str) { + throw new Error("wrong result!"); + } + }); + + suite.add("TextDecoder", () => { + if (utf8DecodeTD(bytes, 0, byteLength) !== str) { + throw new Error("wrong result!"); + } + }); + suite.on("cycle", (event: any) => { + console.log(String(event.target)); + }); + + suite.run(); + } +} diff --git a/benchmark/encode-string.ts b/benchmark/encode-string.ts new file mode 100644 index 00000000..df1b2835 --- /dev/null +++ 
b/benchmark/encode-string.ts @@ -0,0 +1,33 @@ +/* eslint-disable no-console */ +import { utf8EncodeJs, utf8Count, utf8EncodeTE } from "../src/utils/utf8.ts"; + +// @ts-ignore +import Benchmark from "benchmark"; + +for (const baseStr of ["A", "あ", "🌏"]) { + const dataSet = [10, 30, 50, 100].map((n) => { + return baseStr.repeat(n); + }); + + for (const str of dataSet) { + const byteLength = utf8Count(str); + const buffer = new Uint8Array(byteLength); + + console.log(`\n## string "${baseStr}" (strLength=${str.length}, byteLength=${byteLength})\n`); + + const suite = new Benchmark.Suite(); + + suite.add("utf8EncodeJs", () => { + utf8EncodeJs(str, buffer, 0); + }); + + suite.add("utf8DecodeTE", () => { + utf8EncodeTE(str, buffer, 0); + }); + suite.on("cycle", (event: any) => { + console.log(String(event.target)); + }); + + suite.run(); + } +} diff --git a/benchmark/key-decoder.ts b/benchmark/key-decoder.ts new file mode 100644 index 00000000..dec38f30 --- /dev/null +++ b/benchmark/key-decoder.ts @@ -0,0 +1,61 @@ +/* eslint-disable no-console */ +import { utf8EncodeJs, utf8Count, utf8DecodeJs } from "../src/utils/utf8.ts"; +import { CachedKeyDecoder } from "../src/CachedKeyDecoder.ts"; + +import data from "./benchmark-from-msgpack-lite-data.json" with { type: "json" }; + +// @ts-ignore +import Benchmark from "benchmark"; + +type InputType = { + bytes: Uint8Array; + byteLength: number; + str: string; +}; + +const keys: Array = Object.keys(data).map((str) => { + const byteLength = utf8Count(str); + const bytes = new Uint8Array(new ArrayBuffer(byteLength)); + utf8EncodeJs(str, bytes, 0); + return { bytes, byteLength, str }; +}); + +for (const dataSet of [keys]) { + const keyDecoder = new CachedKeyDecoder(); + // make cache storage full + for (let i = 0; i < keyDecoder.maxKeyLength; i++) { + for (let j = 0; j < keyDecoder.maxLengthPerKey; j++) { + const str = `${j.toString().padStart(i + 1, "0")}`; + const byteLength = utf8Count(str); + const bytes = new Uint8Array(new ArrayBuffer(byteLength)); + utf8EncodeJs(str, bytes, 0); + keyDecoder.decode(bytes, 0, byteLength); // fill + } + } + + // console.dir(keyDecoder, { depth: 100 }); + console.log("## When the cache storage is full."); + + const suite = new Benchmark.Suite(); + + suite.add("utf8DecodeJs", () => { + for (const data of dataSet) { + if (utf8DecodeJs(data.bytes, 0, data.byteLength) !== data.str) { + throw new Error("wrong result!"); + } + } + }); + + suite.add("CachedKeyDecoder", () => { + for (const data of dataSet) { + if (keyDecoder.decode(data.bytes, 0, data.byteLength) !== data.str) { + throw new Error("wrong result!"); + } + } + }); + suite.on("cycle", (event: any) => { + console.log(String(event.target)); + }); + + suite.run(); +} diff --git a/benchmark/msgpack-benchmark.js b/benchmark/msgpack-benchmark.js new file mode 100644 index 00000000..2fd90a57 --- /dev/null +++ b/benchmark/msgpack-benchmark.js @@ -0,0 +1,146 @@ +/* eslint-disable no-console */ +// based on https://github.com/endel/msgpack-benchmark +"use strict"; +require("ts-node/register"); +const Benchmark = require("benchmark"); + +const msgpackEncode = require("..").encode; +const msgpackDecode = require("..").decode; +const ExtensionCodec = require("..").ExtensionCodec; + +const float32ArrayExtensionCodec = new ExtensionCodec(); +float32ArrayExtensionCodec.register({ + type: 0x01, + encode: (object) => { + if (object instanceof Float32Array) { + return new Uint8Array(object.buffer, object.byteOffset, object.byteLength); + } + return null; + }, + decode: (data) => { + 
const copy = new Uint8Array(data.byteLength); + copy.set(data); + return new Float32Array(copy.buffer); + }, +}); + +const float32ArrayZeroCopyExtensionCodec = new ExtensionCodec(); +float32ArrayZeroCopyExtensionCodec.register({ + type: 0x01, + encode: (object) => { + if (object instanceof Float32Array) { + return (pos) => { + const bpe = Float32Array.BYTES_PER_ELEMENT; + const padding = 1 + ((bpe - ((pos + 1) % bpe)) % bpe); + const data = new Uint8Array(object.buffer); + const result = new Uint8Array(padding + data.length); + result[0] = padding; + result.set(data, padding); + return result; + }; + } + return null; + }, + decode: (data) => { + const padding = data[0]; + const bpe = Float32Array.BYTES_PER_ELEMENT; + const offset = data.byteOffset + padding; + const length = data.byteLength - padding; + return new Float32Array(data.buffer, offset, length / bpe); + }, +}); + +const implementations = { + "@msgpack/msgpack": { + encode: msgpackEncode, + decode: msgpackDecode, + }, + "@msgpack/msgpack (Float32Array extension)": { + encode: (data) => msgpackEncode(data, { extensionCodec: float32ArrayExtensionCodec }), + decode: (data) => msgpackDecode(data, { extensionCodec: float32ArrayExtensionCodec }), + }, + "@msgpack/msgpack (Float32Array with zero-copy extension)": { + encode: (data) => msgpackEncode(data, { extensionCodec: float32ArrayZeroCopyExtensionCodec }), + decode: (data) => msgpackDecode(data, { extensionCodec: float32ArrayZeroCopyExtensionCodec }), + }, + "msgpack-lite": { + encode: require("msgpack-lite").encode, + decode: require("msgpack-lite").decode, + }, + "notepack.io": { + encode: require("notepack.io/browser/encode"), + decode: require("notepack.io/browser/decode"), + }, +}; + +const samples = [ + { + // exactly the same as: + // https://raw.githubusercontent.com/endel/msgpack-benchmark/master/sample-large.json + name: "./sample-large.json", + data: require("./sample-large.json"), + }, + { + name: "Large array of numbers", + data: [ + { + position: new Array(1e3).fill(1.14), + }, + ], + }, + { + name: "Large Float32Array", + data: [ + { + position: new Float32Array(1e3).fill(1.14), + }, + ], + }, +]; + +function validate(name, data, encoded) { + return JSON.stringify(data) === JSON.stringify(implementations[name].decode(encoded)); +} + +for (const sample of samples) { + const { name: sampleName, data } = sample; + const encodeSuite = new Benchmark.Suite(); + const decodeSuite = new Benchmark.Suite(); + + console.log(""); + console.log("**" + sampleName + ":** (" + JSON.stringify(data).length + " bytes in JSON)"); + console.log(""); + + for (const name of Object.keys(implementations)) { + implementations[name].toDecode = implementations[name].encode(data); + if (!validate(name, data, implementations[name].toDecode)) { + console.log("```"); + console.log("Not supported by " + name); + console.log("```"); + continue; + } + encodeSuite.add("(encode) " + name, () => { + implementations[name].encode(data); + }); + decodeSuite.add("(decode) " + name, () => { + implementations[name].decode(implementations[name].toDecode); + }); + } + encodeSuite.on("cycle", (event) => { + console.log(String(event.target)); + }); + + console.log("```"); + encodeSuite.run(); + console.log("```"); + + console.log(""); + + decodeSuite.on("cycle", (event) => { + console.log(String(event.target)); + }); + + console.log("```"); + decodeSuite.run(); + console.log("```"); +} diff --git a/benchmark/package.json b/benchmark/package.json new file mode 100644 index 00000000..ddc30b8c --- /dev/null +++ 
b/benchmark/package.json @@ -0,0 +1,15 @@ +{ + "name": "@msgpack/msgpack-benchmark", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "update-dependencies": "npx rimraf node_modules/ package-lock.json ; npm install ; npm audit fix --force ; git restore package.json ; npm install" + }, + "dependencies": { + "benchmark": "latest", + "msgpack-lite": "latest", + "msgpackr": "latest", + "notepack.io": "latest" + } +} diff --git a/benchmark/profile-decode.ts b/benchmark/profile-decode.ts new file mode 100644 index 00000000..512d87e0 --- /dev/null +++ b/benchmark/profile-decode.ts @@ -0,0 +1,26 @@ +import { encode, decode, decodeAsync } from "../src/index.ts"; +// @ts-ignore +import _ from "lodash"; +const data = require("./benchmark-from-msgpack-lite-data.json"); +const dataX = _.cloneDeep(new Array(100).fill(data)); +const encoded = encode(dataX); + +console.log("encoded size:", encoded.byteLength); + +console.time("decode #1"); +for (let i = 0; i < 1000; i++) { + decode(encoded); +} +console.timeEnd("decode #1"); + +(async () => { + const buffers = async function*() { + yield encoded; + }; + + console.time("decodeAsync #1"); + for (let i = 0; i < 1000; i++) { + await decodeAsync(buffers()); + } + console.timeEnd("decodeAsync #1"); +})(); diff --git a/benchmark/profile-encode.ts b/benchmark/profile-encode.ts new file mode 100644 index 00000000..245e71b5 --- /dev/null +++ b/benchmark/profile-encode.ts @@ -0,0 +1,18 @@ +import { encode } from "../src/index.ts"; +// @ts-ignore +import _ from "lodash"; + +const data = require("./benchmark-from-msgpack-lite-data.json"); +const dataX = _.cloneDeep(new Array(100).fill(data)); + +console.time("encode #1"); +for (let i = 0; i < 1000; i++) { + encode(dataX); +} +console.timeEnd("encode #1"); + +console.time("encode #2"); +for (let i = 0; i < 1000; i++) { + encode(dataX); +} +console.timeEnd("encode #2"); diff --git a/benchmark/sample-large.json b/benchmark/sample-large.json new file mode 100644 index 00000000..cd393880 --- /dev/null +++ b/benchmark/sample-large.json @@ -0,0 +1,257 @@ +[ + { + "_id":"56490c18d9275a0003000000", + "author":null, + "created_at":"2015-11-15T22:50:00.170Z", + "description":"A weekly discussion by Ruby developers about programming, life, and careers.", + "image":"https://s3.amazonaws.com/devchat.tv/ruby-rogues-thumb.jpg", + "keywords":[ + "Business", + "Careers", + "Technology", + "Software How-To" + ], + "language":"en", + "permalink":"http://rubyrogues.com/", + "published":true, + "title":"The Ruby Rogues", + "updated_at":"2015-11-15T22:50:06.565Z", + "url":"http://feeds.feedwrench.com/RubyRogues.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"56490d6ad9275a00030000eb", + "author":null, + "created_at":"2015-11-15T22:55:38.074Z", + "description":"Um podcast feito para programadores e empreendedores.", + "image":"http://www.grokpodcast.com/images/logo_itunes_grande.png", + "keywords":[ + "Technology", + "Podcasting", + "Business", + "Careers" + ], + "language":"pt-BR", + "permalink":"http://www.grokpodcast.com/", + "published":true, + "title":"Grok Podcast", + "updated_at":"2015-11-15T22:55:47.498Z", + "url":"http://www.grokpodcast.com/atom.xml", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a1c30b1191d0003000000", + "author":null, + "created_at":"2015-11-16T18:10:56.610Z", + "description":"The Web Platform Podcast is a developer discussion that dives deep into ‘all things’ web. We discuss everything from developing for mobile to building HDTV software. 
From wearables \u0026 robotics to user experience \u0026 mentoring, we bring to our listeners everything related to building products \u0026 services for The Web Platform of today, tomorrow, and beyond.", + "image":"http://static.libsyn.com/p/assets/f/7/2/0/f7208dae16d0543e/twp-logo-flat-blue-square.png", + "keywords":[ + "Technology", + "Software How-To", + "Tech News" + ], + "language":"en", + "permalink":"http://thewebplatform.libsyn.com/webpage", + "published":true, + "title":"The Web Platform Podcast", + "updated_at":"2015-11-16T18:11:02.022Z", + "url":"http://thewebplatform.libsyn.com//rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a1de3b1191d0003000047", + "author":null, + "created_at":"2015-11-16T18:18:11.854Z", + "description":"Developer Tea is a podcast for web and software developers hosted by a developer that you can listen to in less than 10 minutes. The show will cover a wide variety of topics related to the career of being a developer. We hope you'll take the topics from this podcast and continue the conversation, either online or in person with your peers. The show is hosted by Jonathan Cutrell, Director of Technology at Whiteboard and the author of Hacking the Impossible, a developer's guide to working with visionaries. :: Twitter: @developertea @jcutrell :: Email: developertea@gmail.com", + "image":"http://simplecast-media.s3.amazonaws.com/podcast/image/363/1440374119-artwork.jpg", + "keywords":[ + "Technology", + "Business", + "Careers", + "Society \u0026 Culture" + ], + "language":"en-us", + "permalink":"http://www.developertea.com/", + "published":true, + "title":"Developer Tea", + "updated_at":"2015-11-16T23:00:23.224Z", + "url":"http://feeds.feedburner.com/developertea", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a3163e51cc0000300004c", + "author":null, + "created_at":"2015-11-16T19:41:23.436Z", + "description":"Conference talks from the Remote Conferences series put on by Devchat.tv", + "image":"https://s3.amazonaws.com/devchat.tv/RemoteConfs.jpg", + "keywords":[ + + ], + "language":"en", + "permalink":"http://remoteconfs.com/", + "published":true, + "title":"Remote Conferences - Audio", + "updated_at":"2015-11-16T19:41:24.367Z", + "url":"http://feeds.feedwrench.com/remoteconfs-audio.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a315de51cc00003000000", + "author":null, + "created_at":"2015-11-16T19:41:17.492Z", + "description":"Weekly discussion by freelancers and professionals about running a business, finding clients, marketing, and lifestyle related to being a freelancer.", + "image":"https://s3.amazonaws.com/devchat.tv/freelancers_show_thumb.jpg", + "keywords":[ + "Business", + "Careers", + "Management \u0026amp; Marketing", + "Education", + "Training" + ], + "language":"en", + "permalink":"http://www.freelancersshow.com/", + "published":true, + "title":"The Freelancers' Show", + "updated_at":"2015-11-16T19:41:27.459Z", + "url":"http://feeds.feedwrench.com/TheFreelancersShow.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a3169e51cc000030000cd", + "author":null, + "created_at":"2015-11-16T19:41:29.686Z", + "description":"React Native Radio Podcast", + "image":"https://s3.amazonaws.com/devchat.tv/react-native-radio-album-art.jpg", + "keywords":[ + + ], + "language":"en", + "permalink":"http://devchat.tv/react-native-radio", + "published":true, + "title":"React Native Radio", + "updated_at":"2015-11-16T19:41:29.999Z", + "url":"http://feeds.feedwrench.com/react-native-radio.rss", + "score1": 100, + "score2": 0.1 + 
}, + { + "_id":"564a316fe51cc000030000d4", + "author":null, + "created_at":"2015-11-16T19:41:35.937Z", + "description":"The iOS Development Podcast", + "image":"https://s3.amazonaws.com/devchat.tv/iPhreaks-thumb.jpg", + "keywords":[ + "Technology", + "Tech News", + "Software How-To" + ], + "language":"en", + "permalink":"http://iphreaksshow.com/", + "published":true, + "title":"The iPhreaks Show", + "updated_at":"2015-11-16T19:41:43.700Z", + "url":"http://feeds.feedwrench.com/iPhreaks.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a3184e51cc00003000156", + "author":null, + "created_at":"2015-11-16T19:41:56.874Z", + "description":"Weekly podcast discussion about Javascript on the front and back ends. Also discuss programming practices, coding environments, and the communities related to the technology.", + "image":"https://s3.amazonaws.com/devchat.tv/javascript_jabber_thumb.jpg", + "keywords":[ + "Education", + "Training", + "Technology", + "Software How-To" + ], + "language":"en", + "permalink":"http://javascriptjabber.com/", + "published":true, + "title":"JavaScript Jabber", + "updated_at":"2015-11-16T19:42:24.692Z", + "url":"http://feeds.feedwrench.com/JavaScriptJabber.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a31dee51cc00003000210", + "author":null, + "created_at":"2015-11-16T19:43:26.390Z", + "description":"Each week we explore an aspect of web security.", + "image":"http://devchat.cachefly.net/websecwarriors/logo_3000x3000.jpeg", + "keywords":[ + + ], + "language":"en", + "permalink":"http://websecuritywarriors.com/", + "published":true, + "title":"Web Security Warriors", + "updated_at":"2015-11-16T19:43:28.133Z", + "url":"http://feeds.feedwrench.com/websecwarriors.rss", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564a3ddbe51cc00003000217", + "author":null, + "created_at":"2015-11-16T20:34:35.791Z", + "description":"Podcasts produzidos de 2008 a 2010 sobre jogos e todos os tipos de assuntos relacionados ao universo e cultura dos vídeogames.", + "image":"http://jogabilida.de/wp-content/uploads/2011/12/nl-podcast.png", + "keywords":[ + "Games \u0026 Hobbies", + "Video Games" + ], + "language":"pt-BR", + "permalink":"http://jogabilida.de/", + "published":true, + "title":"Podcast NowLoading", + "updated_at":"2015-11-16T23:00:23.963Z", + "url":"http://feeds.feedburner.com/podcastnowloading", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564b9cfe08602e00030000fa", + "author":null, + "created_at":"2015-11-17T21:32:46.210Z", + "description":"Being Boss is a podcast for creative entrepreneurs. From Emily Thompson and Kathleen Shannon. Get your business together. Being boss is hard. Making a dream job of your own isn't easy. 
But getting paid for it, becoming known for it, and finding purpose in it, is so doable - if you do the work.", + "image":"http://www.lovebeingboss.com/img/skin/Header_WhiteLogo.png", + "keywords":[ + + ], + "language":null, + "permalink":"http://www.lovebeingboss.com/", + "published":true, + "title":"Being Boss // A Podcast for Creative Entrepreneurs", + "updated_at":"2015-11-17T21:32:50.672Z", + "url":"http://www.lovebeingboss.com/RSSRetrieve.aspx?ID=18365\u0026Type=RSS20", + "score1": 100, + "score2": 0.1 + }, + { + "_id":"564c5c8008602e0003000128", + "author":null, + "created_at":"2015-11-18T11:09:52.991Z", + "description":"O mundo pop vira piada no Jovem Nerd", + "image":"http://jovemnerd.ig.com.br/wp-content/themes/jovemnerd_v2b/images/NC_FEED.jpg", + "keywords":[ + "Society \u0026 Culture" + ], + "language":"pt-BR", + "permalink":"http://jovemnerd.com.br/", + "published":true, + "title":"Nerdcast", + "updated_at":"2015-11-18T11:11:20.034Z", + "url":"http://jovemnerd.com.br/categoria/nerdcast/feed/", + "score1": 100, + "score2": 0.1 + } +] diff --git a/benchmark/string.ts b/benchmark/string.ts new file mode 100644 index 00000000..4f11f307 --- /dev/null +++ b/benchmark/string.ts @@ -0,0 +1,50 @@ +/* eslint-disable no-console */ +import { encode, decode } from "../src/index.ts"; + +const ascii = "A".repeat(40000); +const emoji = "🌏".repeat(20000); + +{ + // warm up ascii + const data = ascii; + const encoded = encode(data); + decode(encoded); + console.log(`encode / decode ascii data.length=${data.length} encoded.byteLength=${encoded.byteLength}`); + + // run + + console.time("encode ascii"); + for (let i = 0; i < 1000; i++) { + encode(data); + } + console.timeEnd("encode ascii"); + + console.time("decode ascii"); + for (let i = 0; i < 1000; i++) { + decode(encoded); + } + console.timeEnd("decode ascii"); +} + +{ + // warm up emoji + const data = emoji; + const encoded = encode(data); + decode(encoded); + + console.log(`encode / decode emoji data.length=${data.length} encoded.byteLength=${encoded.byteLength}`); + + // run + + console.time("encode emoji"); + for (let i = 0; i < 1000; i++) { + encode(data); + } + console.timeEnd("encode emoji"); + + console.time("decode emoji"); + for (let i = 0; i < 1000; i++) { + decode(encoded); + } + console.timeEnd("decode emoji"); +} diff --git a/benchmark/sync-vs-async.ts b/benchmark/sync-vs-async.ts new file mode 100644 index 00000000..620438ef --- /dev/null +++ b/benchmark/sync-vs-async.ts @@ -0,0 +1,54 @@ +#!/usr/bin/env node +/* eslint-disable no-console */ + +import { encode, decode, decodeAsync, decodeArrayStream } from "../src/index.ts"; +import { writeFileSync, unlinkSync, readFileSync, createReadStream } from "node:fs"; +import { deepStrictEqual } from "node:assert"; + +type Data = { id: number; score: number; title: string; content: string; createdAt: Date }; + +(async () => { + const data: Data[] = []; + for (let i = 0; i < 1000; i++) { + const id = i + 1; + data.push({ + id, + score: Math.round(Math.random() * Number.MAX_SAFE_INTEGER), + title: `Hello, world! 
#${id}`, + content: `blah blah blah `.repeat(20).trim(), + createdAt: new Date(), + }); + } + const encoded = encode(data); + const file = "benchmark/tmp.msgpack"; + writeFileSync(file, encoded); + process.on("exit", () => unlinkSync(file)); + console.log(`encoded size ${Math.round(encoded.byteLength / 1024)}KiB`); + + deepStrictEqual(decode(readFileSync(file)), data); + deepStrictEqual(await decodeAsync(createReadStream(file)), data); + + // sync + console.time("readFileSync |> decode"); + for (let i = 0; i < 100; i++) { + decode(readFileSync(file)); + } + console.timeEnd("readFileSync |> decode"); + + // async + console.time("creteReadStream |> decodeAsync"); + for (let i = 0; i < 100; i++) { + await decodeAsync(createReadStream(file)); + } + console.timeEnd("creteReadStream |> decodeAsync"); + + // asyncArrayStream + + console.time("creteReadStream |> decodeArrayStream"); + for (let i = 0; i < 100; i++) { + for await (let result of decodeArrayStream(createReadStream(file))) { + // console.log(result); + } + } + console.timeEnd("creteReadStream |> decodeArrayStream"); +})(); diff --git a/benchmark/timestamp-ext.ts b/benchmark/timestamp-ext.ts new file mode 100644 index 00000000..318b4451 --- /dev/null +++ b/benchmark/timestamp-ext.ts @@ -0,0 +1,21 @@ +import { encode, decode } from "../src/index.ts"; + +const data = new Array(100).fill(new Date()); + +// warm up +const encoded = encode(data); +decode(encoded); + +// run + +console.time("encode"); +for (let i = 0; i < 10000; i++) { + encode(data); +} +console.timeEnd("encode"); + +console.time("decode"); +for (let i = 0; i < 10000; i++) { + decode(encoded); +} +console.timeEnd("decode"); diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..93dfed06 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,7 @@ +coverage: + status: + project: + default: + target: 90% + threshold: 1% + patch: off diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 00000000..0af8fa63 --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,151 @@ +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +import { fixupConfigRules, fixupPluginRules } from "@eslint/compat"; +import typescriptEslintEslintPlugin from "@typescript-eslint/eslint-plugin"; +import tsdoc from "eslint-plugin-tsdoc"; +import tsParser from "@typescript-eslint/parser"; +import js from "@eslint/js"; +import { FlatCompat } from "@eslint/eslintrc"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const compat = new FlatCompat({ + baseDirectory: __dirname, + recommendedConfig: js.configs.recommended, + allConfig: js.configs.all, +}); + +export default [ + { + ignores: ["**/*.js", "test/deno*", "test/bun*"], + }, + ...fixupConfigRules( + compat.extends( + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "plugin:import/recommended", + "plugin:import/typescript", + "prettier", + ), + ), + { + plugins: { + "@typescript-eslint": fixupPluginRules(typescriptEslintEslintPlugin), + tsdoc, + }, + + languageOptions: { + parser: tsParser, + ecmaVersion: 5, + sourceType: "script", + + parserOptions: { + project: "./tsconfig.json", + }, + }, + + settings: {}, + + rules: { + "no-constant-condition": [ + "warn", + { + checkLoops: false, + }, + ], + + "no-useless-escape": "warn", + "no-console": "warn", + "no-var": "warn", + "no-return-await": "warn", + "prefer-const": "warn", + "guard-for-in": "warn", + curly: "warn", + "no-param-reassign": "warn", + "prefer-spread": "warn", + 
"import/no-unresolved": "off", + "import/no-cycle": "error", + "import/no-default-export": "warn", + "tsdoc/syntax": "warn", + "@typescript-eslint/await-thenable": "warn", + + "@typescript-eslint/array-type": [ + "warn", + { + default: "generic", + }, + ], + + "@typescript-eslint/naming-convention": [ + "warn", + { + selector: "default", + format: ["camelCase", "UPPER_CASE", "PascalCase"], + leadingUnderscore: "allow", + }, + { + selector: "typeLike", + format: ["PascalCase"], + leadingUnderscore: "allow", + }, + ], + + "@typescript-eslint/restrict-plus-operands": "warn", + //"@typescript-eslint/no-throw-literal": "warn", + "@typescript-eslint/unbound-method": "warn", + "@typescript-eslint/explicit-module-boundary-types": "warn", + //"@typescript-eslint/no-extra-semi": "warn", + "@typescript-eslint/no-extra-non-null-assertion": "warn", + + "@typescript-eslint/no-unused-vars": [ + "warn", + { + argsIgnorePattern: "^_", + }, + ], + + "@typescript-eslint/no-use-before-define": "warn", + "@typescript-eslint/no-for-in-array": "warn", + "@typescript-eslint/no-unsafe-argument": "warn", + "@typescript-eslint/no-unsafe-call": "warn", + + "@typescript-eslint/no-unnecessary-condition": [ + "warn", + { + allowConstantLoopConditions: true, + }, + ], + + "@typescript-eslint/no-unnecessary-type-constraint": "warn", + "@typescript-eslint/no-implied-eval": "warn", + "@typescript-eslint/no-non-null-asserted-optional-chain": "warn", + "@typescript-eslint/no-invalid-void-type": "warn", + "@typescript-eslint/no-loss-of-precision": "warn", + "@typescript-eslint/no-confusing-void-expression": "warn", + "@typescript-eslint/no-redundant-type-constituents": "warn", + "@typescript-eslint/prefer-for-of": "warn", + "@typescript-eslint/prefer-includes": "warn", + "@typescript-eslint/prefer-string-starts-ends-with": "warn", + "@typescript-eslint/prefer-readonly": "warn", + "@typescript-eslint/prefer-regexp-exec": "warn", + "@typescript-eslint/prefer-nullish-coalescing": "warn", + "@typescript-eslint/prefer-optional-chain": "warn", + "@typescript-eslint/prefer-ts-expect-error": "warn", + "@typescript-eslint/consistent-type-imports": [ + "error", + { + prefer: "type-imports", + disallowTypeAnnotations: false, + }, + ], + "@typescript-eslint/indent": "off", + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-empty-interface": "off", + "@typescript-eslint/no-empty-function": "off", + "@typescript-eslint/no-var-requires": "off", + "@typescript-eslint/no-non-null-assertion": "off", + "@typescript-eslint/ban-ts-comment": "off", + }, + }, +]; diff --git a/example/deno-with-esmsh.ts b/example/deno-with-esmsh.ts new file mode 100755 index 00000000..4911fc59 --- /dev/null +++ b/example/deno-with-esmsh.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env deno run +/* eslint-disable no-console */ +import * as msgpack from "https://esm.sh/@msgpack/msgpack/mod.ts"; + +console.log(msgpack.decode(msgpack.encode("Hello, world!"))); diff --git a/example/deno-with-jsdeliver.ts b/example/deno-with-jsdeliver.ts new file mode 100755 index 00000000..af72b397 --- /dev/null +++ b/example/deno-with-jsdeliver.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env deno run +/* eslint-disable no-console */ +import * as msgpack from "https://cdn.jsdelivr.net/npm/@msgpack/msgpack/mod.ts"; + +console.log(msgpack.decode(msgpack.encode("Hello, world!"))); diff --git a/example/deno-with-npm.ts b/example/deno-with-npm.ts new file mode 100755 index 00000000..98fdf92b --- /dev/null +++ b/example/deno-with-npm.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env deno run +/* eslint-disable 
no-console */ +import * as msgpack from "npm:@msgpack/msgpack"; + +console.log(msgpack.decode(msgpack.encode("Hello, world!"))); diff --git a/example/deno-with-unpkg.ts b/example/deno-with-unpkg.ts new file mode 100755 index 00000000..84d9128b --- /dev/null +++ b/example/deno-with-unpkg.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env deno run +/* eslint-disable no-console */ +import * as msgpack from "https://unpkg.com/@msgpack/msgpack/mod.ts"; + +console.log(msgpack.decode(msgpack.encode("Hello, world!"))); diff --git a/example/fetch-example-server.ts b/example/fetch-example-server.ts new file mode 100644 index 00000000..e4b5d10c --- /dev/null +++ b/example/fetch-example-server.ts @@ -0,0 +1,31 @@ +// ts-node example/fetch-example-server.ts +// open example/fetch-example.html + +import http from "http"; +import { encode } from "../src"; + +const hostname = "127.0.0.1"; +const port = 8080; + +function bufferView(b: Uint8Array) { + return Buffer.from(b.buffer, b.byteOffset, b.byteLength); +} + +const server = http.createServer((req, res) => { + console.log("accept:", req.method, req.url); + + res.statusCode = 200; + res.setHeader("content-type", "application/x-msgpack"); + res.setHeader("access-control-allow-origin", "*"); + res.end( + bufferView( + encode({ + message: "Hello, world!", + }), + ), + ); +}); + +server.listen(port, hostname, () => { + console.log(`Server running at http://${hostname}:${port}/`); +}); diff --git a/example/fetch-example.html b/example/fetch-example.html new file mode 100644 index 00000000..d59c2503 --- /dev/null +++ b/example/fetch-example.html @@ -0,0 +1,62 @@ + + + + + + + + + + +
+  Fetch API example
+  Open DevTool and see the console logs.
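The page above pairs with `fetch-example-server.ts`, which serves a MessagePack-encoded body with the `application/x-msgpack` content type. As a hedged sketch of the client side (the `fetchAndDecode` helper name and the hard-coded URL are illustrative assumptions, not code from this repository), such a page can decode the response either by streaming it into `decodeAsync()` or by buffering it and calling `decode()`:

```typescript
import { decode, decodeAsync } from "@msgpack/msgpack";

async function fetchAndDecode(url: string): Promise<unknown> {
  const response = await fetch(url);
  if (response.body != null) {
    // Streaming path: decodeAsync() accepts a WHATWG ReadableStream directly.
    return decodeAsync(response.body);
  }
  // Fallback path: buffer the whole payload, then decode it synchronously.
  return decode(new Uint8Array(await response.arrayBuffer()));
}

// Assumes the example server from fetch-example-server.ts is running locally.
fetchAndDecode("http://127.0.0.1:8080/").then((data) => {
  console.log(data); // expected shape: { message: "Hello, world!" }
});
```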
+ + + diff --git a/example/umd-example.html b/example/umd-example.html new file mode 100644 index 00000000..23ea96c3 --- /dev/null +++ b/example/umd-example.html @@ -0,0 +1,19 @@ + + + + + + + +
+  UMD for @msgpack/msgpack
+  <script src="https://unpkg.com/@msgpack/msgpack"></script>
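The UMD bundle referenced by the `<script>` tag above only exposes a runtime `MessagePack` global (as noted in the README's CDN section), so a TypeScript page script has to declare that global itself. A minimal sketch, assuming the CDN script has already been loaded; the `declare` shape below is an illustration for this example, not typings shipped by the package:

```typescript
// Assumption: <script src="https://unpkg.com/@msgpack/msgpack"></script> has run,
// so the UMD bundle has attached a MessagePack object to the global scope.
declare const MessagePack: {
  encode(value: unknown): Uint8Array;
  decode(bytes: Uint8Array): unknown;
};

const bytes = MessagePack.encode({ foo: "bar", when: new Date() });
// Date values round-trip through the built-in timestamp extension by default.
console.log(MessagePack.decode(bytes));
```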
+ + diff --git a/example/umd-example.js b/example/umd-example.js new file mode 100644 index 00000000..bf5a2809 --- /dev/null +++ b/example/umd-example.js @@ -0,0 +1,26 @@ +/* eslint-disable no-console */ +"use strict"; + +try { + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), + }; + + document.writeln("

input:

"); + document.writeln(`
${JSON.stringify(object, undefined, 2)}
`); + + const encoded = MessagePack.encode(object); + + document.writeln("

output:

"); + document.writeln(`
${JSON.stringify(MessagePack.decode(encoded), undefined, 2)}
`); +} catch (e) { + console.error(e); + document.write(`

${e.constructor.name}: ${e.message}

`); +} diff --git a/example/webpack-example/.gitignore b/example/webpack-example/.gitignore new file mode 100644 index 00000000..d8b83df9 --- /dev/null +++ b/example/webpack-example/.gitignore @@ -0,0 +1 @@ +package-lock.json diff --git a/example/webpack-example/README.md b/example/webpack-example/README.md new file mode 100644 index 00000000..a2ae2f5a --- /dev/null +++ b/example/webpack-example/README.md @@ -0,0 +1,11 @@ +# Webpack Example for @msgpack/msgpack + +This example demonstrates tree-shaking with webpack. + +## Usage + +```shell +npm install +npx webpack +ls -lh dist/ +``` diff --git a/example/webpack-example/index.ts b/example/webpack-example/index.ts new file mode 100644 index 00000000..8b1c01c9 --- /dev/null +++ b/example/webpack-example/index.ts @@ -0,0 +1,4 @@ +import { encode } from "@msgpack/msgpack"; + +console.log(encode(null)); + diff --git a/example/webpack-example/package.json b/example/webpack-example/package.json new file mode 100644 index 00000000..6c891bd6 --- /dev/null +++ b/example/webpack-example/package.json @@ -0,0 +1,22 @@ +{ + "name": "webpack-example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@msgpack/msgpack": "../../" + }, + "devDependencies": { + "lodash": "^4.17.20", + "ts-loader": "^8.0.4", + "ts-node": "^9.0.0", + "typescript": "^4.0.3", + "webpack": "^4.44.2", + "webpack-cli": "^3.3.12" + } +} diff --git a/example/webpack-example/tsconfig.json b/example/webpack-example/tsconfig.json new file mode 100644 index 00000000..21cacb8f --- /dev/null +++ b/example/webpack-example/tsconfig.json @@ -0,0 +1,71 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig.json to read more about this file */ + + /* Basic Options */ + // "incremental": true, /* Enable incremental compilation */ + "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */ + "module": "es2020", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ + // "lib": [], /* Specify library files to be included in the compilation. */ + // "allowJs": true, /* Allow javascript files to be compiled. */ + // "checkJs": true, /* Report errors in .js files. */ + // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ + // "declaration": true, /* Generates corresponding '.d.ts' file. */ + // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ + // "sourceMap": true, /* Generates corresponding '.map' file. */ + // "outFile": "./", /* Concatenate and emit output to single file. */ + // "outDir": "./", /* Redirect output structure to the directory. */ + // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ + // "composite": true, /* Enable project compilation */ + // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ + // "removeComments": true, /* Do not emit comments to output. */ + // "noEmit": true, /* Do not emit outputs. */ + // "importHelpers": true, /* Import emit helpers from 'tslib'. */ + // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. 
*/ + // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ + + /* Strict Type-Checking Options */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* Enable strict null checks. */ + // "strictFunctionTypes": true, /* Enable strict checking of function types. */ + // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ + // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ + // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ + // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ + + /* Additional Checks */ + // "noUnusedLocals": true, /* Report errors on unused locals. */ + // "noUnusedParameters": true, /* Report errors on unused parameters. */ + // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + + /* Module Resolution Options */ + "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ + "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ + "paths": { + "@msgpack/msgpack": ["../../"] + }, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ + // "typeRoots": [], /* List of folders to include type definitions from. */ + // "types": [], /* Type declaration files to be included in compilation. */ + // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ + "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ + // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + + /* Source Map Options */ + // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ + // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ + + /* Experimental Options */ + // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ + // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ + + /* Advanced Options */ + "skipLibCheck": true, /* Skip type checking of declaration files. */ + "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. 
*/ + } +} diff --git a/example/webpack-example/webpack.config.ts b/example/webpack-example/webpack.config.ts new file mode 100644 index 00000000..3d22690c --- /dev/null +++ b/example/webpack-example/webpack.config.ts @@ -0,0 +1,61 @@ +const path = require("path"); +const webpack = require("webpack"); +const _ = require("lodash"); + +const config = { + mode: "production", + + entry: "./index.ts", + output: { + path: path.resolve(__dirname, "dist"), + filename: undefined, // will be set later + }, + resolve: { + extensions: [".ts", ".tsx", ".mjs", ".js", ".json", ".wasm"], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + loader: "ts-loader", + options: { + configFile: "tsconfig.json", + }, + }, + ], + }, + + plugins: [ + new webpack.DefinePlugin({ + // eslint-disable-next-line @typescript-eslint/naming-convention + "process.env.TEXT_ENCODING": "undefined", + // eslint-disable-next-line @typescript-eslint/naming-convention + "process.env.TEXT_DECODER": "undefined", + }), + ], + + optimization: { + noEmitOnErrors: true, + minimize: false, + }, + + // We don't need NodeJS stuff on browsers! + // https://webpack.js.org/configuration/node/ + node: false, + + devtool: "source-map", +}; + +module.exports = [ + ((config) => { + config.output.filename = "bundle.min.js"; + config.optimization.minimize = true; + return config; + })(_.cloneDeep(config)), + + ((config) => { + config.output.filename = "bundle.js"; + config.optimization.minimize = false; + return config; + })(_.cloneDeep(config)), +]; diff --git a/karma.conf.ts b/karma.conf.ts new file mode 100644 index 00000000..b4a51507 --- /dev/null +++ b/karma.conf.ts @@ -0,0 +1,70 @@ +// const webpack = require("webpack"); + +// eslint-disable-next-line import/no-default-export +export default function configure(config: any) { + config.set({ + customLaunchers: { + // To debug it wih IE11, + // Install `karma-virtualbox-ie11-launcher`, + // and configure custom launchers like this: + // IE11: { + // base: "VirtualBoxIE11", + // keepAlive: true, + // vmName: "IE11 - Win10", + // }, + }, + browsers: ["ChromeHeadless", "FirefoxHeadless"], + + basePath: "", + frameworks: ["mocha"], + files: ["./test/karma-run.ts"], + exclude: [], + preprocessors: { + "**/*.ts": ["webpack", "sourcemap"], + }, + reporters: ["dots"], + port: 9876, + colors: true, + logLevel: config.LOG_INFO, + autoWatch: true, + singleRun: false, + concurrency: 1, + browserNoActivityTimeout: 60_000, + + webpack: { + mode: "production", + + resolve: { + extensions: [".ts", ".tsx", ".mjs", ".js", ".json", ".wasm"], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + loader: "ts-loader", + options: { + transpileOnly: true, + configFile: "tsconfig.test-karma.json", + }, + }, + ], + }, + plugins: [], + optimization: { + minimize: false, + }, + performance: { + hints: false, + }, + devtool: "inline-source-map", + }, + mime: { + "text/x-typescript": ["ts", "tsx"], + }, + client: { + mocha: { + timeout: 15_000, + }, + }, + }); +} diff --git a/mod.ts b/mod.ts new file mode 100644 index 00000000..6c76e59b --- /dev/null +++ b/mod.ts @@ -0,0 +1 @@ +export * from "./dist.esm/index.mjs"; diff --git a/msgpack.base.js b/msgpack.base.js deleted file mode 100644 index d8f027b5..00000000 --- a/msgpack.base.js +++ /dev/null @@ -1,597 +0,0 @@ - -// === msgpack === -// MessagePack -> http://msgpack.sourceforge.net/ - -this.msgpack || (function(globalScope) { - -globalScope.msgpack = { - pack: msgpackpack, // msgpack.pack(data:Mix):ByteArray - unpack: msgpackunpack, // 
msgpack.unpack(data:BinaryString/ByteArray):Mix - worker: "msgpack.js", // msgpack.worker - WebWorkers script filename - upload: msgpackupload, // msgpack.upload(url:String, option:Hash, callback:Function) - download: msgpackdownload // msgpack.download(url:String, option:Hash, callback:Function) -}; - -var _ie = /MSIE/.test(navigator.userAgent), - _bit2num = {}, // BitStringToNumber { "00000000": 0, ... "11111111": 255 } - _bin2num = {}, // BinaryStringToNumber { "\00": 0, ... "\ff": 255 } - _num2bin = {}, // NumberToBinaryString { 0: "\00", ... 255: "\ff" } - _num2b64 = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ" + - "abcdefghijklmnopqrstuvwxyz0123456789+/").split(""), - _sign = { 8: 0x80, 16: 0x8000, 32: 0x80000000 }, - _split8char = /.{8}/g; - -// for WebWorkers Code Block -self.importScripts && (onmessage = function(event) { - if (event.data.method === "pack") { - postMessage(base64encode(msgpackpack(event.data.data))); - } else { - postMessage(msgpackunpack(event.data.data)); - } -}); - -// msgpack.pack -function msgpackpack(data) { // @param Mix: - // @return ByteArray: - return encode([], data); -} - -// msgpack.unpack -function msgpackunpack(data) { // @param BinaryString/ByteArray: - // @return Mix: - return { data: typeof data === "string" ? toByteArray(data) - : data, - index: -1, decode: decode }.decode(); -} - -// inner - encoder -function encode(rv, // @param ByteArray: result - mix) { // @param Mix: source data - var size = 0, i = 0, iz, c, ary, hash, - high, low, i64 = 0, sign, exp, frac; - - if (mix == null) { // null or undefined - rv.push(0xc0); - } else { - switch (typeof mix) { - case "boolean": - rv.push(mix ? 0xc3 : 0xc2); - break; - case "number": - if (mix !== mix) { // isNaN - rv.push(0xcb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff); // quiet NaN - } else if (mix === Infinity) { - rv.push(0xcb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00); // positive infinity - } else if (Math.floor(mix) === mix) { - if (mix < 0) { // int - if (mix >= -32) { // negative fixnum - rv.push(0xe0 + mix + 32); - } else if (mix > -0x80) { - rv.push(0xd0, mix + 0x100); - } else if (mix > -0x8000) { - mix += 0x10000; - rv.push(0xd1, mix >> 8, mix & 0xff); - } else if (mix > -0x80000000) { - mix += 0x100000000; - rv.push(0xd2, mix >>> 24, (mix >> 16) & 0xff, - (mix >> 8) & 0xff, mix & 0xff); - } else { - ++i64; - } - } else { // uint - if (mix < 0x80) { - rv.push(mix); // positive fixnum - } else if (mix < 0x100) { // uint 8 - rv.push(0xcc, mix); - } else if (mix < 0x10000) { // uint 16 - rv.push(0xcd, mix >> 8, mix & 0xff); - } else if (mix < 0x100000000) { // uint 32 - rv.push(0xce, mix >>> 24, (mix >> 16) & 0xff, - (mix >> 8) & 0xff, mix & 0xff); - } else { - ++i64; - } - } - if (i64) { - high = Math.floor(mix / 0x100000000); - low = mix & (0x100000000 - 1); - rv.push(mix < 0 ? 0xd3 : 0xcf, - (high >> 24) & 0xff, (high >> 16) & 0xff, - (high >> 8) & 0xff, high & 0xff, - (low >> 24) & 0xff, (low >> 16) & 0xff, - (low >> 8) & 0xff, low & 0xff); - } - } else { // double - // THX! edvakf - // http://javascript.g.hatena.ne.jp/edvakf/20100614/1276503044 - hash = _bit2num; - sign = mix < 0; - sign && (mix *= -1); - - // add offset 1023 to ensure positive - exp = Math.log(mix) / Math.LN2 + 1023 | 0; - - // shift 52 - (exp - 1023) bits to make integer part exactly 53 bits, - // then throw away trash less than decimal point - frac = (Math.floor(mix * Math.pow(2, 52 + 1023 - exp))). - toString(2).slice(1); - - // exp is between 1 and 2047. 
make it 11 bits - exp = ("000000000" + exp.toString(2)).slice(-11); - - ary = (+sign + exp + frac).match(_split8char); - rv.push(0xcb, hash[ary[0]], hash[ary[1]], - hash[ary[2]], hash[ary[3]], - hash[ary[4]], hash[ary[5]], - hash[ary[6]], hash[ary[7]]); - } - break; - case "string": - // utf8.encode - for (ary = [], iz = mix.length, i = 0; i < iz; ++i) { - c = mix.charCodeAt(i); - if (c < 0x80) { // ASCII(0x00 ~ 0x7f) - ary.push(c & 0x7f); - } else if (c < 0x0800) { - ary.push(((c >>> 6) & 0x1f) | 0xc0, (c & 0x3f) | 0x80); - } else if (c < 0x10000) { - ary.push(((c >>> 12) & 0x0f) | 0xe0, - ((c >>> 6) & 0x3f) | 0x80, (c & 0x3f) | 0x80); - } - } - setType(rv, 32, ary.length, [0xa0, 0xda, 0xdb]); - Array.prototype.push.apply(rv, ary); - break; - default: // array or hash - if (Object.prototype.toString.call(mix) === "[object Array]") { // array - size = mix.length; - setType(rv, 16, size, [0x90, 0xdc, 0xdd]); - for (; i < size; ++i) { - encode(rv, mix[i]); - } - } else { // hash - if (Object.keys) { - size = Object.keys(mix).length; - } else { - for (i in mix) { - mix.hasOwnProperty(i) && ++size; - } - } - setType(rv, 16, size, [0x80, 0xde, 0xdf]); - for (i in mix) { - encode(rv, i); - encode(rv, mix[i]); - } - } - } - } - return rv; -} - -// inner - decoder -function decode() { // @return Mix: - var rv, undef, size, i = 0, iz, msb = 0, c, sign, exp, frac, key, - that = this, - data = that.data, - type = data[++that.index]; - - if (type >= 0xe0) { // Negative FixNum (111x xxxx) (-32 ~ -1) - return type - 0x100; - } - if (type < 0x80) { // Positive FixNum (0xxx xxxx) (0 ~ 127) - return type; - } - if (type < 0x90) { // FixMap (1000 xxxx) - size = type - 0x80; - type = 0x80; - } else if (type < 0xa0) { // FixArray (1001 xxxx) - size = type - 0x90; - type = 0x90; - } else if (type < 0xc0) { // FixRaw (101x xxxx) - size = type - 0xa0; - type = 0xa0; - } - switch (type) { - case 0xc0: return null; - case 0xc2: return false; - case 0xc3: return true; - case 0xca: rv = readByte(that, 4); // float - sign = rv & _sign[32]; // 1bit - exp = (rv >> 23) & 0xff; // 8bits - frac = rv & 0x7fffff; // 23bits - if (!rv || rv === 0x80000000) { // 0.0 or -0.0 - return 0; - } - if (exp === 0xff) { // NaN or Infinity - return frac ? NaN : Infinity; - } - return (sign ? -1 : 1) * - (frac | 0x800000) * Math.pow(2, exp - 127 - 23); // 127: bias - case 0xcb: rv = readByte(that, 4); // double - sign = rv & _sign[32]; // 1bit - exp = (rv >> 20) & 0x7ff; // 11bits - frac = rv & 0xfffff; // 52bits - 32bits (high word) - if (!rv || rv === 0x80000000) { // 0.0 or -0.0 - return 0; - } - if (exp === 0x7ff) { // NaN or Infinity - readByte(that, 4); // seek index - return frac ? NaN : Infinity; - } - return (sign ? -1 : 1) * - ((frac | 0x100000) * Math.pow(2, exp - 1023 - 20) // 1023: bias - + readByte(that, 4) * Math.pow(2, exp - 1023 - 52)); - case 0xcf: return readByte(that, 4) * Math.pow(2, 32) + - readByte(that, 4); // uint 64 - case 0xce: return readByte(that, 4); // uint 32 - case 0xcd: return readByte(that, 2); // uint 16 - case 0xcc: return readByte(that, 1); // uint 8 - case 0xd3: return decodeInt64(that); // int 64 - case 0xd2: rv = readByte(that, 4); // int 32 - case 0xd1: rv === undef && (rv = readByte(that, 2)); // int 16 - case 0xd0: rv === undef && (rv = readByte(that, 1)); // int 8 - msb = 4 << ((type & 0x3) + 1); // 8, 16, 32 - return rv < _sign[msb] ? 
rv : rv - _sign[msb] * 2; - case 0xdb: size = readByte(that, 4); // raw 32 - case 0xda: size === undef && (size = readByte(that, 2)); // raw 16 - case 0xa0: i = that.index + 1; // raw - that.index += size; - // utf8.decode - for (rv = [], ri = -1, iz = i + size; i < iz; ++i) { - c = data[i]; // first byte - if (c < 0x80) { // ASCII(0x00 ~ 0x7f) - rv[++ri] = c; - } else if (c < 0xe0) { - rv[++ri] = (c & 0x1f) << 6 | (data[++i] & 0x3f); - } else if (c < 0xf0) { - rv[++ri] = (c & 0x0f) << 12 | (data[++i] & 0x3f) << 6 - | (data[++i] & 0x3f); - } - } - return String.fromCharCode.apply(null, rv); - case 0xdf: size = readByte(that, 4); // map 32 - case 0xde: size === undef && (size = readByte(that, 2)); // map 16 - case 0x80: for (rv = {}; i < size; ++i) { // map - key = that.decode(); - rv[key] = that.decode(); // key/value pair - } - return rv; - case 0xdd: size = readByte(that, 4); // array 32 - case 0xdc: size === undef && (size = readByte(that, 2)); // array 16 - case 0x90: for (rv = []; i < size; ++i) { // array - rv.push(that.decode()); - } - } - return rv; -} - -// inner - read byte -function readByte(that, // @param Object: - size) { // @param Number: - // @return Number: - var rv = 0, data = that.data, i = that.index; - - switch (size) { - case 4: rv += data[++i] * 0x1000000 + (data[++i] << 16); - case 2: rv += data[++i] << 8; - case 1: rv += data[++i]; - } - that.index = i; - return rv; -} - -// inner - decode int64 -function decodeInt64(that) { // @param Object: - // @return Number: - var rv, overflow = 0, - bytes = that.data.slice(that.index + 1, that.index + 9); - - that.index += 8; - - // avoid overflow - if (bytes[0] & 0x80) { - - ++overflow; - bytes[0] ^= 0xff; - bytes[1] ^= 0xff; - bytes[2] ^= 0xff; - bytes[3] ^= 0xff; - bytes[4] ^= 0xff; - bytes[5] ^= 0xff; - bytes[6] ^= 0xff; - bytes[7] ^= 0xff; - } - rv = bytes[0] * 0x100000000000000 - + bytes[1] * 0x1000000000000 - + bytes[2] * 0x10000000000 - + bytes[3] * 0x100000000 - + bytes[4] * 0x1000000 - + bytes[5] * 0x10000 - + bytes[6] * 0x100 - + bytes[7]; - return overflow ? (rv + 1) * -1 : rv; -} - -// inner - set type and fixed size -function setType(rv, // @param ByteArray: result - fixSize, // @param Number: fix size. 16 or 32 - size, // @param Number: size - types) { // @param ByteArray: type formats. 
eg: [0x90, 0xdc, 0xdd] - if (size < fixSize) { - rv.push(types[0] + size); - } else if (size < 0x10000) { // 16 - rv.push(types[1], size >> 8, size & 0xff); - } else if (size < 0x100000000) { // 32 - rv.push(types[2], size >>> 24, (size >> 16) & 0xff, - (size >> 8) & 0xff, size & 0xff); - } -} - -// msgpack.download - load from server -function msgpackdownload(url, // @param String: - option, // @param Hash: { worker, timeout, before, after } - // option.worker - Boolean(= false): true is use WebWorkers - // option.timeout - Number(= 10): timeout sec - // option.before - Function: before(xhr, option) - // option.after - Function: after(xhr, option, { status, ok }) - callback) { // @param Function: callback(data, option, { status, ok }) - // data - Mix/null: - // option - Hash: - // status - Number: HTTP status code - // ok - Boolean: - option.method = "GET"; - option.binary = true; - ajax(url, option, callback); -} - -// msgpack.upload - save to server -function msgpackupload(url, // @param String: - option, // @param Hash: { data, worker, timeout, before, after } - // option.data - Mix: - // option.worker - Boolean(= false): true is use WebWorkers - // option.timeout - Number(= 10): timeout sec - // option.before - Function: before(xhr, option) - // option.after - Function: after(xhr, option, { status, ok }) - callback) { // @param Function: callback(data, option, { status, ok }) - // data - String: responseText - // option - Hash: - // status - Number: HTTP status code - // ok - Boolean: - option.method = "PUT"; - option.binary = true; - - if (option.worker && globalScope.Worker) { - var worker = new Worker(msgpack.worker); - - worker.onmessage = function(event) { - option.data = event.data; - ajax(url, option, callback); - }; - worker.postMessage({ method: "pack", data: option.data }); - } else { - // pack and base64 encode - option.data = base64encode(msgpackpack(option.data)); - ajax(url, option, callback); - } -} - -// inner - -function ajax(url, // @param String: - option, // @param Hash: { data, ifmod, method, timeout, - // header, binary, before, after, worker } - // option.data - Mix: upload data - // option.ifmod - Boolean: true is "If-Modified-Since" header - // option.method - String: "GET", "POST", "PUT" - // option.timeout - Number(= 10): timeout sec - // option.header - Hash(= {}): { key: "value", ... } - // option.binary - Boolean(= false): true is binary data - // option.before - Function: before(xhr, option) - // option.after - Function: after(xhr, option, { status, ok }) - // option.worker - Boolean(= false): true is use WebWorkers - callback) { // @param Function: callback(data, option, { status, ok }) - // data - String/Mix/null: - // option - Hash: - // status - Number: HTTP status code - // ok - Boolean: - function readyStateChange() { - if (xhr.readyState === 4) { - var data, status = xhr.status, worker, byteArray, - rv = { status: status, ok: status >= 200 && status < 300 }; - - if (!run++) { - if (method === "PUT") { - data = rv.ok ? xhr.responseText : ""; - } else { - if (rv.ok) { - if (option.worker && globalScope.Worker) { - worker = new Worker(msgpack.worker); - worker.onmessage = function(event) { - callback(event.data, option, rv); - }; - worker.postMessage({ method: "unpack", - data: xhr.responseText }); - gc(); - return; - } else { - byteArray = _ie ? 
toByteArrayIE(xhr) - : toByteArray(xhr.responseText); - data = msgpackunpack(byteArray); - } - } - } - after && after(xhr, option, rv); - callback(data, option, rv); - gc(); - } - } - } - - function ng(abort, status) { - if (!run++) { - var rv = { status: status || 400, ok: false }; - - after && after(xhr, option, rv); - callback(null, option, rv); - gc(abort); - } - } - - function gc(abort) { - abort && xhr && xhr.abort && xhr.abort(); - watchdog && (clearTimeout(watchdog), watchdog = 0); - xhr = null; - globalScope.addEventListener && - globalScope.removeEventListener("beforeunload", ng, false); - } - - var watchdog = 0, - method = option.method || "GET", - header = option.header || {}, - before = option.before, - after = option.after, - data = option.data || null, - xhr = globalScope.XMLHttpRequest ? new XMLHttpRequest() : - globalScope.ActiveXObject ? new ActiveXObject("Microsoft.XMLHTTP") : - null, - run = 0, i, - overrideMimeType = "overrideMimeType", - setRequestHeader = "setRequestHeader", - getbinary = method === "GET" && option.binary; - - try { - xhr.onreadystatechange = readyStateChange; - xhr.open(method, url, true); // ASync - - before && before(xhr, option); - - getbinary && xhr[overrideMimeType] && - xhr[overrideMimeType]("text/plain; charset=x-user-defined"); - data && - xhr[setRequestHeader]("Content-Type", - "application/x-www-form-urlencoded"); - - for (i in header) { - xhr[setRequestHeader](i, header[i]); - } - - globalScope.addEventListener && - globalScope.addEventListener("beforeunload", ng, false); // 400: Bad Request - - xhr.send(data); - watchdog = setTimeout(function() { - ng(1, 408); // 408: Request Time-out - }, (option.timeout || 10) * 1000); - } catch (err) { - ng(0, 400); // 400: Bad Request - } -} - -// inner - BinaryString To ByteArray -function toByteArray(data) { // @param BinaryString: "\00\01" - // @return ByteArray: [0x00, 0x01] - var rv = [], bin2num = _bin2num, remain, - ary = data.split(""), - i = -1, iz; - - iz = ary.length; - remain = iz % 8; - - while (remain--) { - ++i; - rv[i] = bin2num[ary[i]]; - } - remain = iz >> 3; - while (remain--) { - rv.push(bin2num[ary[++i]], bin2num[ary[++i]], - bin2num[ary[++i]], bin2num[ary[++i]], - bin2num[ary[++i]], bin2num[ary[++i]], - bin2num[ary[++i]], bin2num[ary[++i]]); - } - return rv; -} - -// inner - BinaryString to ByteArray -function toByteArrayIE(xhr) { - var rv = [], data, remain, - charCodeAt = "charCodeAt", - loop, v0, v1, v2, v3, v4, v5, v6, v7, - i = -1, iz; - - iz = vblen(xhr); - data = vbstr(xhr); - loop = Math.ceil(iz / 2); - remain = loop % 8; - - while (remain--) { - v0 = data[charCodeAt](++i); // 0x00,0x01 -> 0x0100 - rv.push(v0 & 0xff, v0 >> 8); - } - remain = loop >> 3; - while (remain--) { - v0 = data[charCodeAt](++i); - v1 = data[charCodeAt](++i); - v2 = data[charCodeAt](++i); - v3 = data[charCodeAt](++i); - v4 = data[charCodeAt](++i); - v5 = data[charCodeAt](++i); - v6 = data[charCodeAt](++i); - v7 = data[charCodeAt](++i); - rv.push(v0 & 0xff, v0 >> 8, v1 & 0xff, v1 >> 8, - v2 & 0xff, v2 >> 8, v3 & 0xff, v3 >> 8, - v4 & 0xff, v4 >> 8, v5 & 0xff, v5 >> 8, - v6 & 0xff, v6 >> 8, v7 & 0xff, v7 >> 8); - } - iz % 2 && rv.pop(); - - return rv; -} - -// inner - base64.encode -function base64encode(data) { // @param ByteArray: - // @return Base64String: - var rv = [], - c = 0, i = -1, iz = data.length, - pad = [0, 2, 1][data.length % 3], - num2bin = _num2bin, - num2b64 = _num2b64; - - if (globalScope.btoa) { - while (i < iz) { - rv.push(num2bin[data[++i]]); - } - return btoa(rv.join("")); - } - 
--iz; - while (i < iz) { - c = (data[++i] << 16) | (data[++i] << 8) | (data[++i]); // 24bit - rv.push(num2b64[(c >> 18) & 0x3f], - num2b64[(c >> 12) & 0x3f], - num2b64[(c >> 6) & 0x3f], - num2b64[ c & 0x3f]); - } - pad > 1 && (rv[rv.length - 2] = "="); - pad > 0 && (rv[rv.length - 1] = "="); - return rv.join(""); -} - -// --- init --- -(function() { - var i = 0, v; - - for (; i < 0x100; ++i) { - v = String.fromCharCode(i); - _bit2num[("0000000" + i.toString(2)).slice(-8)] = i; - _bin2num[v] = i; // "\00" -> 0x00 - _num2bin[i] = v; // 0 -> "\00" - } - // http://twitter.com/edvakf/statuses/15576483807 - for (i = 0x80; i < 0x100; ++i) { // [Webkit][Gecko] - _bin2num[String.fromCharCode(0xf700 + i)] = i; // "\f780" -> 0x80 - } -})(); - -_ie && document.write(' diff --git a/test/ajax.htm b/test/ajax.htm deleted file mode 100644 index c5d5bda9..00000000 --- a/test/ajax.htm +++ /dev/null @@ -1,179 +0,0 @@ - - - -msgpack ajax demo - - - - - -

[deleted HTML demo page: heading "msgpack.download(), msgpack.upload()", a "Last downloaded data length" counter, an "add decode log" option, and Choose / Download data / Upload Last data / Download Last data tables for Test1 (without WebWorkers) and Test2 (with WebWorkers); remaining markup not recoverable]
diff --git a/test/bench.htm b/test/bench.htm deleted file mode 100644 index 6f85dbd7..00000000 --- a/test/bench.htm +++ /dev/null @@ -1,177 +0,0 @@
[deleted HTML page "benchmark msgpack vs json" with "Prepare Data" / "Bench !" controls; markup not recoverable]
diff --git a/test/bench.ie.htm b/test/bench.ie.htm deleted file mode 100644 index 730ed6d0..00000000 --- a/test/bench.ie.htm +++ /dev/null @@ -1,176 +0,0 @@
[deleted HTML page "benchmark msgpack vs json" (IE variant) with "Prepare Data" / "Bench !" controls; markup not recoverable]
- - - - - - - - - - - diff --git a/test/bigint64.test.ts b/test/bigint64.test.ts new file mode 100644 index 00000000..e2bf08f4 --- /dev/null +++ b/test/bigint64.test.ts @@ -0,0 +1,38 @@ +import assert from "assert"; +import { encode, decode } from "../src/index.ts"; + +describe("useBigInt64: true", () => { + before(function () { + if (typeof BigInt === "undefined") { + this.skip(); + } + }); + + it("encodes and decodes 0n", () => { + const value = BigInt(0); + const encoded = encode(value, { useBigInt64: true }); + assert.deepStrictEqual(decode(encoded, { useBigInt64: true }), value); + }); + + it("encodes and decodes MAX_SAFE_INTEGER+1", () => { + const value = BigInt(Number.MAX_SAFE_INTEGER) + BigInt(1); + const encoded = encode(value, { useBigInt64: true }); + assert.deepStrictEqual(decode(encoded, { useBigInt64: true }), value); + }); + + it("encodes and decodes MIN_SAFE_INTEGER-1", () => { + const value = BigInt(Number.MIN_SAFE_INTEGER) - BigInt(1); + const encoded = encode(value, { useBigInt64: true }); + assert.deepStrictEqual(decode(encoded, { useBigInt64: true }), value); + }); + + it("encodes and decodes values with numbers and bigints", () => { + const value = { + ints: [0, Number.MAX_SAFE_INTEGER, Number.MIN_SAFE_INTEGER], + nums: [Number.NaN, Math.PI, Math.E, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY], + bigints: [BigInt(0), BigInt(Number.MAX_SAFE_INTEGER) + BigInt(1), BigInt(Number.MIN_SAFE_INTEGER) - BigInt(1)], + }; + const encoded = encode(value, { useBigInt64: true }); + assert.deepStrictEqual(decode(encoded, { useBigInt64: true }), value); + }); +}); diff --git a/test/bun.spec.ts b/test/bun.spec.ts new file mode 100644 index 00000000..fca79eaa --- /dev/null +++ b/test/bun.spec.ts @@ -0,0 +1,8 @@ +import { expect, test } from "bun:test"; +import { encode, decode } from "../src/index.ts"; + +test("Hello, world!", () => { + const encoded = encode("Hello, world!"); + const decoded = decode(encoded); + expect(decoded).toBe("Hello, world!"); +}); diff --git a/test/byteArray.js b/test/byteArray.js deleted file mode 100644 index 6edd82b6..00000000 --- a/test/byteArray.js +++ /dev/null @@ -1,53 +0,0 @@ - -// === uu.byteArray === -//#include uupaa.js - -uu.byteArray || (function(uu) { - -uu.byteArray = uubyteArray; // uu.byteArray(source:HexString):ByteArray -uu.byteArray.toHexString = uubyteArraytoHexString; // uu.byteArray.toHexString(source:ByteArray, - // verbose:Boolean = false):HexString - -// uu.byteArray - HexString to ByteArray -function uubyteArray(source) { // @param String: "00010203" - // @return ByteArray: [0, 1, 2, 3] - // @throws Error("BAD_DATA") - var rv = [], ri = -1, v, i = 0, iz = source.length, - hh2num = uu.hash.hh2num; - - if (iz % 2) { - throw new Error("BAD_DATA"); - } - - v = source.split(""); - - for (; i < iz; i += 2) { - rv[++ri] = hh2num[v[i] + v[i + 1]]; - } - return rv; -} - -// uu.byteArray.toHexString - array to HexString -function uubyteArraytoHexString(source, // @param ByteArray: [0, 1, 2, 3] - verbose) { // @param Boolean(= false): - // @return HexString: verbose = false "00010203" - // or verbose = true "0x00, 0x01, 0x02, 0x03" - var rv = [], ri = -1, v, i = 0, iz = source.length, - num2hh = uu.hash.num2hh; - - if (verbose) { - for (; i < iz; ++i) { - v = source[i]; - rv[++ri] = "0x" + num2hh[v * (v < 0 ? -1 : 1)]; - } - return rv.join(", "); - } - for (; i < iz; ++i) { - v = source[i]; - rv[++ri] = num2hh[v * (v < 0 ? 
-1 : 1)]; - } - return rv.join(""); -} - -})(uu); - diff --git a/test/codec-bigint.test.ts b/test/codec-bigint.test.ts new file mode 100644 index 00000000..b0ef7aad --- /dev/null +++ b/test/codec-bigint.test.ts @@ -0,0 +1,62 @@ +import assert from "assert"; +import { encode, decode, ExtensionCodec, DecodeError } from "../src/index.ts"; + +// There's a built-in `useBigInt64: true` option, but a custom codec might be +// better if you'd like to encode bigint to reduce the size of binaries. + +const BIGINT_EXT_TYPE = 0; // Any in 0-127 + +const extensionCodec = new ExtensionCodec(); +extensionCodec.register({ + type: BIGINT_EXT_TYPE, + encode(input: unknown): Uint8Array | null { + if (typeof input === "bigint") { + if (input <= Number.MAX_SAFE_INTEGER && input >= Number.MIN_SAFE_INTEGER) { + return encode(Number(input)); + } else { + return encode(String(input)); + } + } else { + return null; + } + }, + decode(data: Uint8Array): bigint { + const val = decode(data); + if (!(typeof val === "string" || typeof val === "number")) { + throw new DecodeError(`unexpected BigInt source: ${val} (${typeof val})`); + } + return BigInt(val); + }, +}); + +describe("codec BigInt", () => { + it("encodes and decodes 0n", () => { + const value = BigInt(0); + const encoded = encode(value, { extensionCodec }); + assert.deepStrictEqual(decode(encoded, { extensionCodec }), value); + }); + + it("encodes and decodes 100n", () => { + const value = BigInt(100); + const encoded = encode(value, { extensionCodec }); + assert.deepStrictEqual(decode(encoded, { extensionCodec }), value); + }); + + it("encodes and decodes -100n", () => { + const value = BigInt(-100); + const encoded = encode(value, { extensionCodec }); + assert.deepStrictEqual(decode(encoded, { extensionCodec }), value); + }); + + it("encodes and decodes MAX_SAFE_INTEGER+1", () => { + const value = BigInt(Number.MAX_SAFE_INTEGER) + BigInt(1); + const encoded = encode(value, { extensionCodec }); + assert.deepStrictEqual(decode(encoded, { extensionCodec }), value); + }); + + it("encodes and decodes MIN_SAFE_INTEGER-1", () => { + const value = BigInt(Number.MIN_SAFE_INTEGER) - BigInt(1); + const encoded = encode(value, { extensionCodec }); + assert.deepStrictEqual(decode(encoded, { extensionCodec }), value); + }); +}); diff --git a/test/codec-float.test.ts b/test/codec-float.test.ts new file mode 100644 index 00000000..accc590f --- /dev/null +++ b/test/codec-float.test.ts @@ -0,0 +1,72 @@ +import assert from "assert"; +import * as ieee754 from "ieee754"; +import { decode } from "../src/index.ts"; + +const FLOAT32_TYPE = 0xca; +const FLOAT64_TYPE = 0xcb; + +const SPECS = { + POSITIVE_ZERO: +0.0, + NEGATIVE_ZERO: -0.0, + POSITIVE_INFINITY: Number.POSITIVE_INFINITY, + NEGATIVE_INFINITY: Number.NEGATIVE_INFINITY, + + POSITIVE_VALUE_1: +0.1, + POSITIVE_VALUE_2: +42, + POSITIVE_VALUE_3: +Math.PI, + POSITIVE_VALUE_4: +Math.E, + NEGATIVE_VALUE_1: -0.1, + NEGATIVE_VALUE_2: -42, + NEGATIVE_VALUE_3: -Math.PI, + NEGATIVE_VALUE_4: -Math.E, + + MAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + MIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, + + MAX_VALUE: Number.MAX_VALUE, + MIN_VALUE: Number.MIN_VALUE, +} as Record; + +describe("codec: float 32/64", () => { + context("float 32", () => { + for (const [name, value] of Object.entries(SPECS)) { + it(`decodes ${name} (${value})`, () => { + const buf = new Uint8Array(4); + ieee754.write(buf, value, 0, false, 23, 4); + const expected = ieee754.read(buf, 0, false, 23, 4); + + assert.deepStrictEqual(decode([FLOAT32_TYPE, ...buf]), expected, 
"matched sign"); + assert.notDeepStrictEqual(decode([FLOAT32_TYPE, ...buf]), -expected, "unmatched sign"); + }); + } + + it(`decodes NaN`, () => { + const buf = new Uint8Array(4); + ieee754.write(buf, NaN, 0, false, 23, 4); + const expected = ieee754.read(buf, 0, false, 23, 4); + + assert.deepStrictEqual(decode([FLOAT32_TYPE, ...buf]), expected, "matched sign"); + }); + }); + + context("float 64", () => { + for (const [name, value] of Object.entries(SPECS)) { + it(`decodes ${name} (${value})`, () => { + const buf = new Uint8Array(8); + ieee754.write(buf, value, 0, false, 52, 8); + const expected = ieee754.read(buf, 0, false, 52, 8); + + assert.deepStrictEqual(decode([FLOAT64_TYPE, ...buf]), expected, "matched sign"); + assert.notDeepStrictEqual(decode([FLOAT64_TYPE, ...buf]), -expected, "unmatched sign"); + }); + } + + it(`decodes NaN`, () => { + const buf = new Uint8Array(8); + ieee754.write(buf, NaN, 0, false, 52, 8); + const expected = ieee754.read(buf, 0, false, 52, 8); + + assert.deepStrictEqual(decode([FLOAT64_TYPE, ...buf]), expected, "matched sign"); + }); + }); +}); diff --git a/test/codec-int.test.ts b/test/codec-int.test.ts new file mode 100644 index 00000000..7e5a822e --- /dev/null +++ b/test/codec-int.test.ts @@ -0,0 +1,45 @@ +import assert from "assert"; +import { setInt64, getInt64, getUint64, setUint64 } from "../src/utils/int.ts"; + +const INT64SPECS = { + ZERO: 0, + ONE: 1, + MINUS_ONE: -1, + X_FF: 0xff, + MINUS_X_FF: -0xff, + INT32_MAX: 0x7fffffff, + INT32_MIN: -0x7fffffff - 1, + MAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + MIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, +} as Record; + +describe("codec: int64 / uint64", () => { + context("int 64", () => { + for (const name of Object.keys(INT64SPECS)) { + const value = INT64SPECS[name]!; + + it(`sets and gets ${value} (${value < 0 ? "-" : ""}0x${Math.abs(value).toString(16)})`, () => { + const b = new Uint8Array(8); + const view = new DataView(b.buffer); + setInt64(view, 0, value); + assert.deepStrictEqual(getInt64(view, 0), value); + }); + } + }); + + context("uint 64", () => { + it(`sets and gets 0`, () => { + const b = new Uint8Array(8); + const view = new DataView(b.buffer); + setUint64(view, 0, 0); + assert.deepStrictEqual(getUint64(view, 0), 0); + }); + + it(`sets and gets MAX_SAFE_INTEGER`, () => { + const b = new Uint8Array(8); + const view = new DataView(b.buffer); + setUint64(view, 0, Number.MAX_SAFE_INTEGER); + assert.deepStrictEqual(getUint64(view, 0), Number.MAX_SAFE_INTEGER); + }); + }); +}); diff --git a/test/codec-timestamp.test.ts b/test/codec-timestamp.test.ts new file mode 100644 index 00000000..a3098726 --- /dev/null +++ b/test/codec-timestamp.test.ts @@ -0,0 +1,62 @@ +import assert from "assert"; +import util from "util"; +import { + encode, + decode, + encodeDateToTimeSpec, + decodeTimestampExtension, + decodeTimestampToTimeSpec, + encodeTimestampExtension, +} from "../src/index.ts"; + +const TIME = 1556636810389; + +const SPECS = { + ZERO: new Date(0), + TIME_BEFORE_EPOCH_NS: new Date(-1), + TIME_BEFORE_EPOCH_SEC: new Date(-1000), + TIME_BEFORE_EPOCH_SEC_AND_NS: new Date(-1002), + TIMESTAMP32: new Date(Math.floor(TIME / 1000) * 1000), + TIMESTAMP64: new Date(TIME), + TIMESTAMP64_OVER_INT32: new Date(Date.UTC(2200, 0)), // cf. 
https://github.com/msgpack/msgpack-ruby/pull/172 + TIMESTAMP96_SEC_OVER_UINT32: new Date(0x400000000 * 1000), + TIMESTAMP96_SEC_OVER_UINT32_WITH_NS: new Date(0x400000000 * 1000 + 2), + + REGRESSION_1: new Date(1556799054803), +} as Record; + +describe("codec: timestamp 32/64/96", () => { + context("encode / decode", () => { + for (const name of Object.keys(SPECS)) { + const value = SPECS[name]!; + + it(`encodes and decodes ${name} (${value.toISOString()})`, () => { + const encoded = encode(value); + assert.deepStrictEqual(decode(encoded), value, `encoded: ${util.inspect(Buffer.from(encoded))}`); + }); + } + }); + + context("encodeDateToTimeSpec", () => { + it("normalizes new Date(-1) to { sec: -1, nsec: 999000000 }", () => { + assert.deepStrictEqual(encodeDateToTimeSpec(new Date(-1)), { sec: -1, nsec: 999000000 }); + }); + }); + + context("encodeDateToTimeSpec", () => { + it("decodes timestamp-ext binary to TimeSpec", () => { + const encoded = encodeTimestampExtension(new Date(42000))!; + assert.deepStrictEqual(decodeTimestampToTimeSpec(encoded), { sec: 42, nsec: 0 }); + }); + }); + + context("decodeTimestampExtension", () => { + context("for broken data", () => { + it("throws errors", () => { + assert.throws(() => { + decodeTimestampExtension(Uint8Array.from([0])); + }, /unrecognized data size for timestamp/i); + }); + }); + }); +}); diff --git a/test/codec.htm b/test/codec.htm deleted file mode 100644 index d424d986..00000000 --- a/test/codec.htm +++ /dev/null @@ -1,1316 +0,0 @@ - - - -msgpack codec test - - - - - diff --git a/test/decode-blob.test.ts b/test/decode-blob.test.ts new file mode 100644 index 00000000..a18fc9ee --- /dev/null +++ b/test/decode-blob.test.ts @@ -0,0 +1,23 @@ +import assert from "assert"; +import { encode, decode, decodeAsync } from "../src/index.ts"; + +(typeof Blob !== "undefined" ? 
describe : describe.skip)("Blob", () => { + it("decodes it with `decode()`", async function () { + const blob = new Blob([encode("Hello!")]); + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!blob.arrayBuffer) { + this.skip(); + } + assert.deepStrictEqual(decode(await blob.arrayBuffer()), "Hello!"); + }); + + it("decodes it with `decodeAsync()`", async function () { + const blob = new Blob([encode("Hello!")]); + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!blob.stream) { + this.skip(); + } + + assert.deepStrictEqual(await decodeAsync(blob.stream()), "Hello!"); + }); +}); diff --git a/test/decode-max-length.test.ts b/test/decode-max-length.test.ts new file mode 100644 index 00000000..8dd87f94 --- /dev/null +++ b/test/decode-max-length.test.ts @@ -0,0 +1,95 @@ +import assert from "assert"; +import { encode, decode, decodeAsync } from "../src/index.ts"; +import type { DecoderOptions } from "../src/index.ts"; + +describe("decode with max${Type}Length specified", () => { + async function* createStream(input: T) { + yield input; + } + + context("maxStrLength", () => { + const input = encode("foo"); + const options = { maxStrLength: 1 } satisfies DecoderOptions; + + it("throws errors (synchronous)", () => { + assert.throws(() => { + decode(input, options); + }, /max length exceeded/i); + }); + + it("throws errors (asynchronous)", async () => { + await assert.rejects(async () => { + await decodeAsync(createStream(input), options); + }, /max length exceeded/i); + }); + }); + + context("maxBinLength", () => { + const input = encode(Uint8Array.from([1, 2, 3])); + const options = { maxBinLength: 1 } satisfies DecoderOptions; + + it("throws errors (synchronous)", () => { + assert.throws(() => { + decode(input, options); + }, /max length exceeded/i); + }); + + it("throws errors (asynchronous)", async () => { + await assert.rejects(async () => { + await decodeAsync(createStream(input), options); + }, /max length exceeded/i); + }); + }); + + context("maxArrayLength", () => { + const input = encode([1, 2, 3]); + const options = { maxArrayLength: 1 } satisfies DecoderOptions; + + it("throws errors (synchronous)", () => { + assert.throws(() => { + decode(input, options); + }, /max length exceeded/i); + }); + + it("throws errors (asynchronous)", async () => { + await assert.rejects(async () => { + await decodeAsync(createStream(input), options); + }, /max length exceeded/i); + }); + }); + + context("maxMapLength", () => { + const input = encode({ foo: 1, bar: 1, baz: 3 }); + const options = { maxMapLength: 1 } satisfies DecoderOptions; + + it("throws errors (synchronous)", () => { + assert.throws(() => { + decode(input, options); + }, /max length exceeded/i); + }); + + it("throws errors (asynchronous)", async () => { + await assert.rejects(async () => { + await decodeAsync(createStream(input), options); + }, /max length exceeded/i); + }); + }); + + context("maxExtType", () => { + const input = encode(new Date()); + // timextamp ext requires at least 4 bytes. 
+ const options = { maxExtLength: 1 } satisfies DecoderOptions; + + it("throws errors (synchronous)", () => { + assert.throws(() => { + decode(input, options); + }, /max length exceeded/i); + }); + + it("throws errors (asynchronous)", async () => { + await assert.rejects(async () => { + await decodeAsync(createStream(input), options); + }, /max length exceeded/i); + }); + }); +}); diff --git a/test/decode-raw-strings.test.ts b/test/decode-raw-strings.test.ts new file mode 100644 index 00000000..c50ff15d --- /dev/null +++ b/test/decode-raw-strings.test.ts @@ -0,0 +1,49 @@ +import assert from "assert"; +import { encode, decode } from "../src/index.ts"; +import type { DecoderOptions } from "../src/index.ts"; + +describe("decode with rawStrings specified", () => { + const options = { rawStrings: true } satisfies DecoderOptions; + + it("decodes string as binary", () => { + const actual = decode(encode("foo"), options); + const expected = Uint8Array.from([0x66, 0x6f, 0x6f]); + assert.deepStrictEqual(actual, expected); + }); + + it("decodes invalid UTF-8 string as binary", () => { + const invalidUtf8String = Uint8Array.from([ + 61, 180, 118, 220, 39, 166, 43, 68, 219, 116, 105, 84, 121, 46, 122, 136, 233, 221, 15, 174, 247, 19, 50, 176, + 184, 221, 66, 188, 171, 36, 135, 121, + ]); + const encoded = Uint8Array.from([ + 196, 32, 61, 180, 118, 220, 39, 166, 43, 68, 219, 116, 105, 84, 121, 46, 122, 136, 233, 221, 15, 174, 247, 19, 50, + 176, 184, 221, 66, 188, 171, 36, 135, 121, + ]); + + const actual = decode(encoded, options); + assert.deepStrictEqual(actual, invalidUtf8String); + }); + + it("decodes object keys as strings", () => { + const actual = decode(encode({ key: "foo" }), options); + const expected = { key: Uint8Array.from([0x66, 0x6f, 0x6f]) }; + assert.deepStrictEqual(actual, expected); + }); + + it("ignores maxStrLength", () => { + const lengthLimitedOptions = { ...options, maxStrLength: 1 } satisfies DecoderOptions; + + const actual = decode(encode("foo"), lengthLimitedOptions); + const expected = Uint8Array.from([0x66, 0x6f, 0x6f]); + assert.deepStrictEqual(actual, expected); + }); + + it("respects maxBinLength", () => { + const lengthLimitedOptions = { ...options, maxBinLength: 1 } satisfies DecoderOptions; + + assert.throws(() => { + decode(encode("foo"), lengthLimitedOptions); + }, /max length exceeded/i); + }); +}); diff --git a/test/decode.jsfuzz.js b/test/decode.jsfuzz.js new file mode 100644 index 00000000..7341f49a --- /dev/null +++ b/test/decode.jsfuzz.js @@ -0,0 +1,30 @@ +/* eslint-disable */ +const assert = require("node:assert"); +const { Decoder, encode, DecodeError } = require("../dist/index.js"); + +/** + * @param {Buffer} bytes + * @returns {void} + */ +module.exports.fuzz = function fuzz(bytes) { + const decoder = new Decoder(); + try { + decoder.decode(bytes); + } catch (e) { + if (e instanceof DecodeError) { + // ok + } else if (e instanceof RangeError) { + // ok + } else { + throw e; + } + } + + // make sure the decoder instance is not broken + const object = { + foo: 1, + bar: 2, + baz: ["one", "two", "three"], + }; + assert.deepStrictEqual(decoder.decode(encode(object)), object); +} diff --git a/test/decodeArrayStream.test.ts b/test/decodeArrayStream.test.ts new file mode 100644 index 00000000..a8f7ed56 --- /dev/null +++ b/test/decodeArrayStream.test.ts @@ -0,0 +1,91 @@ +import assert from "assert"; +import { encode, decodeArrayStream } from "../src/index.ts"; + +describe("decodeArrayStream", () => { + const generateSampleObject = () => { + return { + id: 
Math.random(), + name: "test", + }; + }; + + const createStream = async function* (object: any) { + for (const byte of encode(object)) { + yield [byte]; + } + }; + + it("decodes numbers array (array8)", async () => { + const object = [1, 2, 3, 4, 5]; + + const result: Array = []; + + for await (const item of decodeArrayStream(createStream(object))) { + result.push(item); + } + + assert.deepStrictEqual(object, result); + }); + + it("decodes numbers of array (array16)", async () => { + const createStream = async function* () { + yield [0xdc, 0, 3]; + yield encode(1); + yield encode(2); + yield encode(3); + }; + + const result: Array = []; + + for await (const item of decodeArrayStream(createStream())) { + result.push(item); + } + + assert.deepStrictEqual(result, [1, 2, 3]); + }); + + it("decodes numbers of array (array32)", async () => { + const createStream = async function* () { + yield [0xdd, 0, 0, 0, 3]; + yield encode(1); + yield encode(2); + yield encode(3); + }; + + const result: Array = []; + + for await (const item of decodeArrayStream(createStream())) { + result.push(item); + } + + assert.deepStrictEqual(result, [1, 2, 3]); + }); + + it("decodes objects array", async () => { + const objectsArrays: Array = []; + + for (let i = 0; i < 10; i++) { + objectsArrays.push(generateSampleObject()); + } + + const result: Array = []; + + for await (const item of decodeArrayStream(createStream(objectsArrays))) { + result.push(item); + } + + assert.deepStrictEqual(objectsArrays, result); + }); + + it("fails for non array input", async () => { + const object = "demo"; + + await assert.rejects(async () => { + const result: Array = []; + + for await (const item of decodeArrayStream(createStream(object))) { + result.push(item); + } + }, /.*Unrecognized array type byte:.*/i); + }); +}); diff --git a/test/decodeAsync.test.ts b/test/decodeAsync.test.ts new file mode 100644 index 00000000..6af6c19b --- /dev/null +++ b/test/decodeAsync.test.ts @@ -0,0 +1,138 @@ +import assert from "assert"; +import { encode, decodeAsync } from "../src/index.ts"; + +describe("decodeAsync", () => { + function wrapWithNoisyBuffer(byte: number) { + return Uint8Array.from([0x01, byte, 0x02]).subarray(1, 2); + } + + it("decodes nil", async () => { + const createStream = async function* () { + yield wrapWithNoisyBuffer(0xc0); // nil + }; + + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, null); + }); + + it("decodes fixarray [nil]", async () => { + const createStream = async function* () { + yield wrapWithNoisyBuffer(0x91); // fixarray size=1 + yield [0xc0]; // nil + }; + + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, [null]); + }); + + it("decodes fixmap {'foo': 'bar'}", async () => { + const createStream = async function* () { + yield [0x81]; // fixmap size=1 + yield encode("foo"); + yield encode("bar"); + }; + + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, { "foo": "bar" }); + }); + + it("decodes fixmap {'[1, 2]': 'baz'} with custom map key converter", async () => { + const createStream = async function* () { + yield [0x81]; // fixmap size=1 + yield encode([1, 2]); + yield encode("baz"); + }; + + const object = await decodeAsync(createStream(), { + mapKeyConverter: (key) => JSON.stringify(key), + }); + + const key = JSON.stringify([1, 2]); + assert.deepStrictEqual(object, { [key]: "baz" }); + }); + + it("decodes multi-byte integer byte-by-byte", async () => { + const createStream = async function* () { + yield 
[0xcd]; // uint 16 + yield [0x12]; + yield [0x34]; + }; + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, 0x1234); + }); + + it("decodes fixstr byte-by-byte", async () => { + const createStream = async function* () { + yield [0xa3]; // fixstr size=3 + yield [0x66]; // "f" + yield [0x6f]; // "o" + yield [0x6f]; // "o" + }; + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, "foo"); + }); + + it("decodes binary byte-by-byte", async () => { + const createStream = async function* () { + yield [0xc4]; // bin 8 + yield [0x03]; // bin size=3 + yield [0x66]; // "f" + yield [0x6f]; // "o" + yield [0x6f]; // "o" + }; + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, Uint8Array.from([0x66, 0x6f, 0x6f])); + }); + + it("decodes binary with noisy buffer", async () => { + const createStream = async function* () { + yield wrapWithNoisyBuffer(0xc5); // bin 16 + yield [0x00]; + yield [0x00]; // bin size=0 + }; + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, new Uint8Array(0)); + }); + + it("decodes mixed object byte-by-byte", async () => { + const object = { + nil: null, + true: true, + false: false, + int: -42, + uint64: Number.MAX_SAFE_INTEGER, + int64: Number.MIN_SAFE_INTEGER, + float: Math.PI, + string: "Hello, world!", + longString: "Hello, world!\n".repeat(100), + binary: Uint8Array.from([0xf1, 0xf2, 0xf3]), + array: [1000, 2000, 3000], + map: { foo: 1, bar: 2, baz: 3 }, + timestampExt: new Date(), + map0: {}, + array0: [], + str0: "", + bin0: Uint8Array.from([]), + }; + + const createStream = async function* () { + for (const byte of encode(object)) { + yield [byte]; + } + }; + assert.deepStrictEqual(await decodeAsync(createStream()), object); + }); + + it("decodes BufferSource", async () => { + // https://developer.mozilla.org/en-US/docs/Web/API/BufferSource + const createStream = async function* () { + yield [0x81] as ArrayLike; // fixmap size=1 + yield encode("foo") as BufferSource; + yield encode("bar") as BufferSource; + }; + + // createStream() returns AsyncGenerator | BufferSource, ...> + const object = await decodeAsync(createStream()); + assert.deepStrictEqual(object, { "foo": "bar" }); + }); +}); diff --git a/test/decodeMulti.test.ts b/test/decodeMulti.test.ts new file mode 100644 index 00000000..eb661b80 --- /dev/null +++ b/test/decodeMulti.test.ts @@ -0,0 +1,31 @@ +import assert from "assert"; +import { encode, decodeMulti } from "../src/index.ts"; + +describe("decodeMulti", () => { + it("decodes multiple objects in a single binary", () => { + const items = [ + "foo", + 10, + { + name: "bar", + }, + [1, 2, 3], + ]; + + const encodedItems = items.map((item) => encode(item)); + const encoded = new Uint8Array(encodedItems.reduce((p, c) => p + c.byteLength, 0)); + let offset = 0; + for (const encodedItem of encodedItems) { + encoded.set(encodedItem, offset); + offset += encodedItem.byteLength; + } + + const result: Array = []; + + for (const item of decodeMulti(encoded)) { + result.push(item); + } + + assert.deepStrictEqual(result, items); + }); +}); diff --git a/test/decodeMultiStream.test.ts b/test/decodeMultiStream.test.ts new file mode 100644 index 00000000..4da31e55 --- /dev/null +++ b/test/decodeMultiStream.test.ts @@ -0,0 +1,60 @@ +import assert from "assert"; +import { encode, decodeMultiStream } from "../src/index.ts"; + +describe("decodeStream", () => { + it("decodes stream", async () => { + const items = [ + "foo", + 10, + { + name: "bar", + }, 
+ [1, 2, 3], + ]; + + const createStream = async function* (): AsyncGenerator { + for (const item of items) { + yield encode(item); + } + }; + + const result: Array = []; + + for await (const item of decodeMultiStream(createStream())) { + result.push(item); + } + + assert.deepStrictEqual(result, items); + }); + + it("decodes multiple objects in a single binary stream", async () => { + const items = [ + "foo", + 10, + { + name: "bar", + }, + [1, 2, 3], + ]; + + const encodedItems = items.map((item) => encode(item)); + const encoded = new Uint8Array(encodedItems.reduce((p, c) => p + c.byteLength, 0)); + let offset = 0; + for (const encodedItem of encodedItems) { + encoded.set(encodedItem, offset); + offset += encodedItem.byteLength; + } + + const createStream = async function* (): AsyncGenerator { + yield encoded; + }; + + const result: Array = []; + + for await (const item of decodeMultiStream(createStream())) { + result.push(item); + } + + assert.deepStrictEqual(result, items); + }); +}); diff --git a/test/deno_cjs_test.ts b/test/deno_cjs_test.ts new file mode 100755 index 00000000..7293f0a4 --- /dev/null +++ b/test/deno_cjs_test.ts @@ -0,0 +1,12 @@ +#!/usr/bin/env deno test --allow-read + +/* eslint-disable */ +import { deepStrictEqual } from "node:assert"; +import { test } from "node:test"; +import * as msgpack from "../dist.cjs/index.cjs"; + +test("Hello, world!", () => { + const encoded = msgpack.encode("Hello, world!"); + const decoded = msgpack.decode(encoded); + deepStrictEqual(decoded, "Hello, world!"); +}); diff --git a/test/deno_test.ts b/test/deno_test.ts new file mode 100755 index 00000000..fb5aa709 --- /dev/null +++ b/test/deno_test.ts @@ -0,0 +1,12 @@ +#!/usr/bin/env deno test + +/* eslint-disable */ +import { deepStrictEqual } from "node:assert"; +import { test } from "node:test"; +import * as msgpack from "../mod.ts"; + +test("Hello, world!", () => { + const encoded = msgpack.encode("Hello, world!"); + const decoded = msgpack.decode(encoded); + deepStrictEqual(decoded, "Hello, world!"); +}); diff --git a/test/edge-cases.test.ts b/test/edge-cases.test.ts new file mode 100644 index 00000000..797f6be6 --- /dev/null +++ b/test/edge-cases.test.ts @@ -0,0 +1,182 @@ +// kind of hand-written fuzzing data +// any errors should not break Encoder/Decoder instance states +import assert from "assert"; +import { encode, decodeAsync, decode, Encoder, Decoder, decodeMulti, decodeMultiStream } from "../src/index.ts"; + +function testEncoder(encoder: Encoder): void { + const object = { + foo: 1, + bar: 2, + baz: ["one", "two", "three"], + }; + assert.deepStrictEqual(decode(encoder.encode(object)), object); +} + +function testDecoder(decoder: Decoder): void { + const object = { + foo: 1, + bar: 2, + baz: ["one", "two", "three"], + }; + assert.deepStrictEqual(decoder.decode(encode(object)), object); +} + +describe("edge cases", () => { + context("try to encode cyclic refs", () => { + it("throws errors on arrays", () => { + const encoder = new Encoder(); + const cyclicRefs: Array = []; + cyclicRefs.push(cyclicRefs); + assert.throws(() => { + encoder.encode(cyclicRefs); + }, /too deep/i); + testEncoder(encoder); + }); + + it("throws errors on objects", () => { + const encoder = new Encoder(); + const cyclicRefs: Record = {}; + cyclicRefs["foo"] = cyclicRefs; + assert.throws(() => { + encoder.encode(cyclicRefs); + }, /too deep/i); + testEncoder(encoder); + }); + }); + + context("try to encode unrecognized objects", () => { + it("throws errors", () => { + const encoder = new Encoder(); + 
assert.throws(() => { + encode(() => {}); + }, /unrecognized object/i); + testEncoder(encoder); + }); + }); + + context("try to decode a map with non-string keys (asynchronous)", () => { + it("throws errors", async () => { + const decoder = new Decoder(); + const createStream = async function* () { + yield [0x81]; // fixmap size=1 + yield encode(null); + yield encode(null); + }; + + await assert.rejects(async () => { + await decoder.decodeAsync(createStream()); + }, /The type of key must be string/i); + testDecoder(decoder); + }); + }); + + context("try to decode invalid MessagePack binary", () => { + it("throws errors", () => { + const decoder = new Decoder(); + const TYPE_NEVER_USED = 0xc1; + + assert.throws(() => { + decoder.decode([TYPE_NEVER_USED]); + }, /unrecognized type byte/i); + testDecoder(decoder); + }); + }); + + context("try to decode insufficient data", () => { + it("throws errors (synchronous)", () => { + const decoder = new Decoder(); + assert.throws(() => { + decoder.decode([ + 0x92, // fixarray size=2 + 0xc0, // nil + ]); + }, RangeError); + testDecoder(decoder); + }); + + it("throws errors (asynchronous)", async () => { + const decoder = new Decoder(); + const createStream = async function* () { + yield [0x92]; // fixarray size=2 + yield encode(null); + }; + + await assert.rejects(async () => { + await decoder.decodeAsync(createStream()); + }, RangeError); + testDecoder(decoder); + }); + }); + + context("try to decode data with extra bytes", () => { + it("throws errors (synchronous)", () => { + const decoder = new Decoder(); + assert.throws(() => { + decoder.decode([ + 0x90, // fixarray size=0 + ...encode(null), + ]); + }, RangeError); + testDecoder(decoder); + }); + + it("throws errors (asynchronous)", async () => { + const decoder = new Decoder(); + const createStream = async function* () { + yield [0x90]; // fixarray size=0 + yield encode(null); + }; + + await assert.rejects(async () => { + await decoder.decodeAsync(createStream()); + }, RangeError); + testDecoder(decoder); + }); + + it("throws errors (asynchronous)", async () => { + const decoder = new Decoder(); + const createStream = async function* () { + yield [0x90, ...encode(null)]; // fixarray size=0 + nil + }; + + await assert.rejects(async () => { + await decoder.decodeAsync(createStream()); + }, RangeError); + testDecoder(decoder); + }); + }); + + context("try to decode an empty input", () => { + it("throws RangeError (synchronous)", () => { + assert.throws(() => { + decode([]); + }, RangeError); + }); + + it("decodes an empty array with decodeMulti()", () => { + assert.deepStrictEqual([...decodeMulti([])], []); + }); + + it("throws RangeError (asynchronous)", async () => { + const createStream = async function* () { + yield []; + }; + + assert.rejects(async () => { + await decodeAsync(createStream()); + }, RangeError); + }); + + it("decodes an empty array with decodeMultiStream()", async () => { + const createStream = async function* () { + yield []; + }; + + const results: Array = []; + for await (const item of decodeMultiStream(createStream())) { + results.push(item); + } + assert.deepStrictEqual(results, []); + }); + }); +}); diff --git a/test/encode.test.ts b/test/encode.test.ts new file mode 100644 index 00000000..f1b6ffef --- /dev/null +++ b/test/encode.test.ts @@ -0,0 +1,74 @@ +import assert from "assert"; +import { encode, decode } from "../src/index.ts"; + +describe("encode", () => { + context("sortKeys", () => { + it("cannonicalizes encoded binaries", () => { + assert.deepStrictEqual(encode({ a: 
1, b: 2 }, { sortKeys: true }), encode({ b: 2, a: 1 }, { sortKeys: true })); + }); + }); + + context("forceFloat32", () => { + it("encodes numbers in float64 without forceFloat32", () => { + assert.deepStrictEqual(encode(3.14), Uint8Array.from([0xcb, 0x40, 0x9, 0x1e, 0xb8, 0x51, 0xeb, 0x85, 0x1f])); + }); + + it("encodes numbers in float32 when forceFloat32=true", () => { + assert.deepStrictEqual(encode(3.14, { forceFloat32: true }), Uint8Array.from([0xca, 0x40, 0x48, 0xf5, 0xc3])); + }); + + it("encodes numbers in float64 with forceFloat32=false", () => { + assert.deepStrictEqual( + encode(3.14, { forceFloat32: false }), + Uint8Array.from([0xcb, 0x40, 0x9, 0x1e, 0xb8, 0x51, 0xeb, 0x85, 0x1f]), + ); + }); + }); + + context("forceFloat", () => { + it("encodes integers as integers without forceIntegerToFloat", () => { + assert.deepStrictEqual(encode(3), Uint8Array.from([0x3])); + }); + + it("encodes integers as floating point when forceIntegerToFloat=true", () => { + assert.deepStrictEqual( + encode(3, { forceIntegerToFloat: true }), + Uint8Array.from([0xcb, 0x40, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), + ); + }); + + it("encodes integers as float32 when forceIntegerToFloat=true and forceFloat32=true", () => { + assert.deepStrictEqual( + encode(3, { forceIntegerToFloat: true, forceFloat32: true }), + Uint8Array.from([0xca, 0x40, 0x40, 0x00, 0x00]), + ); + }); + + it("encodes integers as integers when forceIntegerToFloat=false", () => { + assert.deepStrictEqual(encode(3, { forceIntegerToFloat: false }), Uint8Array.from([0x3])); + }); + }); + + context("ignoreUndefined", () => { + it("encodes { foo: undefined } as is by default", () => { + assert.deepStrictEqual(decode(encode({ foo: undefined, bar: 42 })), { foo: null, bar: 42 }); + }); + + it("encodes { foo: undefined } as is with `ignoreUndefined: false`", () => { + assert.deepStrictEqual(decode(encode({ foo: undefined, bar: 42 }, { ignoreUndefined: false })), { + foo: null, + bar: 42, + }); + }); + + it("encodes { foo: undefined } to {} with `ignoreUndefined: true`", () => { + assert.deepStrictEqual(decode(encode({ foo: undefined, bar: 42 }, { ignoreUndefined: true })), { bar: 42 }); + }); + }); + + context("ArrayBuffer as buffer", () => { + const buffer = encode([1, 2, 3]); + const arrayBuffer = buffer.buffer.slice(buffer.byteOffset, buffer.byteLength); + assert.deepStrictEqual(decode(arrayBuffer), decode(buffer)); + }); +}); diff --git a/test/karma-run.ts b/test/karma-run.ts new file mode 100644 index 00000000..483ee016 --- /dev/null +++ b/test/karma-run.ts @@ -0,0 +1,17 @@ +/* eslint-disable */ +// the util module requires process.env +(globalThis as any).process = { + env: {}, +}; + +(globalThis as any).Buffer = require("buffer").Buffer; + +// import "util" first, + // because core-js breaks the util polyfill (https://github.com/browserify/node-util) on IE11.
+require("util"); + +require("core-js"); + +const testsContext = (require as any).context(".", true, /\.test\.ts$/); + +testsContext.keys().forEach(testsContext); diff --git a/test/last.bin b/test/last.bin deleted file mode 100644 index f14e69b3..00000000 --- a/test/last.bin +++ /dev/null @@ -1 +0,0 @@ -ab?|hr˿\( \ No newline at end of file diff --git a/test/last.php b/test/last.php deleted file mode 100644 index 1a892e47..00000000 --- a/test/last.php +++ /dev/null @@ -1,8 +0,0 @@ - diff --git a/test/msgpack-ext.test.ts b/test/msgpack-ext.test.ts new file mode 100644 index 00000000..15244cb0 --- /dev/null +++ b/test/msgpack-ext.test.ts @@ -0,0 +1,33 @@ +import assert from "assert"; +import { encode, decode, ExtData } from "../src/index.ts"; + +function seq(n: number) { + const a: Array = []; + for (let i = 0; i < n; i++) { + a.push((i + 1) % 0xff); + } + return Uint8Array.from(a); +} + +describe("msgpack-ext", () => { + const SPECS = { + FIXEXT1: [0xd4, new ExtData(0, seq(1))], + FIXEXT2: [0xd5, new ExtData(0, seq(2))], + FIXEXT4: [0xd6, new ExtData(0, seq(4))], + FIXEXT8: [0xd7, new ExtData(0, seq(8))], + FIXEXT16: [0xd8, new ExtData(0, seq(16))], + EXT8: [0xc7, new ExtData(0, seq(17))], + EXT16: [0xc8, new ExtData(0, seq(0x100))], + EXT32: [0xc9, new ExtData(0, seq(0x10000))], + } as Record; + + for (const name of Object.keys(SPECS)) { + const [msgpackType, extData] = SPECS[name]!; + + it(`preserves ExtData by decode(encode(${name}))`, () => { + const encoded = encode(extData); + assert.strictEqual(encoded[0], msgpackType); + assert.deepStrictEqual(decode(encoded), extData); + }); + } +}); diff --git a/test/msgpack-test-suite.test.ts b/test/msgpack-test-suite.test.ts new file mode 100644 index 00000000..d42b0db7 --- /dev/null +++ b/test/msgpack-test-suite.test.ts @@ -0,0 +1,119 @@ +import assert from "assert"; +import util from "util"; +import { Exam } from "msgpack-test-js"; +import { MsgTimestamp } from "msg-timestamp"; +import { encode, decode, ExtensionCodec, EXT_TIMESTAMP, encodeTimeSpecToTimestamp } from "../src/index.ts"; + +const extensionCodec = new ExtensionCodec(); +extensionCodec.register({ + type: EXT_TIMESTAMP, + encode: (input) => { + if (input instanceof MsgTimestamp) { + return encodeTimeSpecToTimestamp({ + sec: input.getTime(), + nsec: input.getNano(), + }); + } else { + return null; + } + }, + decode: (data: Uint8Array) => { + return MsgTimestamp.parse(Buffer.from(data)); + }, +}); + +const TEST_TYPES = { + array: 1, + bignum: 0, // TODO + binary: 1, + bool: 1, + map: 1, + nil: 1, + number: 1, + string: 1, + timestamp: 1, +}; + +describe("msgpack-test-suite", () => { + Exam.getExams(TEST_TYPES).forEach((exam) => { + const types = exam.getTypes(TEST_TYPES); + const first = types[0]!; + const title = `${first}: ${exam.stringify(first)}`; + it(`encodes ${title}`, () => { + types.forEach((type) => { + const value = exam.getValue(type); + const buffer = Buffer.from(encode(value, { extensionCodec })); + + if (exam.matchMsgpack(buffer)) { + assert(true, exam.stringify(type)); + } else { + const msg = `encode(${util.inspect(value)}): expect ${util.inspect(buffer)} to be one of ${util.inspect( + exam.getMsgpacks(), + )}`; + assert(false, msg); + } + }); + }); + + it(`decodes ${title}`, () => { + const msgpacks = exam.getMsgpacks(); + msgpacks.forEach((encoded, idx) => { + const value = decode(encoded, { extensionCodec }); + if (exam.matchValue(value)) { + assert(true, exam.stringify(idx)); + } else { + const values = exam.getTypes().map((type) => exam.getValue(type)); + const 
msg = `decode(${util.inspect(encoded)}): expect ${util.inspect(value)} to be one of ${util.inspect( + values, + )}`; + assert(false, msg); + } + }); + }); + }); + + context("specs not covered by msgpack-test-js", () => { + // by detecting test coverage + const SPECS = { + FLOAT64_POSITIVE_INF: Number.POSITIVE_INFINITY, + FLOAT64_NEGATIVE_INF: Number.NEGATIVE_INFINITY, + FLOAT64_NAN: Number.NaN, + STR16: "a".repeat(0x100), + STR16_MBS: "🌏".repeat(0x100), + STR32: "b".repeat(0x10_000), + STR32_MBS: "🍣".repeat(0x10_000), + STR32LARGE: "c".repeat(0x50_000), // may cause "RangeError: Maximum call stack size exceeded" in simple implelementions + STR_INCLUDING_NUL: "foo\0bar\0", + STR_BROKEN_FF: "\xff", + BIN16: new Uint8Array(0x100).fill(0xff), + BIN32: new Uint8Array(0x10_000).fill(0xff), + BIN32LARGE: new Uint8Array(0x50_000).fill(0xff), // regression: caused "RangeError: Maximum call stack size exceeded" + ARRAY16: new Array(0x100).fill(true), + ARRAY32: new Array(0x10000).fill(true), + MAP16: new Array(0x100).fill(null).reduce>((acc, _val, i) => { + acc[`k${i}`] = i; + return acc; + }, {}), + MAP32: new Array(0x10000).fill(null).reduce>((acc, _val, i) => { + acc[`k${i}`] = i; + return acc; + }, {}), + MIXED: new Array(0x10).fill(Number.MAX_SAFE_INTEGER), + } as Record; + + for (const name of Object.keys(SPECS)) { + const value = SPECS[name]; + + it(`encodes and decodes ${name}`, () => { + const encoded = encode(value); + assert.deepStrictEqual(decode(new Uint8Array(encoded)), value); + }); + } + }); + + describe("encoding in minimum values", () => { + it("int 8", () => { + assert.deepStrictEqual(encode(-128), Uint8Array.from([0xd0, 0x80])); + }); + }); +}); diff --git a/test/prototype-pollution.test.ts b/test/prototype-pollution.test.ts new file mode 100644 index 00000000..29c1b53d --- /dev/null +++ b/test/prototype-pollution.test.ts @@ -0,0 +1,58 @@ +import { throws } from "assert"; +import { encode, decode, DecodeError } from "../src/index.ts"; + +describe("prototype pollution", () => { + context("__proto__ exists as a map key", () => { + it("raises DecodeError in decoding", () => { + const o = { + foo: "bar", + }; + // override __proto__ as an enumerable property + Object.defineProperty(o, "__proto__", { + value: new Date(0), + enumerable: true, + }); + const encoded = encode(o); + + throws(() => { + decode(encoded); + }, DecodeError); + }); + }); + + context("constructor exists as a map key", () => { + it("raises DecodeError in decoding", () => { + const o = { + foo: "bar", + }; + // override constructor as an enumerable property + Object.defineProperty(o, "constructor", { + value: new Date(0), + enumerable: true, + }); + const encoded = encode(o); + + throws(() => { + decode(encoded); + }, DecodeError); + }); + }); + + context("prototype exists as a map key", () => { + it("raises DecodeError in decoding", () => { + const o = { + foo: "bar", + }; + // override prototype as an enumerable property + Object.defineProperty(o, "prototype", { + value: new Date(0), + enumerable: true, + }); + const encoded = encode(o); + + throws(() => { + decode(encoded); + }, DecodeError); + }); + }); +}); diff --git a/test/put.bin b/test/put.bin deleted file mode 100644 index f14e69b3..00000000 --- a/test/put.bin +++ /dev/null @@ -1 +0,0 @@ -ab?|hr˿\( \ No newline at end of file diff --git a/test/readme.test.ts b/test/readme.test.ts new file mode 100644 index 00000000..9aff528d --- /dev/null +++ b/test/readme.test.ts @@ -0,0 +1,24 @@ +import { deepStrictEqual } from "assert"; +import { encode, decode } from 
"../src/index.ts"; + +describe("README", () => { + context("## Synopsis", () => { + it("runs", () => { + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), + }; + + const encoded = encode(object); + // encoded is an Uint8Array instance + + deepStrictEqual(decode(encoded), object); + }); + }); +}); diff --git a/test/reuse-instances-with-extensions.test.ts b/test/reuse-instances-with-extensions.test.ts new file mode 100644 index 00000000..f5396258 --- /dev/null +++ b/test/reuse-instances-with-extensions.test.ts @@ -0,0 +1,48 @@ +// https://github.com/msgpack/msgpack-javascript/issues/195 + +import { deepStrictEqual } from "assert"; +import { Encoder, Decoder, ExtensionCodec } from "../src/index.ts"; + +const MSGPACK_EXT_TYPE_BIGINT = 0; + +function registerCodecs(context: MsgPackContext) { + const { extensionCodec, encode, decode } = context; + + extensionCodec.register({ + type: MSGPACK_EXT_TYPE_BIGINT, + encode: (value) => (typeof value === "bigint" ? encode(value.toString()) : null), + decode: (data) => BigInt(decode(data) as string), + }); +} + +class MsgPackContext { + readonly encode: (value: unknown) => Uint8Array; + readonly decode: (buffer: BufferSource | ArrayLike) => unknown; + readonly extensionCodec = new ExtensionCodec(); + + constructor() { + const encoder = new Encoder({ extensionCodec: this.extensionCodec, context: this }); + const decoder = new Decoder({ extensionCodec: this.extensionCodec, context: this }); + + this.encode = encoder.encode.bind(encoder); + this.decode = decoder.decode.bind(decoder); + + registerCodecs(this); + } +} + +describe("reuse instances with extensions", () => { + it("should encode and decode a bigint", () => { + const context = new MsgPackContext(); + const buf = context.encode(BigInt(42)); + const data = context.decode(buf); + deepStrictEqual(data, BigInt(42)); + }); + + it("should encode and decode bigints", () => { + const context = new MsgPackContext(); + const buf = context.encode([BigInt(1), BigInt(2), BigInt(3)]); + const data = context.decode(buf); + deepStrictEqual(data, [BigInt(1), BigInt(2), BigInt(3)]); + }); +}); diff --git a/test/reuse-instances.test.ts b/test/reuse-instances.test.ts new file mode 100644 index 00000000..2c971cd4 --- /dev/null +++ b/test/reuse-instances.test.ts @@ -0,0 +1,154 @@ +import { deepStrictEqual } from "assert"; +import { Encoder, Decoder, decode } from "../src/index.ts"; + +const createStream = async function* (...args: any) { + for (const item of args) { + yield item; + } +}; + +const N = 10; + +describe("shared instances", () => { + context("encode() and decodeSync()", () => { + it("runs multiple times", () => { + const encoder = new Encoder(); + const decoder = new Decoder(); + + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), + }; + + for (let i = 0; i < N; i++) { + const encoded: Uint8Array = encoder.encode(object); + deepStrictEqual(decoder.decode(encoded), object, `#${i}`); + } + }); + }); + + context("encode() and decodeAsync()", () => { + it("runs multiple times", async () => { + const encoder = new Encoder(); + const decoder = new Decoder(); + + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { 
foo: "bar" }, + timestampExt: new Date(), + }; + + for (let i = 0; i < N; i++) { + const encoded: Uint8Array = encoder.encode(object); + deepStrictEqual(await decoder.decodeAsync(createStream(encoded)), object, `#${i}`); + } + }); + }); + + context("encode() and decodeStream()", () => { + it("runs multiple times", async () => { + const encoder = new Encoder(); + const decoder = new Decoder(); + + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), + }; + + for (let i = 0; i < N; i++) { + const encoded: Uint8Array = encoder.encode(object); + const a: Array = []; + for await (const item of decoder.decodeStream(createStream(encoded))) { + a.push(item); + } + deepStrictEqual(a, [object], `#${i}`); + } + }); + }); + + context("encode() and decodeArrayStream()", () => { + it("runs multiple times", async () => { + const encoder = new Encoder(); + const decoder = new Decoder(); + + const object = { + nil: null, + integer: 1, + float: Math.PI, + string: "Hello, world!", + binary: Uint8Array.from([1, 2, 3]), + array: [10, 20, 30], + map: { foo: "bar" }, + timestampExt: new Date(), + }; + + for (let i = 0; i < N; i++) { + const encoded: Uint8Array = encoder.encode([object]); + const a: Array = []; + for await (const item of decoder.decodeStream(createStream(encoded))) { + a.push(item); + } + deepStrictEqual(a, [[object]], `#${i}`); + } + }); + + context("regression #212", () => { + it("runs multiple times", () => { + const encoder = new Encoder(); + const decoder = new Decoder(); + + const data1 = { + isCommunication: false, + isWarning: false, + alarmId: "619f65a2774abf00568b7210", + intervalStart: "2022-05-20T12:00:00.000Z", + intervalStop: "2022-05-20T13:00:00.000Z", + triggeredAt: "2022-05-20T13:00:00.000Z", + component: "someComponent", + _id: "6287920245a582301475627d", + }; + + const data2 = { + foo: "bar", + }; + + const arr = [data1, data2]; + const enc = arr.map((x) => [x, encoder.encode(x)] as const); + + enc.forEach(([orig, acc]) => { + const des = decoder.decode(acc); + deepStrictEqual(des, orig); + }); + }); + }); + + context("Encoder#encodeSharedRef()", () => { + it("returns the shared reference", () => { + const encoder = new Encoder(); + + const a = encoder.encodeSharedRef(true); + const b = encoder.encodeSharedRef(false); + + deepStrictEqual(decode(a), decode(b)); // yes, this is the expected behavior + deepStrictEqual(a.buffer, b.buffer); + }); + }); + }); +}); diff --git a/test/upload.php b/test/upload.php deleted file mode 100644 index 7c672a3c..00000000 --- a/test/upload.php +++ /dev/null @@ -1,13 +0,0 @@ - diff --git a/test/uupaa.js b/test/uupaa.js deleted file mode 100644 index 81f41612..00000000 --- a/test/uupaa.js +++ /dev/null @@ -1,9126 +0,0 @@ -/*!{id:"uupaa.js",ver:0.8,license:"MIT",author:"uupaa.js@gmail.com"}*/ - -// Firefox 3.5(end of 2010-08) -//