Move to packages/bls

Cayman 2019-08-05 10:48:26 -05:00
commit b651b83acc
35 changed files with 1986 additions and 0 deletions

15
.babel-register Normal file

@ -0,0 +1,15 @@
/*
See
https://github.com/babel/babel/issues/8652
https://github.com/babel/babel/pull/6027
Babel isn't currently configured by default to read .ts files and
can only be configured to do so via cli or configuration below.
This file is used by mocha to interpret test files using a properly
configured babel.
This can (probably) be removed in babel 8.x.
*/
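// Used via the test scripts in package.json, e.g.:
//   mocha -r ./.babel-register "tests/unit/**/*.test.ts"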
require('@babel/register')({
extensions: ['.ts'],
})

11
.babelrc Normal file

@ -0,0 +1,11 @@
{
"presets": [
"@babel/env",
"@babel/typescript"
],
"plugins": [
"@babel/proposal-class-properties",
"@babel/proposal-object-rest-spread",
"rewire-exports"
]
}

66
.gitignore vendored Normal file

@ -0,0 +1,66 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
# next.js build output
.next
.idea/**
dist/
lib/

201
LICENSE Normal file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

31
README.md Normal file

@ -0,0 +1,31 @@
### ChainSafe ETH2.0 Projects
Note:
There is a lot of work being done on core infrastructural pieces for Eth2.0. Contributions to any of the repositories below would be greatly appreciated. All the libraries are written in TypeScript (or are in the process of being converted from pure JS to TypeScript):
<br />
-- [PM / Meta Repo](https://github.com/ChainSafe/Sharding)<br />
|-- [Beacon Chain](https://github.com/ChainSafe/lodestar_chain)<br />
|-- [Simple Serialize (SSZ)](https://github.com/ChainSafe/ssz-js)<br />
|-- [Fixed Size Numbers](https://github.com/ChainSafe/fixed-sized-numbers-ts/)<br />
|-- [BLS Signatures and Signature Aggregation](https://github.com/ChainSafe/bls-js)<br />
# bls-js
[![Build Status](https://travis-ci.org/ChainSafe/bls-js.svg?branch=master)](https://travis-ci.org/ChainSafe/bls-js)
[![codecov](https://codecov.io/gh/ChainSafe/bls-js/branch/master/graph/badge.svg)](https://codecov.io/gh/ChainSafe/bls-js)
[![](https://badges.gitter.im/chainsafe/lodestar.svg)](https://gitter.im/chainsafe/lodestar?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
![ETH2.0_Spec_Version 0.8.0](https://img.shields.io/badge/ETH2.0_Spec_Version-0.8.0-2e86c1.svg)
This is a JavaScript library that implements BLS (Boneh-Lynn-Shacham) signatures and supports signature aggregation.
>[spec](https://github.com/ethereum/eth2.0-specs/blob/master/specs/bls_signature.md)
>[test vectors](https://github.com/ethereum/eth2.0-spec-tests/tree/master/tests/bls)
## Usage
- `yarn add @chainsafe/bls`
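
A minimal sign-and-verify sketch based on the functions exported from `src/index.ts` (the message and domain values below are only illustrative):

```typescript
import bls from "@chainsafe/bls";
import {sha256} from "js-sha256";

// generate a keypair and sign a 32-byte message hash under an 8-byte domain
const keypair = bls.generateKeyPair();
const messageHash = Buffer.from(sha256.arrayBuffer("Test"));
const domain = Buffer.alloc(8, 0);

const signature = bls.sign(
  keypair.privateKey.toBytes(),
  messageHash,
  domain
);

// true when the signature matches the message hash, public key and domain
const valid = bls.verify(
  keypair.publicKey.toBytesCompressed(),
  messageHash,
  signature,
  domain
);
```

`aggregateSignatures`, `aggregatePubkeys` and `verifyMultiple` are exported in the same way for working with aggregated signatures.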
## Development
- `git clone --recursive git@github.com:ChainSafe/bls-js.git`
- `yarn install`
- `yarn test`

72
package.json Normal file

@ -0,0 +1,72 @@
{
"name": "@chainsafe/bls",
"version": "0.1.6",
"description": "Implementation of bls signature verification for ethereum 2.0",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"homepage": "https://github.com/chainsafe/bls-js",
"author": "ChainSafe Systems",
"license": "Apache-2.0",
"files": [
"lib"
],
"keywords": [
"ethereum",
"serenity",
"signature verification",
"bls"
],
"scripts": {
"prebuild": "rm -rf lib && rm -rf dist",
"build": "yarn build-lib && yarn build-web && yarn build-types",
"build-lib": "babel src -x .ts -d lib",
"build-types": "tsc --declaration --emitDeclarationOnly",
"build-web": "webpack --mode production --entry ./lib/web.js --output ./dist/bls.min.js",
"check-types": "tsc --noEmit",
"lint": "eslint --ext .ts src/",
"lint-fix": "eslint --ext .ts src/ --fix",
"pretest": "yarn check-types",
"prepublishOnly": "yarn build",
"test:unit": "mocha -r ./.babel-register \"tests/unit/**/*.test.ts\"",
"test:spec": "mocha -r ./.babel-register \"tests/spec/**/*.test.ts\"",
"test": "nyc -r lcov -e .ts -x \"*.test.ts\" mocha -r ./.babel-register \"tests/**/*.test.ts\" && nyc report",
"coverage": "codecov"
},
"dependencies": {
"@chainsafe/milagro-crypto-js": "0.1.3",
"assert": "^1.4.1",
"js-sha256": "^0.9.0",
"secure-random": "^1.1.1"
},
"devDependencies": {
"@babel/cli": "^7.2.3",
"@babel/core": "^7.3.3",
"@babel/plugin-proposal-class-properties": "^7.3.3",
"@babel/plugin-proposal-object-rest-spread": "^7.3.2",
"@babel/plugin-transform-runtime": "^7.3.4",
"@babel/preset-env": "^7.4.4",
"@babel/preset-typescript": "^7.3.3",
"@babel/register": "^7.0.0",
"@babel/runtime": "^7.4.4",
"@chainsafe/eth2.0-spec-test-util": "^0.2.3",
"@types/assert": "^1.4.2",
"@types/chai": "^4.1.7",
"@types/mocha": "^5.2.5",
"@types/node": "^10.12.17",
"@typescript-eslint/eslint-plugin": "^1.3.0",
"@typescript-eslint/parser": "^1.3.0",
"babel-plugin-rewire-exports": "^1.1.0",
"chai": "^4.2.0",
"codecov": "^3.1.0",
"eslint": "^5.14.1",
"js-yaml": "^3.13.1",
"mocha": "^5.2.0",
"nyc": "^13.3.0",
"sinon": "^7.2.7",
"supertest": "^4.0.2",
"ts-node": "^7.0.1",
"typescript": "^3.2.1",
"webpack": "^4.30.0",
"webpack-cli": "^3.3.2"
}
}

5
src/@types/keccak256/index.d.ts vendored Normal file

@ -0,0 +1,5 @@
declare module 'keccak256' {
export default function hash(a: Buffer | (Buffer | string | number)[]): Buffer;
}

3
src/@types/secure-random/index.d.ts vendored Normal file

@ -0,0 +1,3 @@
declare module "secure-random" {
export function randomBuffer(length: number): Buffer;
}

4
src/constants.ts Normal file

@ -0,0 +1,4 @@
export const SECRET_KEY_LENGTH = 32;
export const FP_POINT_LENGTH = 48;
export const PUBLIC_KEY_LENGTH = FP_POINT_LENGTH;
export const G2_HASH_PADDING = 16;

3
src/ctx.ts Normal file

@ -0,0 +1,3 @@
import CTX from "@chainsafe/milagro-crypto-js";
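// shared milagro context instantiated for the BLS12-381 curve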
export default new CTX("BLS381");

14
src/helpers/ec-pairing.ts Normal file

@ -0,0 +1,14 @@
import {G1point} from "./g1point";
import {G2point} from "./g2point";
import {FP12} from "@chainsafe/milagro-crypto-js/src/fp12";
import ctx from "../ctx";
export class ElipticCurvePairing {
public static pair(p1: G1point, p2: G2point): FP12 {
const e = ctx.PAIR.ate(p2.getPoint(), p1.getPoint());
return ctx.PAIR.fexp(e);
}
}

122
src/helpers/g1point.ts Normal file

@ -0,0 +1,122 @@
import {BIG} from "@chainsafe/milagro-crypto-js/src/big";
import {ECP} from "@chainsafe/milagro-crypto-js/src/ecp";
import ctx from "../ctx";
import {bytes48} from "../types";
import assert from "assert";
import {calculateYFlag, getModulus} from "./utils";
import * as random from "secure-random";
import {FP_POINT_LENGTH} from "../constants";
export class G1point {
private point: ECP;
public constructor(point: ECP) {
this.point = point;
}
public mul(value: BIG): G1point {
const newPoint = this.point.mul(value);
return new G1point(newPoint);
}
public add(other: G1point): G1point {
const sum = new ctx.ECP();
sum.add(this.point);
sum.add(other.point);
sum.affine();
return new G1point(sum);
}
public equal(other: G1point): boolean {
return this.point.equals(other.point);
}
public toBytes(): bytes48 {
const buffer = Buffer.alloc(FP_POINT_LENGTH, 0);
this.point.getX().tobytearray(buffer, 0);
return buffer;
}
public getPoint(): ECP {
return this.point;
}
public toBytesCompressed(): bytes48 {
const output = this.toBytes();
// Flags live in the top three bits of the first byte:
// C (bit 7) is always set for the compressed encoding,
// B (bit 6) marks the point at infinity,
// A (bit 5) marks the "larger" Y coordinate (see calculateYFlag).
const c = true;
const b = this.point.is_infinity();
const a = !b && calculateYFlag(this.point.getY());
const flags = ((a ? 1 << 5 : 0) | (b ? 1 << 6 : 0) | (c ? 1 << 7 : 0));
const mask = 31;
output[0] &= mask;
output[0] |= flags;
return output;
}
public static fromBytesCompressed(value: bytes48): G1point {
assert(value.length === FP_POINT_LENGTH, `Expected g1 compressed input to have ${FP_POINT_LENGTH} bytes`);
value = Buffer.from(value);
const aIn = (value[0] & (1 << 5)) != 0;
const bIn = (value[0] & (1 << 6)) != 0;
const cIn = (value[0] & (1 << 7)) != 0;
value[0] &= 31;
if (!cIn) {
throw new Error("The serialised input does not have the C flag set.");
}
const x = ctx.BIG.frombytearray(value, 0);
if (bIn) {
if (!aIn && x.iszilch()) {
// This is a correctly formed serialisation of infinity
return new G1point(new ctx.ECP());
} else {
// The input is malformed
throw new Error(
"The serialised input has B flag set, but A flag is set, or X is non-zero.");
}
}
const modulus = getModulus();
if (ctx.BIG.comp(modulus, x) <= 0) {
throw new Error("X coordinate is too large.");
}
let point = new ctx.ECP();
point.setx(x);
if (point.is_infinity()) {
throw new Error("X coordinate is not on the curve.");
}
// Did we get the right branch of the sqrt?
if (!point.is_infinity() && aIn != calculateYFlag(point.getY())) {
// We didn't: so choose the other branch of the sqrt.
const x = new ctx.FP(point.getX());
const yneg = new ctx.FP(point.getY());
yneg.neg();
point.setxy(x.redc(), yneg.redc())
}
return new G1point(point);
}
public static generator(): G1point {
return new G1point(ctx.ECP.generator());
}
public static random(): G1point {
let ecp: ECP;
do {
ecp = new ctx.ECP();
ecp.setx(
ctx.BIG.frombytearray(
random.randomBuffer(FP_POINT_LENGTH),
0
)
)
} while (ecp.is_infinity());
return new G1point(ecp);
}
}

248
src/helpers/g2point.ts Normal file

@ -0,0 +1,248 @@
import {BIG} from "@chainsafe/milagro-crypto-js/src/big";
import {ECP2} from "@chainsafe/milagro-crypto-js/src/ecp2";
import {BLSDomain, bytes32, bytes96} from "../types";
import { sha256 } from 'js-sha256';
import ctx from "../ctx";
import * as random from "secure-random";
import {calculateYFlag, getModulus, padLeft} from "./utils";
import assert from "assert";
import {FP_POINT_LENGTH, G2_HASH_PADDING} from "../constants";
export class G2point {
private point: ECP2;
public constructor(point: ECP2) {
this.point = point;
}
public add(other: G2point): G2point {
const sum = new ctx.ECP2();
sum.add(this.point);
sum.add(other.point);
sum.affine();
return new G2point(sum);
}
public mul(value: BIG): G2point {
const newPoint = this.point.mul(value);
return new G2point(newPoint);
}
public equal(other: G2point): boolean {
return this.point.equals(other.point);
}
public getPoint(): ECP2 {
return this.point;
}
public toBytesCompressed(): Buffer {
const xReBytes = Buffer.alloc(FP_POINT_LENGTH, 0);
const xImBytes = Buffer.alloc(FP_POINT_LENGTH, 0);
this.point.getX().getA().tobytearray(xReBytes, 0);
this.point.getX().getB().tobytearray(xImBytes, 0);
const c1 = true;
const b1 = this.point.is_infinity();
const a1 = !b1 && calculateYFlag(this.point.getY().getB());
const flags = ((a1 ? 1 << 5 : 0) | (b1 ? 1 << 6 : 0) | (c1 ? 1 << 7 : 0));
const mask = 31;
xImBytes[0] &= mask;
xImBytes[0] |= flags;
xReBytes[0] &= mask;
return Buffer.concat([
xImBytes,
xReBytes
]);
}
public static hashToG2(message: bytes32, domain: BLSDomain): G2point {
const padding = Buffer.alloc(G2_HASH_PADDING, 0);
const xReBytes = Buffer.concat([
padding,
Buffer.from(sha256.arrayBuffer(
Buffer.concat([
message,
padLeft(domain, 8),
Buffer.from('01', 'hex')
])
))
]);
const xImBytes = Buffer.concat([
padding,
Buffer.from(sha256.arrayBuffer(
Buffer.concat([
message,
padLeft(domain, 8),
Buffer.from('02', 'hex')
])
))
]);
const xRe = ctx.BIG.frombytearray(xReBytes, 0);
const xIm = ctx.BIG.frombytearray(xImBytes, 0);
const one = new ctx.BIG(1);
let point = new ctx.ECP2();
point.setx(new ctx.FP2(xRe, xIm));
while (point.is_infinity()) {
xRe.add(one);
xRe.norm();
point = new ctx.ECP2();
point.setx(new ctx.FP2(xRe, xIm))
}
return new G2point(G2point.scaleWithCofactor(G2point.normaliseY(point)));
}
public static fromCompressedBytes(value: bytes96): G2point {
assert(value.length === 2 * FP_POINT_LENGTH, 'Expected signature of 96 bytes');
value = Buffer.from(value);
const xImBytes = value.slice(0, FP_POINT_LENGTH);
const xReBytes = value.slice(FP_POINT_LENGTH);
const aIn = (xImBytes[0] & (1 << 5)) != 0;
const bIn = (xImBytes[0] & (1 << 6)) != 0;
const cIn = (xImBytes[0] & (1 << 7)) != 0;
//clear bits
xImBytes[0] &= 31;
if((xReBytes[0] & 224) != 0) {
throw new Error("The input has non-zero a2, b2 or c2 flag on xRe");
}
if(!cIn) {
throw new Error("The serialised input does not have the C flag set.");
}
const xIm = ctx.BIG.frombytearray(xImBytes, 0);
const xRe = ctx.BIG.frombytearray(xReBytes, 0);
if (bIn) {
if (!aIn
&& xIm.iszilch()
&& xRe.iszilch() ) {
// This is a correctly formed serialisation of infinity
return new G2point(new ctx.ECP2());
} else {
// The input is malformed
throw new Error(
"The serialised input has B flag set, but A flag is set, or X is non-zero.");
}
}
const modulus = getModulus();
if(ctx.BIG.comp(modulus, xRe) <= 0 || ctx.BIG.comp(modulus, xIm) <= 0) {
throw new Error(
"The deserialised X real or imaginary coordinate is too large.");
}
let point = new ctx.ECP2();
point.setx(new ctx.FP2(xRe, xIm));
if(point.is_infinity()) {
throw new Error("X coordinate is not on the curve.");
}
if (!point.is_infinity() && aIn != calculateYFlag(point.getY().getB())) {
// We didn't: so choose the other branch of the sqrt.
const x = point.getX();
const yneg = point.getY();
yneg.neg();
point.setxy(x, yneg);
}
return new G2point(point);
}
public static fromUncompressedInput(
xReBytes: Buffer,
xImBytes: Buffer,
yReBytes: Buffer,
yImBytes: Buffer,
zReBytes: Buffer,
zImBytes: Buffer): G2point {
const xRe = ctx.BIG.frombytearray(padLeft(xReBytes, FP_POINT_LENGTH), 0);
const xIm = ctx.BIG.frombytearray(padLeft(xImBytes, FP_POINT_LENGTH), 0);
const yRe = ctx.BIG.frombytearray(padLeft(yReBytes, FP_POINT_LENGTH), 0);
const yIm = ctx.BIG.frombytearray(padLeft(yImBytes, FP_POINT_LENGTH), 0);
const zRe = ctx.BIG.frombytearray(padLeft(zReBytes, FP_POINT_LENGTH), 0);
const zIm = ctx.BIG.frombytearray(padLeft(zImBytes, FP_POINT_LENGTH), 0);
const x = new ctx.FP2(xRe, xIm);
const y = new ctx.FP2(yRe, yIm);
const z = new ctx.FP2(zRe, zIm);
z.inverse();
x.mul(z);
x.reduce();
y.mul(z);
y.reduce();
const point = new ctx.ECP2();
point.setxy(x, y);
return new G2point(point);
}
public static random(): G2point {
let point: ECP2;
do {
point = new ctx.ECP2();
point.setx(
new ctx.FP2(
ctx.BIG.frombytearray(
random.randomBuffer(FP_POINT_LENGTH),
0
),
ctx.BIG.frombytearray(
random.randomBuffer(FP_POINT_LENGTH),
0
)
)
)
} while (point.is_infinity());
return new G2point(point);
}
public static scaleWithCofactor(point: ECP2): ECP2 {
const upper = ctx.BIG.frombytearray(
Buffer.from(
"0000000000000000000000000000000005d543a95414e7f1091d50792876a202cd91de4547085abaa68a205b2e5a7ddf",
"hex"
),
0
);
const lower = ctx.BIG.frombytearray(
Buffer.from(
"00000000000000000000000000000000a628f1cb4d9e82ef21537e293a6691ae1616ec6e786f0c70cf1c38e31c7238e5",
"hex"
),
0
);
const shift = ctx.BIG.frombytearray(
Buffer.from(
"000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000",
"hex"
),
0
);
let sum = new ctx.ECP2();
sum.copy(point);
sum = sum.mul(upper);
sum = sum.mul(shift);
let tmp = new ctx.ECP2();
tmp.copy(point);
tmp = tmp.mul(lower);
sum.add(tmp);
return sum;
}
public static normaliseY(point: ECP2): ECP2 {
const y = point.getY();
const yNeg = new ctx.FP2(y);
yNeg.neg();
if (ctx.BIG.comp(y.getB(), yNeg.getB()) < 0
|| ((ctx.BIG.comp(y.getB(), yNeg.getB()) == 0)
&& ctx.BIG.comp(y.getA(), yNeg.getA()) < 0)
) {
const newPoint = new ctx.ECP2();
newPoint.setxy(point.getX(), yNeg);
return newPoint
} else {
return point;
}
}
}

34
src/helpers/utils.ts Normal file

@ -0,0 +1,34 @@
import assert from "assert";
import {BIG} from "@chainsafe/milagro-crypto-js/src/big";
import ctx from "../ctx";
/**
* Pads byte array with zeroes on left side up to desired length.
* Throws if source is larger than desired result.
* @param source
* @param length
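* @example padLeft(Buffer.from([1]), 2) // => <Buffer 00 01>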
*/
export function padLeft(source: Buffer, length: number): Buffer {
assert(source.length <= length, 'Given array must be smaller or equal to desired array size');
const result = Buffer.alloc(length, 0);
source.copy(result, length - source.length);
return result;
}
// TODO: find a way to convert ctx.ROM_FIELD.MODULUS to BIG (MODULUS basebit = 58, BIG basebit = 23)
export function getModulus(): BIG {
return ctx.BIG.frombytearray(
Buffer.from(
'1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab',
'hex'
),
0
)
}
// Returns true when (2 * yIm) div modulus == 1, i.e. yIm is the "larger" of the
// two possible Y values; used as the A flag in compressed serialization.
export function calculateYFlag(yIm: BIG): boolean {
const tmp = new ctx.BIG(yIm);
tmp.add(yIm);
tmp.div(getModulus());
return tmp.isunity();
}

132
src/index.ts Normal file

@ -0,0 +1,132 @@
import {
BLSDomain,
BLSSecretKey,
BLSPubkey,
BLSSignature,
bytes32,
bytes8
} from "./types";
import {Keypair} from "./keypair";
import {PrivateKey} from "./privateKey";
import {G2point} from "./helpers/g2point";
import {G1point} from "./helpers/g1point";
import {PublicKey} from "./publicKey";
import {Signature} from "./signature";
import {ElipticCurvePairing} from "./helpers/ec-pairing";
import ctx from "./ctx";
/**
* Generates a new secret and public key pair.
*/
function generateKeyPair(): Keypair {
return Keypair.generate();
}
/**
* Generates a public key from the given secret key.
* @param {BLSSecretKey} secretKey
*/
function generatePublicKey(secretKey: BLSSecretKey): BLSPubkey {
const keypair = new Keypair(PrivateKey.fromBytes(secretKey));
return keypair.publicKey.toBytesCompressed();
}
/**
* Signs given message using secret key.
* @param secretKey
* @param messageHash
* @param domain
*/
function sign(secretKey: BLSSecretKey, messageHash: bytes32, domain: BLSDomain): BLSSignature {
const privateKey = PrivateKey.fromBytes(secretKey);
const hash = G2point.hashToG2(messageHash, domain);
return privateKey.sign(hash).toBytesCompressed();
}
/**
* Combines all given signatures into one.
* @param signatures
*/
function aggregateSignatures(signatures: BLSSignature[]): BLSSignature {
return signatures.map((signature): Signature => {
return Signature.fromCompressedBytes(signature)
}).reduce((previousValue, currentValue): Signature => {
return previousValue.add(currentValue);
}).toBytesCompressed();
}
/**
* Combines all given public keys into a single one.
* @param publicKeys
*/
function aggregatePubkeys(publicKeys: BLSPubkey[]): BLSPubkey {
if(publicKeys.length === 0) {
return new G1point(new ctx.ECP()).toBytesCompressed();
}
return publicKeys.map((publicKey): G1point => {
return G1point.fromBytesCompressed(publicKey)
}).reduce((previousValue, currentValue): G1point => {
return previousValue.add(currentValue);
}).toBytesCompressed();
}
/**
* Verifies that the signature was created over the given message hash by the given public key.
* @param publicKey
* @param messageHash
* @param signature
* @param domain
*/
function verify(publicKey: BLSPubkey, messageHash: bytes32, signature: BLSSignature, domain: bytes8): boolean {
try {
const key = PublicKey.fromBytes(publicKey);
const sig = Signature.fromCompressedBytes(signature);
const g1Generated = G1point.generator();
const e1 = ElipticCurvePairing.pair(key.getPoint(), G2point.hashToG2(messageHash, domain));
const e2 = ElipticCurvePairing.pair(g1Generated, sig.getPoint());
return e1.equals(e2);
} catch (e) {
return false;
}
}
/**
* Verifies an aggregated signature over a list of message hashes, each signed by the corresponding public key.
* @param publicKeys
* @param messageHashes
* @param signature
* @param domain
*/
function verifyMultiple(publicKeys: BLSPubkey[], messageHashes: bytes32[], signature: BLSSignature, domain: bytes8): boolean {
if(publicKeys.length === 0 || publicKeys.length != messageHashes.length) {
return false;
}
try {
const g1Generated = G1point.generator();
const eCombined = new ctx.FP12(1);
publicKeys.forEach((publicKey, index): void => {
const g2 = G2point.hashToG2(messageHashes[index], domain);
eCombined.mul(
ElipticCurvePairing.pair(
PublicKey.fromBytes(publicKey).getPoint(),
g2
)
);
});
const e2 = ElipticCurvePairing.pair(g1Generated, Signature.fromCompressedBytes(signature).getPoint());
return e2.equals(eCombined);
} catch (e) {
return false;
}
}
export default {
generateKeyPair,
generatePublicKey,
sign,
aggregateSignatures,
aggregatePubkeys,
verify,
verifyMultiple
}

31
src/keypair.ts Normal file

@ -0,0 +1,31 @@
import {PublicKey} from "./publicKey";
import {PrivateKey} from "./privateKey";
export class Keypair {
private _publicKey: PublicKey;
private _privateKey: PrivateKey;
public constructor(privateKey: PrivateKey, publicKey?: PublicKey) {
this._privateKey = privateKey;
if(!publicKey) {
this._publicKey = PublicKey.fromPrivateKey(this._privateKey);
} else {
this._publicKey = publicKey;
}
}
public get publicKey(): PublicKey {
return this._publicKey;
}
public get privateKey(): PrivateKey {
return this._privateKey;
}
public static generate(): Keypair {
return new Keypair(PrivateKey.random());
}
}

64
src/privateKey.ts Normal file

@ -0,0 +1,64 @@
import {BIG} from "@chainsafe/milagro-crypto-js/src/big";
import {FP_POINT_LENGTH, SECRET_KEY_LENGTH} from "./constants";
import assert from "assert";
import ctx from "./ctx";
import {padLeft} from "./helpers/utils";
import {G2point} from "./helpers/g2point";
import * as random from "secure-random";
import {BLSDomain, BLSSecretKey, bytes32} from "./types";
export class PrivateKey {
private value: BIG;
public constructor(value: BIG) {
this.value = value;
}
public getValue(): BIG {
return this.value;
}
public sign(message: G2point): G2point {
return message.mul(this.value);
}
public signMessage(message: bytes32, domain: BLSDomain): G2point {
return G2point.hashToG2(message, domain).mul(this.value);
}
public toBytes(): BLSSecretKey {
const buffer = Buffer.alloc(FP_POINT_LENGTH, 0);
this.value.tobytearray(buffer, 0);
return buffer.slice(FP_POINT_LENGTH - SECRET_KEY_LENGTH);
}
public toHexString(): string {
return `0x${this.toBytes().toString('hex')}`;
}
public static fromBytes(bytes: Uint8Array): PrivateKey {
assert(bytes.length === SECRET_KEY_LENGTH, 'Private key should have 32 bytes');
const value = Buffer.from(bytes);
return new PrivateKey(
ctx.BIG.frombytearray(
padLeft(
value,
48
),
0
)
)
}
public static fromHexString(value: string): PrivateKey {
return PrivateKey.fromBytes(
Buffer.from(value.replace('0x', ''), 'hex')
);
}
public static random(): PrivateKey {
return PrivateKey.fromBytes(random.randomBuffer(SECRET_KEY_LENGTH));
}
}

36
src/publicKey.ts Normal file

@ -0,0 +1,36 @@
import {G1point} from "./helpers/g1point";
import {PrivateKey} from "./privateKey";
import {BLSPubkey} from "./types";
export class PublicKey {
private point: G1point;
public constructor(point: G1point) {
this.point = point;
}
public getPoint(): G1point {
return this.point;
}
public toBytesCompressed(): BLSPubkey {
return this.point.toBytesCompressed();
}
public toHexString(): string {
return `0x${this.toBytesCompressed().toString('hex')}`;
}
public static fromPrivateKey(privateKey: PrivateKey): PublicKey {
return new PublicKey(
G1point.generator().mul(privateKey.getValue())
);
}
public static fromBytes(publicKey: BLSPubkey): PublicKey {
return new PublicKey(
G1point.fromBytesCompressed(publicKey)
);
}
}

35
src/signature.ts Normal file

@ -0,0 +1,35 @@
import {G2point} from "./helpers/g2point";
import {BLSSignature} from "./types";
import assert from "assert";
import {FP_POINT_LENGTH} from "./constants";
export class Signature {
private point: G2point;
public constructor(point: G2point) {
this.point = point;
}
public add(other: Signature): Signature {
return new Signature(
this.point.add(other.point)
);
}
public getPoint(): G2point {
return this.point;
}
public toBytesCompressed(): BLSSignature {
return this.point.toBytesCompressed();
}
public static fromCompressedBytes(signature: BLSSignature): Signature {
assert(
signature.length === 2 * FP_POINT_LENGTH,
`Signature must have ${2 * FP_POINT_LENGTH} bytes`
);
return new Signature(G2point.fromCompressedBytes(signature));
}
}

9
src/types.ts Normal file

@ -0,0 +1,9 @@
export type bytes8 = Buffer;
export type bytes32 = Buffer;
export type bytes48 = Buffer;
export type bytes96 = Buffer;
export type BLSDomain = bytes8;
export type BLSPubkey = bytes48;
export type BLSSecretKey = bytes32;
export type BLSSignature = bytes96;

8
src/web.ts Normal file

@ -0,0 +1,8 @@
import bls from "./index"
// eslint-disable-next-line @typescript-eslint/no-explicit-any
// @ts-ignore
(function (window: any) {
window.bls = bls
// @ts-ignore
})(window);


@ -0,0 +1,21 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import bls from "../../src";
import {BLSPubkey} from "../../src/types";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/aggregate_pubkeys/aggregate_pubkeys.yaml"),
bls.aggregatePubkeys,
({input}) => {
const pubKeys: BLSPubkey[] = [];
input.forEach((pubKey: string) => {
pubKeys.push(Buffer.from(pubKey.replace('0x', ''), 'hex'))
});
return [
pubKeys
];
},
({output}) => output,
(result) => `0x${result.toString('hex')}`,
() => false,
);


@ -0,0 +1,22 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import bls from "../../src";
import {BLSSignature} from "../../src/types";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/aggregate_sigs/aggregate_sigs.yaml"),
bls.aggregateSignatures,
({input}) => {
const sigs: BLSSignature[] = [];
input.forEach((sig: string) => {
sigs.push(Buffer.from(sig.replace('0x', ''), 'hex'))
});
return [
sigs
];
},
({output}) => output,
(result) => `0x${result.toString('hex')}`,
() => false,
);


@ -0,0 +1,23 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import {padLeft} from "../../src/helpers/utils";
import {G2point} from "../../src/helpers/g2point";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/msg_hash_g2_compressed/g2_compressed.yaml"),
G2point.hashToG2,
({input}) => {
const domain = padLeft(Buffer.from(input.domain.replace('0x', ''), 'hex'), 8);
return [
Buffer.from(input.message.replace('0x', ''), 'hex'),
domain
];
},
({output}) => {
const xReExpected = padLeft(Buffer.from(output[0].replace('0x', ''), 'hex'), 48);
const xImExpected = padLeft(Buffer.from(output[1].replace('0x', ''), 'hex'), 48);
return '0x' + Buffer.concat([xReExpected, xImExpected]).toString('hex')
},
(result:G2point) => `0x${result.toBytesCompressed().toString('hex')}`,
() => false,
);


@ -0,0 +1,28 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import {padLeft} from "../../src/helpers/utils";
import {G2point} from "../../src/helpers/g2point";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/msg_hash_g2_uncompressed/g2_uncompressed.yaml"),
G2point.hashToG2,
({input}) => {
const domain = padLeft(Buffer.from(input.domain.replace('0x', ''), 'hex'), 8);
return [
Buffer.from(input.message.replace('0x', ''), 'hex'),
domain
];
},
({output}) => {
return '0x' + G2point.fromUncompressedInput(
Buffer.from(output[0][0].replace('0x', ''), 'hex'),
Buffer.from(output[0][1].replace('0x', ''), 'hex'),
Buffer.from(output[1][0].replace('0x', ''), 'hex'),
Buffer.from(output[1][1].replace('0x', ''), 'hex'),
Buffer.from(output[2][0].replace('0x', ''), 'hex'),
Buffer.from(output[2][1].replace('0x', ''), 'hex'),
).toBytesCompressed().toString('hex');
},
(result:G2point) => `0x${result.toBytesCompressed().toString('hex')}`,
() => false,
);


@ -0,0 +1,14 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import bls from "../../src";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/priv_to_pub/priv_to_pub.yaml"),
bls.generatePublicKey,
({input}) => {
return [Buffer.from(input.replace('0x', ''), 'hex')];
},
({output}) => output,
(result) => `0x${result.toString('hex')}`,
() => false,
);


@ -0,0 +1,20 @@
import {join} from "path";
import {describeSpecTest} from "@chainsafe/eth2.0-spec-test-util";
import bls from "../../src";
import {padLeft} from "../../src/helpers/utils";
describeSpecTest(
join(__dirname, "./spec-tests/tests/bls/sign_msg/sign_msg.yaml"),
bls.sign,
({input}) => {
const domain = padLeft(Buffer.from(input.domain.replace('0x', ''), 'hex'), 8);
return [
Buffer.from(input.privkey.replace('0x', ''), 'hex'),
Buffer.from(input.message.replace('0x', ''), 'hex'),
domain
];
},
({output}) => output,
(result) => `0x${result.toString('hex')}`,
() => false,
);

1
tests/spec/spec-tests Submodule

@ -0,0 +1 @@
Subproject commit 7567342c966c4e020f6ab3889f93cedb65ea9bfe


@ -0,0 +1,109 @@
import {G1point} from "../../../src/helpers/g1point";
import {expect} from "chai";
describe('g1point', function() {
it('should generate different random point', () => {
const g1 = G1point.random();
const g2 = G1point.random();
expect(g1.equal(g2)).to.be.false;
});
it('should be same', () => {
const g1 = G1point.random();
expect(g1.equal(g1)).to.be.true;
});
it('serialize and deserialize should produce same result', () => {
const g1 = G1point.random();
const g2 = G1point.fromBytesCompressed(g1.toBytesCompressed());
expect(g1.equal(g2)).to.be.true;
});
it('deserialize correct point does not throw', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'8123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef',
'hex'
)
)
}).to.not.throw();
});
it('deserialize incorrect point throws', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'8123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde0',
'hex'
)
)
}).to.throw('X coordinate is not on the curve.');
});
it('deserialize incorrect point throws 2', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab',
'hex'
)
)
}).to.throw('X coordinate is too large.');
});
it('deserialize incorrect point throws 3', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaac',
'hex'
)
)
}).to.throw('X coordinate is too large.');
});
it('deserialize incorrect point throws too few bytes', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaa',
'hex'
)
)
}).to.throw('Expected g1 compressed input to have 48 bytes');
});
it('deserialize incorrect point throws too many bytes', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaa900',
'hex'
)
)
}).to.throw('Expected g1 compressed input to have 48 bytes');
});
it('deserialize infinity', () => {
const g1 = G1point.fromBytesCompressed(
Buffer.from(
'c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'hex'
)
);
expect(g1.getPoint().is_infinity()).to.be.true
});
it('wrong infinity serialization', () => {
expect(() => {
G1point.fromBytesCompressed(
Buffer.from(
'e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'hex'
)
)
}).to.throw('The serialised input has B flag set, but A flag is set, or X is non-zero.');
});
});


@ -0,0 +1,153 @@
import {G2point} from "../../../src/helpers/g2point";
import {expect} from "chai";
describe('g2point', function() {
it('should be equals', () => {
const g2 = G2point.random();
expect(g2.equal(g2)).to.be.true;
});
it('should not be equals', () => {
const g2 = G2point.random();
const g22 = G2point.random();
expect(g2.equal(g22)).to.be.false;
});
it('serialize deserialize should be equal', () => {
const g2 = G2point.random();
expect(G2point.fromCompressedBytes(g2.toBytesCompressed()).equal(g2)).to.be.true;
});
it('should deserialize from compress', () => {
const x =
"8123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
+ "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.not.throw();
});
it('should fail to deserialize', () => {
const x =
"800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ "1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('The deserialised X real or imaginary coordinate is too large.');
});
it('should fail to deserialize 2', () => {
const x =
"9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab"
+ "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('The deserialised X real or imaginary coordinate is too large.');
});
it('should fail to deserialize 3', () => {
const x =
"800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ "1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaac";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('The deserialised X real or imaginary coordinate is too large.');
});
it('should fail to deserialize 4', () => {
const x =
"9a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaac"
+ "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('The deserialised X real or imaginary coordinate is too large.');
});
it('should fail to deserialize 5', () => {
const x =
"8123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
+ "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde0";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('X coordinate is not on the curve.');
});
it('should fail to deserialize infinity', () => {
const x =
"800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('X coordinate is not on the curve.');
});
it('should fail to deserialize - too few bytes', () => {
const x = "8123456789abcd";
expect(() => {
G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
)
}).to.throw('Expected signature of 96 bytes');
});
it('should fail to deserialize - too many bytes', () => {
expect(() => {
G2point.fromCompressedBytes(
Buffer.alloc(100, 1),
)
}).to.throw('Expected signature of 96 bytes');
});
it('should deserialize infinity', () => {
const x =
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
const g2 = G2point.fromCompressedBytes(
Buffer.from(
x,
'hex'
)
);
expect(g2.getPoint().is_infinity()).to.be.true;
})
});


@ -0,0 +1,27 @@
import {expect} from "chai";
import {padLeft} from "../../../src/helpers/utils";
import {G1point} from "../../../src/helpers/g1point";
describe('helpers tests', function() {
describe('padLeft', function() {
it('throw if source larger than target', () => {
expect(
() => padLeft(Buffer.alloc(2, 0), 1)
).to.throw();
});
it('pad one 0 on left side', () => {
const result = padLeft(
Buffer.alloc(1, 1),
2
);
expect(result.length).to.be.equal(2);
expect(result[0]).to.be.equal(0);
expect(result[1]).to.be.equal(1);
});
});
});

313
tests/unit/index.test.ts Normal file

@ -0,0 +1,313 @@
import bls from "../../src";
import {Keypair} from "../../src/keypair";
import { sha256 } from 'js-sha256';
import {G2point} from "../../src/helpers/g2point";
import {expect} from "chai";
describe('test bls', function () {
describe('aggregate pubkey', function () {
it('should aggregate empty array', function () {
expect(() => bls.aggregatePubkeys([])).to.not.throw();
});
});
describe('verify', function() {
it('should verify signature', () => {
const keypair = Keypair.generate();
const messageHash = Buffer.from(sha256.arrayBuffer("Test"));
const domain = Buffer.alloc(8, 1);
const signature = keypair.privateKey.sign(
G2point.hashToG2(messageHash, domain)
);
const result = bls.verify(
keypair.publicKey.toBytesCompressed(),
messageHash,
signature.toBytesCompressed(),
domain
);
expect(result).to.be.true;
});
it('should not modify original pubkey when verifying', () => {
const keypair = Keypair.generate();
const messageHash = Buffer.from(sha256.arrayBuffer("Test"));
const domain = Buffer.alloc(8, 1);
const signature = keypair.privateKey.sign(
G2point.hashToG2(messageHash, domain)
);
const pubKey = keypair.publicKey.toBytesCompressed();
bls.verify(
pubKey,
messageHash,
signature.toBytesCompressed(),
domain
);
expect('0x' + pubKey.toString('hex')).to.be.equal(keypair.publicKey.toHexString());
});
it('should fail verify empty signature', () => {
const keypair = Keypair.generate();
const messageHash2 = Buffer.from(sha256.arrayBuffer("Test message2"));
const domain = Buffer.from("01", 'hex');
const signature = Buffer.alloc(96);
const result = bls.verify(
keypair.publicKey.toBytesCompressed(),
messageHash2,
signature,
domain
);
expect(result).to.be.false;
});
it('should fail verify signature of different message', () => {
const keypair = Keypair.generate();
const messageHash = Buffer.from(sha256.arrayBuffer("Test message"));
const messageHash2 = Buffer.from(sha256.arrayBuffer("Test message2"))
const domain = Buffer.from("01", 'hex');
const signature = keypair.privateKey.sign(
G2point.hashToG2(messageHash, domain)
);
const result = bls.verify(
keypair.publicKey.toBytesCompressed(),
messageHash2,
signature.toBytesCompressed(),
domain
);
expect(result).to.be.false;
});
it('should fail verify signature of different domain', () => {
const keypair = Keypair.generate();
const messageHash = Buffer.from(sha256.arrayBuffer("Test message"));
const domain = Buffer.from("01", 'hex');
const domain2 = Buffer.from("02", 'hex');
const signature = keypair.privateKey.sign(
G2point.hashToG2(messageHash, domain)
);
const result = bls.verify(
keypair.publicKey.toBytesCompressed(),
messageHash,
signature.toBytesCompressed(),
domain2
);
expect(result).to.be.false;
});
it('should fail verify signature signed by different key', () => {
const keypair = Keypair.generate();
const keypair2 = Keypair.generate();
const messageHash = Buffer.from(sha256.arrayBuffer("Test message"));
const domain = Buffer.from("01", 'hex');
const signature = keypair.privateKey.sign(
G2point.hashToG2(messageHash, domain)
);
const result = bls.verify(
keypair2.publicKey.toBytesCompressed(),
messageHash,
signature.toBytesCompressed(),
domain
);
expect(result).to.be.false;
});
});
describe('verify multiple', function() {
it('should verify aggregated signatures', function () {
this.timeout(5000)
const domain = Buffer.alloc(8, 0);
const keypair1 = Keypair.generate();
const keypair2 = Keypair.generate();
const keypair3 = Keypair.generate();
const keypair4 = Keypair.generate();
const message1 = Buffer.from("Test1", 'utf-8');
const message2 = Buffer.from("Test2", 'utf-8');
const signature1 = keypair1.privateKey.signMessage(message1, domain);
const signature2 = keypair2.privateKey.signMessage(message1, domain);
const signature3 = keypair3.privateKey.signMessage(message2, domain);
const signature4 = keypair4.privateKey.signMessage(message2, domain);
const aggregatePubKey12 = bls.aggregatePubkeys([
keypair1.publicKey.toBytesCompressed(),
keypair2.publicKey.toBytesCompressed(),
]);
const aggregatePubKey34 = bls.aggregatePubkeys([
keypair3.publicKey.toBytesCompressed(),
keypair4.publicKey.toBytesCompressed(),
]);
const aggregateSignature = bls.aggregateSignatures([
signature1.toBytesCompressed(),
signature2.toBytesCompressed(),
signature3.toBytesCompressed(),
signature4.toBytesCompressed(),
]);
const result = bls.verifyMultiple(
[aggregatePubKey12, aggregatePubKey34],
[message1, message2],
aggregateSignature,
domain
);
expect(result).to.be.true;
});
it('should fail to verify aggregated signatures - swapped messages', function () {
this.timeout(5000)
const domain = Buffer.alloc(8, 0);
const keypair1 = Keypair.generate();
const keypair2 = Keypair.generate();
const keypair3 = Keypair.generate();
const keypair4 = Keypair.generate();
const message1 = Buffer.from("Test1", 'utf-8');
const message2 = Buffer.from("Test2", 'utf-8');
const signature1 = keypair1.privateKey.signMessage(message1, domain);
const signature2 = keypair2.privateKey.signMessage(message1, domain);
const signature3 = keypair3.privateKey.signMessage(message2, domain);
const signature4 = keypair4.privateKey.signMessage(message2, domain);
const aggregatePubKey12 = bls.aggregatePubkeys([
keypair1.publicKey.toBytesCompressed(),
keypair2.publicKey.toBytesCompressed(),
]);
const aggregatePubKey34 = bls.aggregatePubkeys([
keypair3.publicKey.toBytesCompressed(),
keypair4.publicKey.toBytesCompressed(),
]);
const aggregateSignature = bls.aggregateSignatures([
signature1.toBytesCompressed(),
signature2.toBytesCompressed(),
signature3.toBytesCompressed(),
signature4.toBytesCompressed(),
]);
const result = bls.verifyMultiple(
[aggregatePubKey12, aggregatePubKey34],
[message2, message1],
aggregateSignature,
domain
);
expect(result).to.be.false;
});
it('should fail to verify aggregated signatures - different pubkeys and messages', () => {
const domain = Buffer.alloc(8, 0);
const keypair1 = Keypair.generate();
const keypair2 = Keypair.generate();
const keypair3 = Keypair.generate();
const keypair4 = Keypair.generate();
const message1 = Buffer.from("Test1", 'utf-8');
const message2 = Buffer.from("Test2", 'utf-8');
const signature1 = keypair1.privateKey.signMessage(message1, domain);
const signature2 = keypair2.privateKey.signMessage(message1, domain);
const signature3 = keypair3.privateKey.signMessage(message2, domain);
const signature4 = keypair4.privateKey.signMessage(message2, domain);
const aggregatePubKey12 = bls.aggregatePubkeys([
keypair1.publicKey.toBytesCompressed(),
keypair2.publicKey.toBytesCompressed(),
]);
const aggregateSignature = bls.aggregateSignatures([
signature1.toBytesCompressed(),
signature2.toBytesCompressed(),
signature3.toBytesCompressed(),
signature4.toBytesCompressed(),
]);
const result = bls.verifyMultiple(
[aggregatePubKey12],
[message2, message1],
aggregateSignature,
domain
);
expect(result).to.be.false;
});
it('should fail to verify aggregated signatures - different domain', () => {
const domain = Buffer.alloc(8, 0);
const domain2 = Buffer.alloc(8, 1);
const keypair1 = Keypair.generate();
const keypair2 = Keypair.generate();
const keypair3 = Keypair.generate();
const keypair4 = Keypair.generate();
const message1 = Buffer.from("Test1", 'utf-8');
const message2 = Buffer.from("Test2", 'utf-8');
const signature1 = keypair1.privateKey.signMessage(message1, domain);
const signature2 = keypair2.privateKey.signMessage(message1, domain);
const signature3 = keypair3.privateKey.signMessage(message2, domain2);
const signature4 = keypair4.privateKey.signMessage(message2, domain2);
const aggregatePubKey12 = bls.aggregatePubkeys([
keypair1.publicKey.toBytesCompressed(),
keypair2.publicKey.toBytesCompressed(),
]);
const aggregateSignature = bls.aggregateSignatures([
signature1.toBytesCompressed(),
signature2.toBytesCompressed(),
signature3.toBytesCompressed(),
signature4.toBytesCompressed(),
]);
const result = bls.verifyMultiple(
[aggregatePubKey12],
[message2, message1],
aggregateSignature,
domain
);
expect(result).to.be.false;
});
it('should fail to verify aggregated signatures - no public keys', () => {
const domain = Buffer.alloc(8, 0);
const signature = Buffer.alloc(96);
const message1 = Buffer.from("Test1", 'utf-8');
const message2 = Buffer.from("Test2", 'utf-8');
const result = bls.verifyMultiple(
[],
[message2, message1],
signature,
domain
);
expect(result).to.be.false;
});
});
});


@ -0,0 +1,25 @@
import {PrivateKey} from "../../src/privateKey";
import {PublicKey} from "../../src/publicKey";
import {Keypair} from "../../src/keypair";
import {expect} from "chai";
describe('keypair', function() {
it('should create from private and public key', () => {
const secret = PrivateKey.random();
const secret2 = PrivateKey.random();
const publicKey = PublicKey.fromBytes(PublicKey.fromPrivateKey(secret2).toBytesCompressed());
const keypair = new Keypair(secret, publicKey);
expect(keypair.publicKey).to.be.equal(publicKey);
expect(keypair.privateKey).to.be.equal(secret);
expect(keypair.privateKey).to.not.be.equal(secret2);
});
it('should create from private', () => {
const secret = PrivateKey.random();
const publicKey = PublicKey.fromPrivateKey(secret);
const keypair = new Keypair(secret);
expect(keypair.publicKey.toBytesCompressed().toString('hex'))
.to.be.equal(publicKey.toBytesCompressed().toString('hex'));
})
});


@ -0,0 +1,22 @@
import {PrivateKey} from "../../src/privateKey";
import {expect} from "chai";
import {SECRET_KEY_LENGTH} from "../../src/constants";
describe('privateKey', function() {
it('should generate random private key', function () {
const privateKey1 = PrivateKey.random();
const privateKey2 = PrivateKey.random();
expect(privateKey1).to.not.be.equal(privateKey2);
});
it('should export private key to hex string', function () {
const privateKey = '0x9a88071ff0634f6515c7699c97d069dc4b2fa28455f6b457e92d1c1302f0c6bb';
expect(PrivateKey.fromHexString(privateKey).toHexString()).to.be.equal(privateKey);
});
it('should export private key to bytes', function () {
expect(PrivateKey.random().toBytes().length).to.be.equal(SECRET_KEY_LENGTH);
});
});

64
tsconfig.json Normal file

@ -0,0 +1,64 @@
{
"compilerOptions": {
/* Basic Options */
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
//"lib": ["esnext"], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
"declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
"baseUrl": "bls-js/", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": ["./node_modules/@types"],
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
},
"include": [
"src/**/*"
],
"exclude":[
"node_modules"
]
}