diff --git a/.eslintrc.json b/.eslintrc.json
index 880ce38..67a92be 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -12,7 +12,7 @@
   "plugins": ["@typescript-eslint", "unused-imports", "simple-import-sort"],
   "rules": {
     "unused-imports/no-unused-imports": "error",
-    "prefer-const": "error",
+    "prefer-const": "off",
     "simple-import-sort/imports": "error",
     "simple-import-sort/exports": "error",
     "no-var": "error"
diff --git a/Dockerfile b/Dockerfile
index e51b122..f8d23f1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
-FROM node:16.20.2
+FROM node:20.6.1
 WORKDIR /app
 COPY . .
-RUN yarn install
+RUN yarn install --non-interactive --frozen-lockfile
 EXPOSE 4001
 EXPOSE 4002
 EXPOSE 4003
diff --git a/README.md b/README.md
index 73a4dd3..6f50ec2 100644
--- a/README.md
+++ b/README.md
@@ -247,6 +247,7 @@ For the nodes to function correctly, you need to set up three separate databases
 curl --location 'http://localhost:4001/apis/v1/messaging/settings/eip155:11155111:0xD8634C39BBFd4033c0d3289C4515275102423681/ETH_TEST_SEPOLIA'
 ```
+
 ## Contributing
 
 We welcome contributions from the community! To contribute, please follow these steps:
@@ -262,3 +263,146 @@ Please ensure your code adheres to our coding standards and includes appropriate
 ## License
 
 This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
+
+
+## Docker (local testing)
+
+Assumptions:
+- the default dir is /Users/w/chain, where all Push git repos are located (any dir works):
+  - /Users/w/chain/push-node-smart-contracts
+  - /Users/w/chain/push-vnode
+  - /Users/w/chain/push-snode
+  - /Users/w/chain/push-anode
+- docker and docker-compose are installed
+- Docker Desktop: allow full access to /Users/w/chain in Preferences -> Resources -> File sharing
+- naming conventions:
+  - docker images: hardhat-main, validator-main, ...
+  - docker containers: vnode1, vnode2, ...
+  - docker dns: vnode1.local, redis.local, hardhat.local, ...
+- no specific docker version is required; a working setup could look like this:
+```shell
+docker --version
+Docker version 20.10.21, build baeda1f
+
+docker-compose --version
+Docker Compose version v2.13.0
+```
+
+Set up docker images for smart contracts & vnodes
+```bash
+## create docker network
+docker network create push-shared-network
+
+## prepare image for hardhat
+cd /Users/w/chain/push-node-smart-contracts
+docker build . -t hardhat-main
+
+## prepare image for the validator node (vnode)
+cd /Users/w/chain/push-vnode
+docker build . 
-t vnode-main
+```
+
+Run (2 shell tabs recommended)
+
+```bash
+## run mysql + redis + phpmyadmin (shell1)
+## add up -d for background
+export DB_PASS=s1mpl3
+export DB_NAME=vnode1
+export DB_USER=2roor
+cd /Users/w/chain/push-vnode
+docker-compose up
+
+## run hardhat + vnode1 + vnode2 + vnode3 (shell2)
+## add up -d for background
+export DB_PASS=s1mpl3
+export DB_USER=2roor
+cd /Users/w/chain/push-vnode
+docker-compose -f net.yml up
+```
+
+Check that all docker DNS names resolve (OPTIONAL)
+```bash
+docker exec redis-main bash -c " getent hosts redis.local "
+docker exec redis-main bash -c " getent hosts mysql.local "
+docker exec redis-main bash -c " getent hosts phpmyadmin.local "
+docker exec redis-main bash -c " getent hosts hardhat.local "
+docker exec redis-main bash -c " getent hosts vnode1.local "
+docker exec redis-main bash -c " getent hosts vnode2.local "
+docker exec redis-main bash -c " getent hosts vnode3.local "
+```
+
+Test
+```shell
+## vnode1, vnode2, vnode3 are online and visible from the host machine
+curl --location 'http://localhost:4001/api/v1/rpc/' \
+--header 'Content-Type: application/json' \
+--data '{
+    "jsonrpc": "2.0",
+    "method": "push_listening",
+    "params": [],
+    "id": 1
+}'
+echo ------------
+curl --location 'http://localhost:4002/api/v1/rpc/' \
+--header 'Content-Type: application/json' \
+--data '{
+    "jsonrpc": "2.0",
+    "method": "push_listening",
+    "params": [],
+    "id": 2
+}'
+echo ------------
+curl --location 'http://localhost:4003/api/v1/rpc/' \
+--header 'Content-Type: application/json' \
+--data '{
+    "jsonrpc": "2.0",
+    "method": "push_listening",
+    "params": [],
+    "id": 3
+}'
+echo ------------
+```
+Smoke-test the validator API
+```shell
+### get api token
+curl --location 'http://localhost:4001/api/v1/rpc/' \
+--header 'Content-Type: application/json' \
+--data '{
+    "jsonrpc": "2.0",
+    "method": "push_getApiToken",
+    "params": [],
+    "id": 1
+}'
+echo ------------
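+```
+
+The dummy `params` value below is just a base16 (hex) encoded protobuf `Transaction`.
+A minimal sketch of how such a payload can be produced with the generated stubs
+(run `yarn build:proto` first; import paths and all field values here are
+illustrative placeholders, not real keys or tokens):
+
+```typescript
+import { InitDid, Transaction } from './src/generated/push/block_pb'
+import { BitUtil } from './src/utilz/bitUtil'
+import IdUtil from './src/utilz/idUtil'
+
+// category-specific payload (INIT_DID); placeholder values
+const data = new InitDid()
+data.setDid('0xAA')
+data.setMasterpubkey('0xBB')
+data.setDerivedkeyindex(1)
+data.setDerivedpubkey('0xCC')
+data.setEncderivedprivkey('0xDD')
+
+// transaction envelope; type 0 = non-value transaction
+const tx = new Transaction()
+tx.setType(0)
+tx.setCategory('INIT_DID')
+tx.setSender('eip155:1:0xAA')
+tx.setRecipientsList(['eip155:1:0xBB', 'eip155:1:0xCC'])
+tx.setData(data.serializeBinary())
+tx.setSalt(IdUtil.getUuidV4AsBytes())
+tx.setApitoken('<apiToken returned by push_getApiToken>')
+tx.setFee('1')
+tx.setSignature(BitUtil.base16ToBytes('EE')) // placeholder signature
+
+// hex string that goes into "params" below
+console.log(BitUtil.bytesToBase16(tx.serializeBinary()))
+```
+
+```shell
+### send a test transaction (DUMMY DATA)
+curl --location 'http://localhost:4001/api/v1/rpc/' \
+--header 'Content-Type: application/json' \
+--data '{
+    "jsonrpc": "2.0",
+    "method": "push_sendTransaction",
+    "params": 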
["1208494e49545f4449441a0d6569703135353a313a30784141220d6569703135353a313a30784242220d6569703135353a313a307843432a1a0a043078414112043078424218012204307843432a04307844443210d8555d2a5c474fa0a5f588563d50b2873ab40b7b226e6f646573223a5b7b226e6f64654964223a22307838653132644531324333356541426633356235366230344535334334453436386534363732374538222c2274734d696c6c6973223a313732343637333234303033302c2272616e646f6d486578223a2266376266376266303366656130613732353135363965303564653635363832646235353935353765222c2270696e67526573756c7473223a5b7b226e6f64654964223a22307866444145616637616643466262346534643136444336366244323033396664363030344346636538222c2274734d696c6c6973223a313732343637333234303032302c22737461747573223a317d2c7b226e6f64654964223a22307839384639443931304165663942334239413435313337616631434137363735654439306135333535222c2274734d696c6c6973223a313732343637333234303031392c22737461747573223a317d5d2c227369676e6174757265223a22307862333333636331623731633434633430386439366237626662363338353439336366313637636232626432353532376437383664333866376239383065616433303132663736636137386533616231613337653661316363666432306236393364656664303039633837313163666338396630353262333933363866316365383162227d2c7b226e6f64654964223a22307866444145616637616643466262346534643136444336366244323033396664363030344346636538222c2274734d696c6c6973223a313732343637333234303032352c2272616e646f6d486578223a2239323136383734646630653134383539376639643434643064666661656465393538343464643137222c2270696e67526573756c7473223a5b7b226e6f64654964223a22307838653132644531324333356541426633356235366230344535334334453436386534363732374538222c2274734d696c6c6973223a313732343637333232343635302c22737461747573223a317d2c7b226e6f64654964223a22307839384639443931304165663942334239413435313337616631434137363735654439306135333535222c2274734d696c6c6973223a313732343637333232343635352c22737461747573223a317d5d2c227369676e6174757265223a22307837626663343432343464633431376132383463313038353064613335313663353035613036326662323262623538353438383661613239393739653032663139376565633263393664356439343864306131643635306563323334646130356330663933626535653230393136316563326363626331653963396537343238623162227d2c7b226e6f64654964223a22307839384639443931304165663942334239413435313337616631434137363735654439306135333535222c2274734d696c6c6973223a313732343637333234303032342c2272616e646f6d486578223a2230643961313661383939636164306566633838336633343564306638306237666131356136353666222c2270696e67526573756c7473223a5b7b226e6f64654964223a22307838653132644531324333356541426633356235366230344535334334453436386534363732374538222c2274734d696c6c6973223a313732343637333232363934332c22737461747573223a317d2c7b226e6f64654964223a22307866444145616637616643466262346534643136444336366244323033396664363030344346636538222c2274734d696c6c6973223a313732343637333232363934372c22737461747573223a317d5d2c227369676e6174757265223a22307862613632393639366565653830343864313639303730336166656361663866626333663834333139643439316164623363663364663333313131396530303239303531373732303237393137313335333332653430666233323639333963626137663634373636663262363933343065366534663066623336333639386663393162227d5d7d4201ee4a0131"], + "id": 1 +}' +echo ------------ + +### read transaction queue size +curl --location 'http://localhost:4001/api/v1/rpc/' \ +--header 'Content-Type: application/json' \ +--data '{ + "jsonrpc": "2.0", + "method": "push_readBlockQueueSize", + "params": [], + "id": 1 +}' + +### read transaction queue +curl --location 'http://localhost:4001/api/v1/rpc/' \ +--header 'Content-Type: application/json' \ +--data '{ 
+ "jsonrpc": "2.0", + "method": "push_readBlockQueue", + "params": ["0"], + "id": 1 +}' +``` \ No newline at end of file diff --git a/docker-compose-v01.yml b/docker-compose-v01.yml deleted file mode 100644 index 8a2886f..0000000 --- a/docker-compose-v01.yml +++ /dev/null @@ -1,72 +0,0 @@ -version: '3' -services: - redis-v01: - image: 'redis:latest' - container_name: redis-v01 - restart: always - networks: - - push-shared-network - volumes: - - ./external/redis-v01:/data - - mysql-v01: - image: mysql:5.7.13 - container_name: mysql-v01 - platform: linux/amd64 - command: --default-authentication-plugin=mysql_native_password - restart: always - environment: - MYSQL_ROOT_PASSWORD: 'pass' - MYSQL_DATABASE: vnode1 - MYSQL_USER: 2roor - MYSQL_PASSWORD: s1mpl3 - # Change this to your local path - volumes: - - ./external/mysql-v01:/var/lib/mysql/ - networks: - - push-shared-network - - phpmyadmin-v01: - image: phpmyadmin/phpmyadmin - container_name: phpmyadmin-v01 - depends_on: - - mysql-v01 - environment: - PMA_HOST: mysql-v01 - PMA_PORT: 3306 - PMA_ARBITRARY: 1 - UPLOAD_LIMIT: 300M - ports: - - 8183:80 - restart: always - networks: - - push-shared-network - - ipfs-v01: - container_name: ipfs-v01 - image: ipfs/go-ipfs:latest - volumes: - - ./external/ipfs-v01:/data - networks: - - push-shared-network - - app-v01: - build: . - container_name: app-v01 - ports: - - '4001:4001' - depends_on: - - mysql-v01 - - redis-v01 - - ipfs-v01 - environment: - - CONFIG_DIR=/app/config - - LOG_DIR=/app/config/log - volumes: - - /root/config/v01:/app/config - networks: - - push-shared-network - -networks: - push-shared-network: - external: true \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 3c42f65..afbdb65 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,20 +1,22 @@ version: '3' services: + redis: image: 'redis:latest' - + container_name: redis-main restart: always networks: - - main + push-shared-network: + aliases: + - redis.local ports: - 6379:6379 volumes: - ./external/redis:/data mysql: - image: mysql:5.7.32 - container_name: db - platform: linux/amd64 + image: mysql:8.0 + container_name: mysql-main command: --default-authentication-plugin=mysql_native_password restart: always environment: @@ -24,12 +26,17 @@ services: MYSQL_PASSWORD: ${DB_PASS} ports: - 3306:3306 - # Change this to your local path volumes: - ./external/mysql:/var/lib/mysql/ + - ./mysql-init:/docker-entrypoint-initdb.d/ + networks: + push-shared-network: + aliases: + - mysql.local phpmyadmin: image: phpmyadmin/phpmyadmin + container_name: phpmyadmin-main depends_on: - mysql environment: @@ -40,17 +47,12 @@ services: restart: always ports: - 8183:80 + networks: + push-shared-network: + aliases: + - phpmyadmin.local - ipfs: - image: ipfs/go-ipfs:latest - volumes: - - ./external/ipfs:/data - ports: - - '4001:4001' - - '127.0.0.1:8080:8080' - - '127.0.0.1:8081:8081' - - '127.0.0.1:5001:5001' networks: - main: - driver: bridge + push-shared-network: + external: true \ No newline at end of file diff --git a/mysql-init/create_databases.sql b/mysql-init/create_databases.sql new file mode 100644 index 0000000..df7e43b --- /dev/null +++ b/mysql-init/create_databases.sql @@ -0,0 +1,27 @@ + +CREATE DATABASE vnode2 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode2.* TO '2roor'@'%'; + +CREATE DATABASE vnode3 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode3.* TO '2roor'@'%'; + +CREATE DATABASE vnode4 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT 
ALL PRIVILEGES ON vnode4.* TO '2roor'@'%'; + +CREATE DATABASE vnode5 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode5.* TO '2roor'@'%'; + +CREATE DATABASE vnode6 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode6.* TO '2roor'@'%'; + +CREATE DATABASE vnode7 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode7.* TO '2roor'@'%'; + +CREATE DATABASE vnode8 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode8.* TO '2roor'@'%'; + +CREATE DATABASE vnode9 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode9.* TO '2roor'@'%'; + +CREATE DATABASE vnode10 CHARACTER SET utf8 COLLATE utf8_general_ci; +GRANT ALL PRIVILEGES ON vnode10.* TO '2roor'@'%'; diff --git a/net.yml b/net.yml new file mode 100644 index 0000000..d6942a1 --- /dev/null +++ b/net.yml @@ -0,0 +1,91 @@ +version: '3' +services: + + hardhat: + image: hardhat-main + container_name: hardhat + networks: + push-shared-network: + aliases: + - hardhat.local + ports: + - "8545:8545" + restart: always + + vnode1: + image: vnode-main + container_name: vnode1 + networks: + push-shared-network: + aliases: + - vnode1.local + depends_on: + - hardhat + environment: + DB_NAME: vnode1 + PORT: 4001 + CONFIG_DIR: /config + LOG_DIR: /log + DB_HOST: mysql.local + REDIS_URL: redis://redis.local:6379 + VALIDATOR_RPC_ENDPOINT: http://hardhat.local:8545 + VALIDATOR_RPC_NETWORK: 1337 + ports: + - "4001:4001" + volumes: + - ./push-docker/v01:/config + - ./push-docker/v01/log:/log + + + vnode2: + image: vnode-main + container_name: vnode2 + networks: + push-shared-network: + aliases: + - vnode2.local + depends_on: + - hardhat + environment: + DB_NAME: vnode2 + PORT: 4001 + CONFIG_DIR: /config + LOG_DIR: /log + DB_HOST: mysql.local + REDIS_URL: redis://redis.local:6379 + VALIDATOR_RPC_ENDPOINT: http://hardhat.local:8545 + VALIDATOR_RPC_NETWORK: 1337 + ports: + - "4002:4001" + volumes: + - ./push-docker/v02:/config + - ./push-docker/v02/log:/log + + + vnode3: + image: vnode-main + container_name: vnode3 + networks: + push-shared-network: + aliases: + - vnode3.local + depends_on: + - hardhat + environment: + DB_NAME: vnode3 + PORT: 4001 + CONFIG_DIR: /config + LOG_DIR: /log + DB_HOST: mysql.local + REDIS_URL: redis://redis.local:6379 + VALIDATOR_RPC_ENDPOINT: http://hardhat.local:8545 + VALIDATOR_RPC_NETWORK: 1337 + ports: + - "4003:4001" + volumes: + - ./push-docker/v03:/config + - ./push-docker/v03/log:/log + +networks: + push-shared-network: + external: true \ No newline at end of file diff --git a/package.json b/package.json index a36f0d3..a1e14c2 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,8 @@ "lint:eslint": "eslint --ignore-path .gitignore --ext .ts", "lint:js": "npm run lint:eslint src/", "lint:fix": "npm run lint:js -- --fix", - "prepare": "husky install" + "prepare": "husky install", + "build:proto": "scripts/protoc-generate.sh" }, "repository": { "type": "git", @@ -79,9 +80,10 @@ "ethers": "^5.7.2", "event-dispatch": "^0.4.1", "eventemitter3": "^3.1.0", - "express": "^4.16.2", + "express": "^4.19.2", "express-basic-auth": "^1.2.0", "express-joi-validation": "^5.0.1", + "express-json-rpc-router": "^1.4.0", "express-jwt": "^5.3.1", "firebase-admin": "8.12.1", "graphql": "^16.5.0", @@ -111,13 +113,14 @@ "redis": "^4.2.0", "reflect-metadata": "^0.1.12", "request-promise": "^4.2.6", - "secp256k1-v4": "https://github.com/HarshRajat/secp256k1-node", "semver": "^7.3.7", "socket.io": "^4.4.1", "swagger-jsdoc": "^6.2.8", 
"swagger-ui-express": "^4.6.3", "ts-luxon": "^4.3.2", "ts-node-dev": "1.0.0-pre.44", + "ts-proto": "^2.0.2", + "ts-protoc-gen": "^0.15.0", "typedi": "^0.8.0", "typescript": "^4.5.4", "typings": "^2.1.1", diff --git a/scripts/protoc-generate.sh b/scripts/protoc-generate.sh new file mode 100755 index 0000000..3ab2e30 --- /dev/null +++ b/scripts/protoc-generate.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +# Root directory of app +ROOT_DIR=$(git rev-parse --show-toplevel) + +# Path to Protoc Plugin +PROTOC_GEN_TS_PATH="${ROOT_DIR}/node_modules/.bin/protoc-gen-ts" + +# Directory holding all .proto files +SRC_DIR="${ROOT_DIR}/src/proto" + +# Directory to write generated code (.d.ts files) +OUT_DIR="${ROOT_DIR}/src/generated" + +# Clean all existing generated files +rm -r "${OUT_DIR}" +mkdir "${OUT_DIR}" + +# Generate all messages +protoc \ + --plugin="protoc-gen-ts=${PROTOC_GEN_TS_PATH}" \ + --ts_opt=esModuleInterop=true \ + --js_out="import_style=commonjs,binary:${OUT_DIR}" \ + --ts_out="${OUT_DIR}" \ + --proto_path="${SRC_DIR}" \ + $(find "${SRC_DIR}" -iname "*.proto") diff --git a/src/api/routes/validatorRoutes.ts b/src/api/routes/validatorRoutes.ts index b6d1784..f6a927c 100644 --- a/src/api/routes/validatorRoutes.ts +++ b/src/api/routes/validatorRoutes.ts @@ -13,7 +13,8 @@ import { ValidatorRandom } from '../../services/messaging/validatorRandom' import { ValidatorPing } from '../../services/messaging/validatorPing' import { MessageBlock, PayloadItem, SenderType } from '../../services/messaging-common/messageBlock' import { WinstonUtil } from '../../utilz/winstonUtil' - +import jsonRouter from "express-json-rpc-router"; +import {ValidatorRpc} from "./validatorRpc"; // /apis/v1/messaging const route = Router() @@ -28,9 +29,16 @@ function logResponseFinished(log: Logger, status: number, responseObj: any) { log.debug(`=== Reply ${status} with body: %o`, responseObj) } +function initRpc(app: Router) { + const validatorRpc = Container.get(ValidatorRpc); + app.use(`/v1/rpc`, jsonRouter({ methods: validatorRpc })); +} + export default (app: Router) => { + initRpc(app); + // Load the rest - app.use(`/${config.api.version}/messaging`, route) + app.use(`/v1/messaging`, route) app.use(errors()) // add external payload diff --git a/src/api/routes/validatorRpc.ts b/src/api/routes/validatorRpc.ts new file mode 100644 index 0000000..80e20e5 --- /dev/null +++ b/src/api/routes/validatorRpc.ts @@ -0,0 +1,81 @@ +import {Container, Inject, Service} from "typedi"; +import {Logger} from "winston"; +import {WinstonUtil} from "../../utilz/winstonUtil"; +import {ValidatorNode} from "../../services/messaging/validatorNode"; +import {ValidatorRandom} from "../../services/messaging/validatorRandom"; +import {BitUtil} from "../../utilz/bitUtil"; +import {NumUtil} from "../../utilz/numUtil"; +import {QueueManager} from "../../services/messaging/QueueManager"; + +type RpcResult = { + result: string; + error: string; +} + +@Service() +export class ValidatorRpc { + public log: Logger = WinstonUtil.newLog(ValidatorRpc); + + @Inject() + private validatorNode: ValidatorNode; + + @Inject() + private validatorRandom: ValidatorRandom; + + @Inject() + private queueManager: QueueManager; + + public push_getApiToken([]) { + const apiToken = this.validatorRandom.createValidatorToken(); + return { + "apiToken" : apiToken.validatorToken, + "apiUrl": apiToken.validatorUrl + } ; + } + + + + public async push_sendTransaction([ transactionDataBase16 ]) { + let txRaw = BitUtil.base16ToBytes(transactionDataBase16); + let txHash = await 
this.validatorNode.sendTransactionBlocking(txRaw); + return txHash; + } + + public async push_readBlockQueue([ offsetStr ]) { + const firstOffset = NumUtil.parseInt(offsetStr, 0); + let result = await Container.get(QueueManager).readItems("mblock", firstOffset); + return result; + } + + + public async push_readBlockQueueSize([]) { + let result = await this.queueManager.getQueueLastOffsetNum("mblock"); + return { + "lastOffset" : NumUtil.toString(result) + } + } + + public push_syncing([]) { + // todo return queue state + return { + "lastPublishedOffset": "1001" + } + } + + // todo NETWORK CALLS TO STORAGE NODES + // todo push_getTransactions + // todo push_getBlockTransactionCountByHash (1) + // todo push_getBlockByHash + // todo push_getTransactionByHash + // todo push_getTransactionByBlockHashAndIndex + // todo push_getTransactionCount + + + public push_networkId([]) { + return "1"; + } + + public push_listening([]) { + return "true"; + } +} \ No newline at end of file diff --git a/src/appInit.ts b/src/appInit.ts index 070425c..ef3e905 100644 --- a/src/appInit.ts +++ b/src/appInit.ts @@ -1,5 +1,5 @@ import 'reflect-metadata' // We need this in order to use @Decorators -import 'newrelic' +// import 'newrelic' import chalk from 'chalk' import express from 'express' diff --git a/src/config/config-general.ts b/src/config/config-general.ts index 1076228..521a69c 100644 --- a/src/config/config-general.ts +++ b/src/config/config-general.ts @@ -94,7 +94,7 @@ export default { * API configs */ api: { - prefix: '/apis', + prefix: '/api', version: 'v1' }, diff --git a/src/helpers/cryptoHelper.ts b/src/helpers/cryptoHelper.ts index 155dbf0..2ccff6a 100644 --- a/src/helpers/cryptoHelper.ts +++ b/src/helpers/cryptoHelper.ts @@ -2,7 +2,7 @@ const CryptoJS = require('crypto-js') import { decrypt, encrypt } from 'eccrypto' import EthCrypto from 'eth-crypto' -import { publicKeyConvert } from 'secp256k1-v4' + const publicKeyToAddress = require('ethereum-public-key-to-address') @@ -54,50 +54,7 @@ module.exports = { }, // Encryption with public key encryptWithPublicKey: async function (message, publicKey) { - // Convert compressed public key, starts with 03 or 04 - const pubKeyUint8Array = Uint8Array.from(new Buffer(publicKey, 'hex')) - //console.log("[ENCRYPTION] Public Key Uint8Array: " + pubKeyUint8Array); - - const convertedKeyAsUint8Array = publicKeyConvert(pubKeyUint8Array, false) - //console.log("[ENCRYPTION] Public Key Converted: " + convertedKeyAsUint8Array); - - const convertedPublicKeyHex = new Buffer(convertedKeyAsUint8Array) - //console.log("[ENCRYPTION] Converted Public Key Buffer: " + convertedPublicKeyHex); - - const pubKey = new Buffer(convertedPublicKeyHex, 'hex') - //console.log("[ENCRYPTION] pubkey getting sentout for encrypt: " + pubKey); - - return encrypt(pubKey, Buffer(message)).then((encryptedBuffers) => { - const cipher = { - iv: encryptedBuffers.iv.toString('hex'), - ephemPublicKey: encryptedBuffers.ephemPublicKey.toString('hex'), - ciphertext: encryptedBuffers.ciphertext.toString('hex'), - mac: encryptedBuffers.mac.toString('hex') - } - // use compressed key because it's smaller - // const compressedKey = new Buffer.from(publicKeyConvert(Web3Helper.getUint8ArrayFromHexStr(cipher.ephemPublicKey), true)).toString('hex') - const input = Uint8Array.from(new Buffer(cipher.ephemPublicKey, 'hex')) - const keyConvert = publicKeyConvert(input, true) - // console.log("[ENCRYPTION] Coverted key: " + keyConvert); - - const keyConvertBuffer = new Buffer(keyConvert) - // 
console.log("[ENCRYPTION] Coverted key in buffer : " + keyConvertBuffer); - // console.log(keyConvertBuffer); - - //console.log(keyConvert); - const compressedKey = keyConvertBuffer.toString('hex') - // console.log("[ENCRYPTION] Compressed key in buffer : "); - // console.log(compressedKey); - - const ret = Buffer.concat([ - new Buffer(cipher.iv, 'hex'), // 16bit - new Buffer(compressedKey, 'hex'), // 33bit - new Buffer(cipher.mac, 'hex'), // 32bit - new Buffer(cipher.ciphertext, 'hex') // var bit - ]).toString('hex') - - return ret - }) + }, // Decryption with public key decryptWithPrivateKey: async function (message, privateKey) { diff --git a/src/loaders/index.ts b/src/loaders/index.ts index dc13042..48b7cb8 100644 --- a/src/loaders/index.ts +++ b/src/loaders/index.ts @@ -60,9 +60,9 @@ export default async ({ await dbListenerLoader({ pool, logger }) logger.info('DB Listener loaded!') - logger.info('Loading jobs') - await jobsLoader({ logger }) - logger.info('Jobs loaded!') + // logger.info('Loading jobs') + // await jobsLoader({ logger }) + // logger.info('Jobs loaded!') if (config.pushNodesNet !== 'PROD') { logger.info('Loading Subgraph jobs') diff --git a/src/loaders/initializer.ts b/src/loaders/initializer.ts index 792a1dc..cc59f10 100644 --- a/src/loaders/initializer.ts +++ b/src/loaders/initializer.ts @@ -1,21 +1,29 @@ import { Container } from 'typedi' -import * as dbGenerator from '../helpers/dbGeneratorHelper' +// import * as dbGenerator from '../helpers/dbGeneratorHelper' import { startMigration } from '../helpers/migrationsHelper' import { startManualMigration } from '../migrations/manual' import HistoryFetcherService from '../services/historyFetcherService' +// import {createNewValidatorTables} from "../migrations/versioned/migrationV1"; +import {MySqlUtil} from "../utilz/mySqlUtil"; +import * as dbHelper from "../helpers/dbHelper"; export default async ({ logger, testMode }) => { - logger.info('Running DB Checks') - await dbGenerator.generateDBStructure(logger) - logger.info('DB Checks completed!') + logger.info('Running DB Checks'); + // await dbGenerator.generateTableProtocolMeta(logger); + // VALIDATOR TABLES ONLY + await createNewValidatorTables(); - logger.info('Running Migration') - await startMigration() - logger.info('Migration completed!') + // COMMENTED OUT - I don't need tables + // await dbGenerator.generateDBStructure(logger) + // logger.info('DB Checks completed!') - logger.info('Running Manual Migration') - startManualMigration() + // logger.info('Running Migration') + // await startMigration() + // logger.info('Migration completed!') + + // logger.info('Running Manual Migration') + // startManualMigration() logger.info('Syncing Protocol History') @@ -28,3 +36,47 @@ export default async ({ logger, testMode }) => { logger.transports.forEach((t) => (t.silent = false)) logger.info('Protocol History Synced!') } + + +// 1 CREATE TABLES FROM SCRATCH +export async function createNewValidatorTables() { + MySqlUtil.init(dbHelper.pool) + + await MySqlUtil.update(` + CREATE TABLE IF NOT EXISTS dset_client + ( + id INT NOT NULL AUTO_INCREMENT, + queue_name varchar(32) NOT NULL COMMENT 'target node queue name', + target_node_id varchar(128) NOT NULL COMMENT 'target node eth address', + target_node_url varchar(128) NOT NULL COMMENT 'target node url, filled from the contract', + target_offset bigint(20) NOT NULL DEFAULT 0 COMMENT 'initial offset to fetch target queue', + state tinyint(1) NOT NULL DEFAULT 1 COMMENT '1 = enabled, 0 = disabled', + PRIMARY KEY (id), + UNIQUE KEY 
uniq_dset_name_and_target (queue_name, target_node_id)
+    ) ENGINE = InnoDB
+      DEFAULT CHARSET = utf8;
+  `)
+
+  await MySqlUtil.update(`
+    CREATE TABLE IF NOT EXISTS dset_queue_mblock
+    (
+      id          BIGINT       NOT NULL AUTO_INCREMENT,
+      object_hash VARCHAR(255) NULL COMMENT 'optional: a unique field to fight duplicates',
+      object      MEDIUMTEXT   NOT NULL,
+      PRIMARY KEY (id),
+      UNIQUE KEY uniq_mblock_object_hash (object_hash)
+    ) ENGINE = InnoDB
+      DEFAULT CHARSET = utf8;
+  `)
+
+  await MySqlUtil.update(`
+    CREATE TABLE IF NOT EXISTS dset_queue_subscribers
+    (
+      id          BIGINT       NOT NULL AUTO_INCREMENT,
+      object_hash VARCHAR(255) NULL COMMENT 'optional: a unique field to fight duplicates',
+      object      MEDIUMTEXT   NOT NULL,
+      PRIMARY KEY (id)
+    ) ENGINE = InnoDB
+      DEFAULT CHARSET = utf8;
+  `)
+}
diff --git a/src/loaders/subGraphJobs.ts b/src/loaders/subGraphJobs.ts
index 74f29df..78adad5 100644
--- a/src/loaders/subGraphJobs.ts
+++ b/src/loaders/subGraphJobs.ts
@@ -21,6 +21,7 @@ const gr = require('graphql-request')
 const { request, gql } = gr
 import epnsAPIHelper from '../helpers/epnsAPIHelper'
 import PayloadsService from '../services/payloadsService'
+import {EnvLoader} from "../utilz/envLoader";
 
 //helper function to format the scheduler as per node-schedule
 export function secondsToHms(pollTime) {
@@ -189,6 +190,9 @@ export async function scheduleTask(
 }
 //Main function
 export default async function main({ logger }) {
+  if (EnvLoader.getPropertyAsBool('VALIDATOR_DISABLE_ALL_SERVICES')) {
+    return;
+  }
   logger.info('Initiating subgraph cron tasks')
   const channel = Container.get(Channel)
   const jobs = await channel.getAllSubGraphDetails()
diff --git a/src/proto/push/block.proto b/src/proto/push/block.proto
new file mode 100644
index 0000000..07c6df3
--- /dev/null
+++ b/src/proto/push/block.proto
@@ -0,0 +1,162 @@
+syntax = "proto3";
+package push;
+
+// BLOCK ---------------------------------------------------------------------------------------------------------------
+
+enum Role {
+  ROLE_UNSPECIFIED = 0;
+  VALIDATOR = 1;
+  ATTESTER = 2;
+}
+
+enum Vote {
+  VOTE_UNSPECIFIED = 0;
+  ACCEPTED = 1;
+  REJECTED = 2;
+}
+
+message DidMapping {
+  map<string, string> didMapping = 1;
+}
+
+
+// section added by a block producer (we call it the Validator in the context of the block)
+message TxValidatorData {
+  // the validator votes on whether it supports the tx
+  Vote vote = 1;
+  // additional context goes below;
+  // if it is signed, all attestors agree with this context of tx processing
+  DidMapping didMapping = 2;
+}
+
+// section added by a block attester
+// offset0 = block producer vote
+// offset1..N = block attestor votes
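+//
+// worked example (illustrative, not part of the schema): in a block with
+// 1 producer and 2 attestors, each tx ends up carrying validatorData.vote
+// (the producer's vote) plus attestorData[0] and attestorData[1] (one entry
+// per attestor), while at the block level signers[0] has role VALIDATOR and
+// signers[1..2] have role ATTESTER, each holding a signature over the block
+message TxAttestorData {
+  // any attestor can vote if he supports the tx or not
+  Vote vote = 1;
+  // any additional fields below, that attestor wants to add for the 'storage' layer
+  // i.e. 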
repeated string spam
+}
+
+// transaction with voting data
+message TransactionObj {
+  // raw bytes: you need to decode this based on category into a Transaction
+  Transaction tx = 1;
+  // validator(block producer) processes 'data' field and fills this output
+  TxValidatorData validatorData = 2;
+  // attestors process 'data' and 'metaData' and fill this output
+  repeated TxAttestorData attestorData = 3;
+}
+
+message Signer {
+  string node = 1;
+  Role role = 2;
+  string sig = 3;
+}
+
+message Block {
+  // block timestamp in millis;
+  uint64 ts = 1;
+  bytes attestToken = 4;
+  repeated TransactionObj txObj = 2;
+  repeated Signer signers = 3;
+}
+
+
+// TRANSACTION ---------------------------------------------------------------------------------------------------------
+
+
+message Transaction {
+  uint32 type = 1; // 0 for non-value, 1 for value
+  string category = 2; // INIT_DID, INIT_SESSION_KEY, NOTIF, EMAIL
+  string sender = 3;
+  repeated string recipients = 4;
+  bytes data = 5; // data is also a protobuf message, depending on tx_type
+  bytes salt = 6;
+  bytes apiToken = 7; // f(apiToken) = v1
+  bytes signature = 8;
+  string fee = 9; // we don't use this as of now, no native 256bits int support in protobuf
+}
+
+
+
+// PAYLOADS ------------------------------------------------------------------------------------------------------------
+
+
+// INIT_DID
+message InitDid {
+  string did = 1;
+  string masterPubKey = 2;
+  uint32 derivedKeyIndex = 3;
+  string derivedPubKey = 4;
+  string encDerivedPrivKey = 5;
+}
+
+enum KeyAction {
+  UNSPECIFIED = 0;
+  PUBLISH_KEY = 1;
+  REVOKE_KEY = 2;
+}
+
+// INIT_SESSION_KEY
+
+message SessionKeyAction {
+  int32 keyIndex = 1;
+  string keyAddress = 2;
+  KeyAction action = 3;
+}
+
+// NOTIFICATION
+
+// PlainText Notification
+// represents a targeted notification with up to 1000 recipients (this is defined by a top level transaction)
+message Notification {
+  // the app which sends the notif, i.e. "ShapeShift"
+  string app = 1;
+  // notification title, i.e. "ShapeShift - 2 PUFETH received!"
+  string title = 2;
+  // notification body, i.e. "📩 Sender: 0x4bd5…7170\n👤 Receiver: 0x121d…876e (you)\n🪙 Asset: PUFETH\n💰 Amount: 2\n"
+  string body = 3;
+  // TBD: this field still needs a clear definition
+  string channelUrl = 4;
+  // on click action, i.e. 
"https://etherscan.io/tx/0x3c93fd0617c5f7431d2899fa8e7ccea0ec09d4210a96c68b0fddf5772833871e" + string actionUrl = 5; + // big image url + string img = 6; + // small image url + string icon = 7; +} + +// Encrypted Notificaiton +enum EncryptionType { + ENCRYPTION_UNSPECIFIED = 0; + ECC = 1; +} + +message EncryptionDetails { + string recipientDID = 1; + EncryptionType type = 2; + int32 keyIndex = 3; + bytes encryptedSecret = 4; +} + +message EncryptedNotif { + bytes encryptedNotif = 1; + EncryptionDetails sourceEnc = 2; + repeated EncryptionDetails targetEnc = 3; +} + +// EMAIL + +message Attachment { + string filename = 1; + string type = 2; + string content = 3; // base64 encoded +} + +message Email { + string subject = 1; + string body = 2; // Plain text or HTML body of the email + repeated Attachment attachments = 3; +} diff --git a/src/services/channelsCompositeClasses/channelsClass.ts b/src/services/channelsCompositeClasses/channelsClass.ts index 2bf5d36..b2becc1 100644 --- a/src/services/channelsCompositeClasses/channelsClass.ts +++ b/src/services/channelsCompositeClasses/channelsClass.ts @@ -21,6 +21,7 @@ import fs from 'fs' import * as db from '../../helpers/dbHelper' import epnsAPIHelper from '../../helpers/epnsAPIHelper' import { isValidAddress } from '../../helpers/utilsHelper' +import {EnvLoader} from "../../utilz/envLoader"; const VALID_SUBGRAPH_FIELDS = ['subgraph_attempts', 'counter'] const CHANGED_NOTIFCIATION_SETTING_DELIMITER = '+' @@ -158,6 +159,9 @@ export default class Channel { // for processing ipfshash in batches of 50 public async batchProcessChannelData() { + if (EnvLoader.getPropertyAsBool('VALIDATOR_DISABLE_ALL_SERVICES')) { + return; + } const logger = this.logger logger.debug('Trying to batch process all channels data processing, 50 requests at a time') diff --git a/src/services/messaging-common/blockUtil.ts b/src/services/messaging-common/blockUtil.ts new file mode 100644 index 0000000..c1c3b5e --- /dev/null +++ b/src/services/messaging-common/blockUtil.ts @@ -0,0 +1,24 @@ +import {Transaction} from "../../generated/push/block_pb"; +import {EnvLoader} from "../../utilz/envLoader"; +import {HashUtil} from "../../utilz/hashUtil"; +import {BitUtil} from "../../utilz/bitUtil"; + +export class BlockUtil { + public static readonly MAX_TRANSACTION_SIZE_BYTES = EnvLoader.getPropertyAsNumber('MAX_TRANSACTION_SIZE_BYTES', 1000000); + + public static parseTransaction(txRaw: Uint8Array): Transaction { + if (txRaw == null || txRaw.length > BlockUtil.MAX_TRANSACTION_SIZE_BYTES) { + throw new Error('tx size is too big'); + } + const tx = Transaction.deserializeBinary(txRaw); + return tx; + } + + public static calculateTransactionHashBase16(txRaw: Uint8Array): string { + return BitUtil.bytesToBase16(HashUtil.sha256AsBytes(txRaw)); + } + + public static calculateBlockHashBase16(blockRaw: Uint8Array): string { + return BitUtil.bytesToBase16(HashUtil.sha256AsBytes(blockRaw)); + } +} \ No newline at end of file diff --git a/src/services/messaging-dset/queueClient.ts b/src/services/messaging-dset/queueClient.ts index 62fb8dd..066fc4e 100644 --- a/src/services/messaging-dset/queueClient.ts +++ b/src/services/messaging-dset/queueClient.ts @@ -95,7 +95,7 @@ export class QueueClient { baseUri: string, firstOffset: number = 0 ): Promise<{ items: QItem[]; lastOffset: number } | null> { - const url = `${baseUri}/apis/v1/dset/queue/${queueName}?firstOffset=${firstOffset}` + const url = `${baseUri}/api/v1/dset/queue/${queueName}?firstOffset=${firstOffset}` try { const re = await 
axios.get(url, {
        timeout: 3000
diff --git a/src/services/messaging-dset/queueServer.ts b/src/services/messaging-dset/queueServer.ts
index 3a5616b..ee9e538 100644
--- a/src/services/messaging-dset/queueServer.ts
+++ b/src/services/messaging-dset/queueServer.ts
@@ -123,12 +123,12 @@ export class QueueServer implements Consumer {
   }
 
   public async getLastOffset(): Promise<number> {
-    const row = await MySqlUtil.queryOneRow<{ lastOffset: number }>(
-      `select max(id) as lastOffset
+    const max = await MySqlUtil.queryOneValue(
+      `select max(id)
        from dset_queue_${this.queueName}
        limit 1`
     )
-    return row == null ? 0 : row.lastOffset
+    return max == null ? 0 : max;
   }
 
   public async readWithLastOffset(
diff --git a/src/services/messaging/QueueManager.ts b/src/services/messaging/QueueManager.ts
index 4749ecb..14473fe 100644
--- a/src/services/messaging/QueueManager.ts
+++ b/src/services/messaging/QueueManager.ts
@@ -1,67 +1,13 @@
-/*
-This is a distributed set, which has the same contents on every node.
-
-Every node syncs with every other node - reads it's [queue],
-downloads all items starting from latest offset,
-and saves them into [queue] and [set]
-
-example:
-
-1) initial state:
-node1 <---> node2
-(a,b)       (c)
-
-2) replicates to:
-node1 <---> node2
-(a,b,c)     (c,a,b)
-
-3) node 2 adds new item e
-node1 <---> node2
-(a,b,c)     (c,a,b, e)
-
-4) node1 reads new item from the node2 queue, and appends it to local set
-node1 <---> node2
-(a,b,c, e)  (c,a,b, e)
-
-every server: adds only new items
- */
-
 import { Inject, Service } from 'typedi'
 import { MySqlUtil } from '../../utilz/mySqlUtil'
 import { Logger } from 'winston'
 import ChannelsService from '../channelsService'
-import schedule from 'node-schedule'
 import { ValidatorContractState } from '../messaging-common/validatorContractState'
 import { WinstonUtil } from '../../utilz/winstonUtil'
 import { QueueServer } from '../messaging-dset/queueServer'
-import { QueueClient } from '../messaging-dset/queueClient'
-import { QueueClientHelper } from '../messaging-common/queueClientHelper'
-
-/*
-The data flow:
-
-comm contract (via HistoryFetcher)
-rest endpoint
-   |
-   |
-   V
-ChannelsService
-  1.addExternalSubscribers    ---->  SubscribersService  ----->  QueueInitializerValidator
-  1.removeExternalSubscribers ---->    |                           3. Append to Queue
-                                       V
-                                     2.validate,
-                                       tryAdd to db
-
-
-  SubscribersService  <------  1. QueueClient
-    |                              |
-    V                              |
-  2.validate,                      |
-    tryAdd to db                   V
-                                 3. 
Append to Queue
-
-
- */
+
+
+
 @Service()
 export class QueueManager {
   public log: Logger = WinstonUtil.newLog(QueueManager)
@@ -81,8 +27,7 @@ export class QueueManager {
   private static QUEUE_REPLY_PAGE_SIZE = 10
   private static CLIENT_REQUEST_PER_SCHEDULED_JOB = 10
 
-  private subscribersQueue: QueueServer
-  private subscribersQueueClient: QueueClient
+  private mBlockQueue: QueueServer
 
   private queueMap = new Map()
@@ -92,35 +37,9 @@
 
   public async postConstruct() {
     this.log.debug('postConstruct()')
-    const qv = QueueManager
-    // setup queues that serve data to the outside world
-    this.subscribersQueue = new QueueServer(
-      qv.QUEUE_SUBSCRIBERS,
-      qv.QUEUE_REPLY_PAGE_SIZE,
-      this.channelService
-    )
-    await this.startQueue(this.subscribersQueue)
-
-    this.mBlockQueue = new QueueServer(qv.QUEUE_MBLOCK, 10, null)
+    this.mBlockQueue = new QueueServer(QueueManager.QUEUE_MBLOCK, 10, null)
     await this.startQueue(this.mBlockQueue)
-
-    // setup client that fetches data from remote queues
-    this.subscribersQueueClient = new QueueClient(this.subscribersQueue, qv.QUEUE_SUBSCRIBERS)
-    await QueueClientHelper.initClientForEveryQueueForEveryValidator(this.contractState, [
-      qv.QUEUE_SUBSCIRBERS
-    ])
-    const qs = this
-    schedule.scheduleJob(this.CLIENT_READ_SCHEDULE, async function () {
-      const taskName = 'Client Read Scheduled'
-      try {
-        await qs.subscribersQueueClient.pollRemoteQueue(qv.CLIENT_REQUEST_PER_SCHEDULED_JOB)
-        qs.log.info(`Cron Task Completed -- ${taskName}`)
-      } catch (err) {
-        qs.log.error(`Cron Task Failed -- ${taskName}`)
-        qs.log.error(`Error Object: %o`, err)
-      }
-    })
   }
 
   private async startQueue(queue: QueueServer) {
@@ -137,6 +56,11 @@
     return result
   }
 
+  public async getQueueLastOffsetNum(queueName: string): Promise<number> {
+    return await this.getQueue(queueName).getLastOffset()
+  }
+
+  // todo: remove
   public async getQueueLastOffset(queueName: string): Promise<{ result: number }> {
     const lastOffset = await this.getQueue(queueName).getLastOffset()
     return { result: lastOffset }
diff --git a/src/services/messaging/transactionError.ts b/src/services/messaging/transactionError.ts
new file mode 100644
index 0000000..86ed235
--- /dev/null
+++ b/src/services/messaging/transactionError.ts
@@ -0,0 +1,6 @@
+export class TransactionError extends Error {
+  constructor(message: string) {
+    super(message);
+    this.name = "TransactionError";
+  }
+}
\ No newline at end of file
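
For reference, the mblock queue that QueueManager now serves can be polled over
HTTP the same way QueueClient does it. A minimal polling sketch (assuming a node
on localhost:4001 and the `{ items, lastOffset }` reply shape used by
QueueClient above; error handling omitted):

```typescript
import axios from 'axios'

// read the mblock queue page by page, starting from offset 0
async function readMblockQueue(baseUri: string): Promise<void> {
  let offset = 0
  while (true) {
    const url = `${baseUri}/api/v1/dset/queue/mblock?firstOffset=${offset}`
    const resp = await axios.get(url, { timeout: 3000 })
    const { items, lastOffset } = resp.data
    if (items == null || items.length == 0) {
      break // nothing new
    }
    for (const item of items) {
      console.log('mblock item', item)
    }
    offset = lastOffset // continue from the last seen offset
  }
}

readMblockQueue('http://localhost:4001').catch(console.error)
```

diff --git a/src/services/messaging/validatorClient.ts b/src/services/messaging/validatorClient.ts
index 6f6a100..9debd34 100644
--- a/src/services/messaging/validatorClient.ts
+++ b/src/services/messaging/validatorClient.ts
@@ -16,7 +16,7 @@ export class ValidatorClient {
   timeout: number = 500000
 
   constructor(baseUri: string) {
-    this.baseUri = baseUri + '/apis/v1'
+    this.baseUri = baseUri + '/api/v1'
     this.log.level = 'error'
   }
 
diff --git a/src/services/messaging/validatorLoader.ts b/src/services/messaging/validatorLoader.ts
index 7c8c078..68d8031 100644
--- a/src/services/messaging/validatorLoader.ts
+++ b/src/services/messaging/validatorLoader.ts
@@ -3,15 +3,20 @@ import { QueueManager } from './QueueManager'
 import { MySqlUtil } from '../../utilz/mySqlUtil'
 import { ValidatorNode } from './validatorNode'
 import * as dbHelper from '../../helpers/dbHelper'
+import {ValidatorRpc} from "../../api/routes/validatorRpc";
+import {Check} from "../../utilz/check";
 
 export async function initValidator() {
   // Load validator
-  const validatorNode = Container.get(ValidatorNode)
-  await validatorNode.postConstruct()
+  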
const validatorNode = Container.get(ValidatorNode); + await validatorNode.postConstruct(); MySqlUtil.init(dbHelper.pool) // Load dset (requires a loaded contract) const dset = Container.get(QueueManager) - await dset.postConstruct() + await dset.postConstruct(); + + const validatorRpc = Container.get(ValidatorRpc); + Check.notNull(validatorRpc, 'ValidatorRpc is null'); } diff --git a/src/services/messaging/validatorNode.ts b/src/services/messaging/validatorNode.ts index ecdfb16..e21d252 100644 --- a/src/services/messaging/validatorNode.ts +++ b/src/services/messaging/validatorNode.ts @@ -1,15 +1,15 @@ -import { MsgConverterService } from './msgConverterService' -import { ObjectHasher } from '../../utilz/objectHasher' -import { ethers, Wallet } from 'ethers' -import { Inject, Service } from 'typedi' -import { Logger } from 'winston' -import { MsgDeliveryService } from './msgDeliveryService' -import { EthSig } from '../../utilz/ethSig' -import { ValidatorClient } from './validatorClient' -import { WaitNotify } from '../../utilz/waitNotify' -import { NodeInfo, ValidatorContractState } from '../messaging-common/validatorContractState' -import { ValidatorRandom } from './validatorRandom' -import { ValidatorPing } from './validatorPing' +import {MsgConverterService} from './msgConverterService' +import {ObjectHasher} from '../../utilz/objectHasher' +import {ethers, Wallet} from 'ethers' +import {Inject, Service} from 'typedi' +import {Logger} from 'winston' +import {MsgDeliveryService} from './msgDeliveryService' +import {EthSig} from '../../utilz/ethSig' +import {ValidatorClient} from './validatorClient' +import {WaitNotify} from '../../utilz/waitNotify' +import {NodeInfo, ValidatorContractState} from '../messaging-common/validatorContractState' +import {ValidatorRandom} from './validatorRandom' +import {ValidatorPing} from './validatorPing' import StrUtil from '../../utilz/strUtil' import { FeedItem, @@ -20,30 +20,38 @@ import { MessageBlockUtil, NetworkRole, NodeMeta, - PayloadItem, RecipientMissing, RecipientsMissing } from '../messaging-common/messageBlock' -import { WinstonUtil } from '../../utilz/winstonUtil' -import { RedisClient } from '../messaging-common/redisClient' -import { Coll } from '../../utilz/coll' +import {WinstonUtil} from '../../utilz/winstonUtil' +import {RedisClient} from '../messaging-common/redisClient' +import {Coll} from '../../utilz/coll' import DateUtil from '../../utilz/dateUtil' -import { QueueManager } from './QueueManager' -import { Check } from '../../utilz/check' +import {QueueManager} from './QueueManager' +import {Check} from '../../utilz/check' import schedule from 'node-schedule' -import { - StorageContractListener, - StorageContractState -} from '../messaging-common/storageContractState' -import { AxiosResponse } from 'axios' -import { PromiseUtil } from '../../utilz/promiseUtil' +import {StorageContractListener, StorageContractState} from '../messaging-common/storageContractState' +import {AxiosResponse} from 'axios' +import {PromiseUtil} from '../../utilz/promiseUtil' import SNodeClient from './snodeClient' -import { AggregatedReplyHelper, NodeHttpStatus } from './AggregatedReplyHelper' -import Subscribers, { SubscribersItem } from '../channelsCompositeClasses/subscribersClass' +import {AggregatedReplyHelper, NodeHttpStatus} from './AggregatedReplyHelper' +import Subscribers, {SubscribersItem} from '../channelsCompositeClasses/subscribersClass' import config from '../../config' import ChannelsService from '../channelsService' -import { MySqlUtil } from 
'../../utilz/mySqlUtil'
+import {MySqlUtil} from '../../utilz/mySqlUtil'
 import {EnvLoader} from "../../utilz/envLoader";
+import {
+  Block,
+  InitDid, Signer,
+  Transaction,
+  TransactionObj,
+  TxAttestorData,
+  TxValidatorData
+} from "../../generated/push/block_pb";
+import {BlockUtil} from "../messaging-common/blockUtil";
+import {BitUtil} from "../../utilz/bitUtil";
+import {TransactionError} from "./transactionError";
+import {EthUtil} from "../../utilz/EthUtil";
 
 // todo move read/write quorum to smart contract constants
 // todo joi validate for getRecord
@@ -86,14 +94,15 @@ export class ValidatorNode implements StorageContractListener {
 
   // state
   // block
-  private currentBlock: MessageBlock
+  private currentBlock: Block;
   // objects used to wait on block
   private blockMonitors: Map<string, WaitNotify> = new Map()
 
   private readQuorum = 2
   private writeQuorum = 2
 
-  constructor() {}
+  constructor() {
+  }
 
   // https://github.com/typestack/typedi/issues/6
   public async postConstruct() {
@@ -128,93 +137,192 @@ export class ValidatorNode implements StorageContractListener {
     return this.valContractState.getAllNodesMap()
   }
 
-  public async addPayloadToMemPool(
-    p: PayloadItem,
-    validatorTokenRequired: boolean = false
-  ): Promise<boolean> {
+  public async sendTransaction(txRaw: Uint8Array, validatorTokenRequired: boolean): Promise<boolean> {
     if (this.currentBlock == null) {
-      this.currentBlock = new MessageBlock()
+      this.currentBlock = new Block();
     }
+    // check
+    const tx = BlockUtil.parseTransaction(txRaw);
+    this.log.debug('processing tx: %o', tx.toObject())
     if (validatorTokenRequired) {
       // check that this Validator is a valid target, according to validatorToken
-      const valid = this.random.checkValidatorToken(p.validatorToken, this.nodeId)
+      let valid = true;
+      let validatorToken = BitUtil.bytesToBase64(tx.getApitoken());
+      try {
+        valid = this.random.checkValidatorToken(validatorToken, this.nodeId);
+      } catch (e) {
+        // parsing error
+        let err = 'invalid apiToken for nodeId ' + this.nodeId;
+        this.log.error(err, e);
+        throw new TransactionError(err);
+      }
       if (!valid) {
-        this.log.error(`invalid validatorToken %s , for nodeId %s`, p.validatorToken, this.nodeId)
-        return false
+        // logical error
+        let err = 'invalid apiToken for nodeId ' + this.nodeId;
+        this.log.error(err);
+        throw new TransactionError(err);
      }
     }
-    const feedItem = await this.converterService.addExternalPayload(p)
-    this.currentBlock.requests.push(p)
-    this.currentBlock.responses.push(feedItem)
+    this.checkValidTransactionFields(tx);
+    // append transaction
+    let txObj = new TransactionObj();
+    txObj.setTx(tx);
+    this.currentBlock.addTxobj(txObj);
     // todo handle bad conversions
     return true
   }
 
+  private checkValidTransactionFields(tx: Transaction) {
+
+    if (tx.getType() != 0) {
+      throw new TransactionError(`Only non-value transactions are supported`);
+    }
+    let senderAddr = EthUtil.parseCaipAddress(tx.getSender());
+    let recipientAddrs = tx.getRecipientsList().map(value => EthUtil.parseCaipAddress(value));
+    let goodSender = !StrUtil.isEmpty(senderAddr.chainId) && !StrUtil.isEmpty(senderAddr.namespace)
+      && !StrUtil.isEmpty(senderAddr.addr);
+    if (!goodSender) {
+      throw new TransactionError(`sender field is invalid ${tx.getSender()}`);
+    }
+
+    if (tx.getCategory() === 'INIT_DID') {
+      let txData = InitDid.deserializeBinary(tx.getData());
+      if (StrUtil.isEmpty(txData.getDid())) {
+        throw new TransactionError(`did missing`);
+      }
+      if (StrUtil.isEmpty(txData.getMasterpubkey())) {
+        throw new TransactionError(`masterPubKey missing`);
+      }
+      if 
(StrUtil.isEmpty(txData.getDerivedpubkey())) {
+        throw new TransactionError(`derivedPubKey missing`);
+      }
+      if (StrUtil.isEmpty(txData.getEncderivedprivkey())) {
+        throw new TransactionError(`encDerivedPrivKey missing`);
+      }
+    } else if (tx.getCategory() === 'NOTIFICATION') {
+      // todo checks
+    } else {
+      throw new TransactionError(`unsupported transaction category`);
+    }
+    if (StrUtil.isEmpty(BitUtil.bytesToBase16(tx.getSalt()))) {
+      throw new TransactionError(`salt field is invalid`);
+    }
+
+    let validSignature = true; // todo check signature
+    if (!validSignature) {
+      throw new TransactionError(`signature field is invalid`);
+    }
+  }
+
+
   /**
    * This method blocks for a long amount of time,
    * until processBlock() gets executed
    * @param p
    */
-  public async addPayloadToMemPoolBlocking(p: PayloadItem): Promise<boolean> {
-    try {
-      const monitor = new WaitNotify()
-      this.blockMonitors.set(p.id, monitor)
-      this.log.debug('adding monitor for id: %s', p.id)
-      const success = await this.addPayloadToMemPool(p)
+  public async sendTransactionBlocking(txRaw: Uint8Array): Promise<string> {
+    // try {
+    const monitor = new WaitNotify();
+    let txHash = BlockUtil.calculateTransactionHashBase16(txRaw);
+    this.blockMonitors.set(txHash, monitor)
+    this.log.debug('adding monitor for transaction hash: %s', txHash)
+    const success = await this.sendTransaction(txRaw, true);
     if (!success) {
-      return false
+      return null;
     }
     await monitor.wait(this.ADD_PAYLOAD_BLOCKING_TIMEOUT) // block until processBlock()
-    return true
-    } catch (e) {
-      this.log.error(e)
-      return false
-    }
+    return txHash;
+    // } catch (e) {
+    //   this.log.error(e)
+    //   return null;
+    // }
   }
 
   /**
    * Add first signature and start processing
    * @param cronJob
    */
-  public async batchProcessBlock(cronJob: boolean): Promise<MessageBlock> {
+  public async batchProcessBlock(cronJob: boolean): Promise<Block> {
     this.log.info('batch started');
-    if (this.currentBlock == null || this.currentBlock.requests.length == 0) {
+    if (this.currentBlock == null
+      || this.currentBlock.getTxobjList() == null
+      || this.currentBlock.getTxobjList().length == 0) {
       if (!cronJob) {
         this.log.error('block is empty')
       }
-      return null
+      return null;
     }
-    const block = this.currentBlock
-    const blockMonitors = this.blockMonitors
+    const block = this.currentBlock;
+    const blockMonitors = this.blockMonitors;
     // replace it with a new empty block
-    this.currentBlock = new MessageBlock()
-    this.blockMonitors = new Map()
+    this.currentBlock = new Block();
+    this.blockMonitors = new Map();
+
+
+    // populate block
+    block.setTs(DateUtil.currentTimeMillis());
+    for (let txObj of block.getTxobjList()) {
+      let vd = new TxValidatorData();
+      vd.setVote(1);
+      txObj.setValidatordata(vd);
 
-    if (block.responses.length != block.requests.length) {
-      throw new Error(`message block has incorrect length ${block.responses.length}`)
+      // todo fake attestation as of now (todo remove)
+      let ad = new TxAttestorData();
+      ad.setVote(1);
+      txObj.setAttestordataList([ad, ad]);
     }
-    if (block.responsesSignatures.length != 0) {
-      throw new Error(`message block has incorrect signature length ${block.responses.length}`)
+    const tokenObj = this.random.createAttestToken();
+    this.log.debug('random token: %o', tokenObj);
+    Check.isTrue(tokenObj.attestVector?.length > 0, 'attest vector is empty');
+    Check.isTrue(tokenObj.attestVector[0] != null, 'attest vector is empty');
+    block.setAttesttoken(BitUtil.stringToBytes(tokenObj.attestToken));
+
+    // collect attestation per each transaction
+    // and signature per block
+    // from every attestor
+    // todo
+    // todo 
fake attestation as of now (todo remove)
+    for (let txObj of block.getTxobjList()) {
+
+      let ad = new TxAttestorData();
+      ad.setVote(1);
+      txObj.setAttestordataList([ad, ad]);
+    }
+    // todo fake signing as of now
+    let vSign = new Signer();
+    vSign.setNode(this.nodeId);
+    vSign.setRole(1);
+    vSign.setSig("AA11");
+
+    let aSign1 = new Signer();
+    aSign1.setNode("11");
+    aSign1.setRole(2);
+    aSign1.setSig("11");
+
+    let aSign2 = new Signer();
+    aSign2.setNode("22");
+    aSign2.setRole(2);
+    aSign2.setSig("22");
+
+    block.setSignersList([vSign, aSign1, aSign2]);
+
+    /*
     // sign every response
-    for (let i = 0; i < block.responses.length; i++) {
-      const feedItem = block.responses[i]
+    for (let i = 0; i < block.getTxobjList().length; i++) {
+      const txObj = block.getTxobjList()[i];
+      // TODO START FROM HERE !!!!!!!!!!!!!!!!!!!!!!!
       const nodeMeta = {
         nodeId: this.nodeId,
         role: NetworkRole.VALIDATOR,
         tsMillis: Date.now()
       }
-      const fisData: FISData = { vote: 'ACCEPT' }
+      const fisData: FISData = {vote: 'ACCEPT'}
       const ethSig = await EthSig.create(this.wallet, feedItem, fisData, nodeMeta)
       const fiSig = new FeedItemSig(fisData, nodeMeta, ethSig)
       block.responsesSignatures.push([fiSig])
     }
     // network status
-    const tokenObj = this.random.createAttestToken()
-    this.log.debug('random token: %o', tokenObj)
-    Check.isTrue(tokenObj.attestVector?.length > 0, 'attest vector is empty')
-    Check.isTrue(tokenObj.attestVector[0] != null, 'attest vector is empty')
-    block.attestToken = tokenObj.attestToken
+
     const attestCount = 1
     const safeAttestCountToAvoidDuplicates = attestCount + 5
     // todo handle if some M amount of nodes refuses to attest!
@@ -291,7 +399,7 @@
       arr.push(...asResult.reports)
       this.log.debug('attestor %s successfully received block signatures and published the block')
     }
-  }
+  }*/
   // group same reports , take one
   // todo
   // const sortedNodeReports = Coll.sortMapOfArrays(nodeReportsMap, false);
@@ -304,31 +412,35 @@
     // call a contract
 
     // 2: deliver
-    await this.publishCollectivelySignedMessageBlock(block)
+    await this.publishCollectivelySignedMessageBlock(block);
 
     // 3: unblock addPayloadToMemPoolBlocking() requests
-    for (let i = 0; i < block.responses.length; i++) {
-      const fi = block.responses[i]
-      const id = fi.payload.data.sid
-      const objMonitor = blockMonitors.get(id)
+    for (let txObj of block.getTxobjList()) {
+      let tx = txObj.getTx();
+      let txHash = BlockUtil.calculateTransactionHashBase16(tx.serializeBinary());
+
+      const objMonitor = blockMonitors.get(txHash);
       if (objMonitor) {
-        this.log.debug('unblocking monitor %s', objMonitor)
-        objMonitor.notifyAll()
+        this.log.debug('unblocking monitor %s', objMonitor);
+        objMonitor.notifyAll();
       } else {
-        this.log.debug('no monitor found for id %s', id)
+        this.log.debug('no monitor found for id %s', txHash);
      }
    }
    return block
  }

   // sends message block to all connected delivery nodes
-  public async publishCollectivelySignedMessageBlock(mb: MessageBlock) {
-    const queue = this.queueInitializer.getQueue(QueueManager.QUEUE_MBLOCK)
+  public async publishCollectivelySignedMessageBlock(mb: Block) {
+    const queue = this.queueInitializer.getQueue(QueueManager.QUEUE_MBLOCK);
+    let blockBytes = mb.serializeBinary();
+    let blockAsBase16 = BitUtil.bytesToBase16(blockBytes);
+    const blockHashAsBase16 = BlockUtil.calculateBlockHashBase16(blockBytes);
     const insertResult = await queue.accept({
-      object: mb,
-      object_hash: 
MessageBlockUtil.calculateHash(mb)
-    })
-    this.log.debug(`published message block ${mb.id} success: ${insertResult}`)
+      object: blockAsBase16,
+      object_hash: blockHashAsBase16
+    });
+    this.log.debug(`published message block ${blockHashAsBase16} success: ${insertResult}`)
   }
 
   // ------------------------------ ATTESTOR -----------------------------------------
 
@@ -346,7 +458,7 @@
     )
     const check1 = MessageBlockUtil.checkBlock(block, activeValidators)
     if (!check1.success) {
-      return { error: check1.err, signatures: null }
+      return {error: check1.err, signatures: null}
     }
     // attest token checks
     const item0sig0 = block.responsesSignatures[0][0]
@@ -359,7 +471,7 @@
       )
     ) {
       this.log.error('block attest token is invalid')
-      return { error: 'block attest token is invalid', signatures: null }
+      return {error: 'block attest token is invalid', signatures: null}
     }
     // conversion checks
     const sigs: FeedItemSig[] = []
@@ -383,7 +495,7 @@
     // fuzzy check subscribers
     const cmpSubscribers = this.compareSubscribersDroppingLatestIsAllowed(feedItem, feedItemNew)
     if (!cmpSubscribers.subscribersAreEqual) {
-      return { error: cmpSubscribers.error, signatures: null }
+      return {error: cmpSubscribers.error, signatures: null}
     }
     // sign
     const nodeMeta = {
@@ -405,7 +517,7 @@
     await this.redisCli.getClient().set(key, JSON.stringify(block))
     const expirationInSeconds = 60
     await this.redisCli.getClient().expire(key, expirationInSeconds)
-    return { error: null, signatures: sigs }
+    return {error: null, signatures: sigs}
   }
 
   /**
@@ -445,12 +557,12 @@
       if (!isFreshSubscriber) {
         const errMsg = `${recipientV.addr} (${deltaInMinutes}mins) exists in V, missing in A`
         this.log.error('%s %s', dbgPrefix, errMsg)
-        return { subscribersAreEqual: false, error: errMsg }
+        return {subscribersAreEqual: false, error: errMsg}
       }
       // V has a subscriber, while A doesn't
       // we allow to ignore this if this subscriber is a 'fresh' one
       this.log.debug('%s is a fresh subscriber only in A', dbgPrefix, recipientA)
-      const recipientMissing: RecipientMissing = { addr: recipientV.addr }
+      const recipientMissing: RecipientMissing = {addr: recipientV.addr}
       recipientsToRemove.recipients.push(recipientMissing)
     }
     // check A subscribers against V subscribers
@@ -461,10 +573,10 @@
       if (!isFreshSubscriber) {
         const errMsg = `${recipientA.addr} (${deltaInMinutes}mins) exists in A, missing in V`
         this.log.error('%s %s', dbgPrefix, errMsg)
-        return { subscribersAreEqual: false, error: errMsg }
+        return {subscribersAreEqual: false, error: errMsg}
       }
     }
-    const result = { subscribersAreEqual: true, comparisonResult: recipientsToRemove }
+    const result = {subscribersAreEqual: true, comparisonResult: recipientsToRemove}
     this.log.debug('%s result %s', dbgPrefix, result)
     return result
   }
@@ -596,7 +708,8 @@
   async handleReshard(
     currentNodeShards: Set<number> | null,
     allNodeShards: Map<number, Set<number>>
-  ): Promise<void> {}
+  ): Promise<void> {
+  }
 
   public async getRecord(nsName: string, nsIndex: string, dt: string, key: string): Promise<any> {
     this.log.debug(`getRecord() nsName=${nsName}, nsIndex=${nsIndex}, dt=${dt}, key=${key}`)
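
The transaction checks above lean on EthUtil.parseCaipAddress, which the next
hunk makes public. A quick usage sketch (import path illustrative; the CaipAddr
fields follow the namespace/chainId/addr checks used in
checkValidTransactionFields):

```typescript
import { EthUtil } from './src/utilz/EthUtil'

// 'eip155:1:0xAA' -> { namespace: 'eip155', chainId: '1', addr: '0xAA' }
const caip = EthUtil.parseCaipAddress('eip155:1:0xAA')
if (caip == null || !caip.namespace || !caip.chainId || !caip.addr) {
  throw new Error(`sender field is invalid`)
}
```

diff --git a/src/utilz/EthUtil.ts b/src/utilz/EthUtil.ts
index 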
diff --git a/src/utilz/EthUtil.ts b/src/utilz/EthUtil.ts
index 5665d8b..4d38f37 100644
--- a/src/utilz/EthUtil.ts
+++ b/src/utilz/EthUtil.ts
@@ -1,7 +1,7 @@
 import StrUtil from './strUtil'

 export class EthUtil {
-  static parseCaipAddress(addressinCAIP: string): CaipAddr | null {
+  public static parseCaipAddress(addressinCAIP: string): CaipAddr | null {
     if (StrUtil.isEmpty(addressinCAIP)) {
       return null
     }
diff --git a/src/utilz/bitUtil.ts b/src/utilz/bitUtil.ts
index d527419..0830276 100644
--- a/src/utilz/bitUtil.ts
+++ b/src/utilz/bitUtil.ts
@@ -1,5 +1,11 @@
 import { Coll } from './coll'

+// the same 4 bytes of data in every representation used below:
+// bytes (as hex numbers)          = 0x41 0x41 0x42 0x42
+// Uint8Array (as decimal numbers) = 65 65 66 66
+// string (as ascii chars)         = AABB
+// base16 string                   = 41414242
+// base64 string                   = QUFCQg==
+
 export class BitUtil {
   /**
    * XORs 2 buffers, byte by byte: src = src XOR add
@@ -57,4 +63,38 @@ export class BitUtil {
     Coll.sortNumbersAsc(result)
     return result
   }
+
+  public static base16ToBytes(base16String: string): Uint8Array {
+    return Uint8Array.from(Buffer.from(base16String, 'hex'));
+  }
+
+  public static bytesToBase16(arr: Uint8Array): string {
+    return Buffer.from(arr).toString('hex');
+  }
+
+  // NOTE: 'binary' = latin1, i.e. each byte maps 1:1 to a char code 0..255
+  public static base64ToString(base64String: string): string {
+    return Buffer.from(base64String, 'base64').toString('binary');
+  }
+
+  public static bytesToBase64(bytes: Uint8Array): string {
+    return Buffer.from(bytes).toString('base64');
+  }
+
+  public static bytesToString(bytes: Uint8Array): string {
+    return Buffer.from(bytes).toString('utf8');
+  }
+
+  public static stringToBytes(str: string): Uint8Array {
+    return new Uint8Array(Buffer.from(str, 'utf-8'));
+  }
+
+  public static stringToBase64(str: string): string {
+    return Buffer.from(str, 'binary').toString('base64');
+  }
+
+  public static base64ToBase16(base64String: string): string {
+    return Buffer.from(base64String, 'base64').toString('hex');
+  }
 }
diff --git a/src/utilz/envLoader.ts b/src/utilz/envLoader.ts
index eafdd9b..68f5c31 100644
--- a/src/utilz/envLoader.ts
+++ b/src/utilz/envLoader.ts
@@ -1,5 +1,6 @@
 import dotenv from 'dotenv'
 import StrUtil from './strUtil'
+import {NumUtil} from "./numUtil";

 export class EnvLoader {
   public static loadEnvOrFail() {
@@ -31,4 +32,9 @@ export class EnvLoader {
     }
     return val
   }
+
+  public static getPropertyAsNumber(propName: string, defaultValue: number): number {
+    const val = process.env[propName]
+    return NumUtil.parseInt(val, defaultValue);
+  }
 }
diff --git a/src/utilz/hashUtil.ts b/src/utilz/hashUtil.ts
new file mode 100644
index 0000000..2bbe71a
--- /dev/null
+++ b/src/utilz/hashUtil.ts
@@ -0,0 +1,12 @@
+import * as CryptoJS from 'crypto-js'
+
+export class HashUtil {
+
+  public static sha256AsBytes(data: Uint8Array): Uint8Array {
+    const wa = CryptoJS.lib.WordArray.create(data);
+    const shaAsWordArray = CryptoJS.SHA256(wa);
+    const hexString = CryptoJS.enc.Hex.stringify(shaAsWordArray);
+    return Uint8Array.from(Buffer.from(hexString, 'hex'));
+  }
+}
\ No newline at end of file
diff --git a/src/utilz/idUtil.ts b/src/utilz/idUtil.ts
index 17f32cb..790ddab 100755
--- a/src/utilz/idUtil.ts
+++ b/src/utilz/idUtil.ts
@@ -5,4 +5,8 @@ export default class IdUtil {
   public static getUuidV4(): string {
     return uuid.v4();
   }
+
+  public static getUuidV4AsBytes(): Uint8Array {
+    return uuid.parse(uuid.v4());
+  }
 }
\ No newline at end of file
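The new BitUtil/HashUtil helpers compose into simple round-trips. An illustrative sketch (values match the encoding table added to bitUtil.ts above; import paths are relative to a hypothetical script location):

```ts
import { BitUtil } from '../src/utilz/bitUtil'
import { HashUtil } from '../src/utilz/hashUtil'

const bytes = BitUtil.stringToBytes('AABB')         // Uint8Array [65, 65, 66, 66]
console.log(BitUtil.bytesToBase16(bytes))           // '41414242'
console.log(BitUtil.bytesToBase64(bytes))           // 'QUFCQg=='
console.log(BitUtil.base64ToBase16('QUFCQg=='))     // '41414242'
console.log(BitUtil.base64ToString('QUFCQg=='))     // 'AABB' (latin1 round-trip)
console.log(BitUtil.bytesToBase16(HashUtil.sha256AsBytes(bytes))) // 64 hex chars (32-byte digest)
```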
diff --git a/tests/block/block.test.ts b/tests/block/block.test.ts
new file mode 100644
index 0000000..d27dd33
--- /dev/null
+++ b/tests/block/block.test.ts
@@ -0,0 +1,114 @@
+import 'mocha'
+import chai from 'chai'
+import {
+  Block,
+  InitDid,
+  Signer,
+  Transaction,
+  TransactionObj,
+  TxAttestorData,
+  TxValidatorData
+} from "../../src/generated/push/block_pb";
+import IdUtil from "../../src/utilz/idUtil";
+import {BitUtil} from "../../src/utilz/bitUtil";
+import {HashUtil} from "../../src/utilz/hashUtil";
+import DateUtil from "../../src/utilz/dateUtil";
+
+const expect = chai.expect;
+
+/*
+README:
+
+yarn install
+yarn build:proto
+  generates stubs from (src/proto) into (src/generated);
+  after that you can use the .proto stubs from typescript
+ */
+
+describe('block tests', function () {
+
+  it('create transaction and block, serialize/deserialize', async function () {
+    console.log("building ------------------------- ");
+    // build transaction data (app-dependent)
+    const data = new InitDid();
+    data.setDid('0xAA');
+    data.setMasterpubkey('0xBB');
+    data.setDerivedkeyindex(1);
+    data.setDerivedpubkey('0xCC');
+    data.setEncderivedprivkey('0xDD');
+    console.log("data as json", JSON.stringify(data.toObject()));
+
+    // build transaction
+    const t = new Transaction();
+    t.setType(3);
+    t.setCategory('INIT_DID');
+    t.setSender('eip155:1:0xAA');
+    t.setRecipientsList(['eip155:1:0xBB', 'eip155:1:0xCC']);
+    t.setData(data.serializeBinary());
+    t.setSalt(IdUtil.getUuidV4AsBytes()); // uuid.parse(uuid.v4())
+    t.setApitoken("eyJub2RlcyI6W3sibm9kZUlkIjoiMHg4ZTEyZEUxMkMzNWVBQmYzNWI1NmIwNEU1M0M0RTQ2OGU0NjcyN0U4IiwidHNNaWxsaXMiOjE3MjQ2NzMyNDAwMzAsInJhbmRvbUhleCI6ImY3YmY3YmYwM2ZlYTBhNzI1MTU2OWUwNWRlNjU2ODJkYjU1OTU1N2UiLCJwaW5nUmVzdWx0cyI6W3sibm9kZUlkIjoiMHhmREFFYWY3YWZDRmJiNGU0ZDE2REM2NmJEMjAzOWZkNjAwNENGY2U4IiwidHNNaWxsaXMiOjE3MjQ2NzMyNDAwMjAsInN0YXR1cyI6MX0seyJub2RlSWQiOiIweDk4RjlEOTEwQWVmOUIzQjlBNDUxMzdhZjFDQTc2NzVlRDkwYTUzNTUiLCJ0c01pbGxpcyI6MTcyNDY3MzI0MDAxOSwic3RhdHVzIjoxfV0sInNpZ25hdHVyZSI6IjB4YjMzM2NjMWI3MWM0NGM0MDhkOTZiN2JmYjYzODU0OTNjZjE2N2NiMmJkMjU1MjdkNzg2ZDM4ZjdiOTgwZWFkMzAxMmY3NmNhNzhlM2FiMWEzN2U2YTFjY2ZkMjBiNjkzZGVmZDAwOWM4NzExY2ZjODlmMDUyYjM5MzY4ZjFjZTgxYiJ9LHsibm9kZUlkIjoiMHhmREFFYWY3YWZDRmJiNGU0ZDE2REM2NmJEMjAzOWZkNjAwNENGY2U4IiwidHNNaWxsaXMiOjE3MjQ2NzMyNDAwMjUsInJhbmRvbUhleCI6IjkyMTY4NzRkZjBlMTQ4NTk3ZjlkNDRkMGRmZmFlZGU5NTg0NGRkMTciLCJwaW5nUmVzdWx0cyI6W3sibm9kZUlkIjoiMHg4ZTEyZEUxMkMzNWVBQmYzNWI1NmIwNEU1M0M0RTQ2OGU0NjcyN0U4IiwidHNNaWxsaXMiOjE3MjQ2NzMyMjQ2NTAsInN0YXR1cyI6MX0seyJub2RlSWQiOiIweDk4RjlEOTEwQWVmOUIzQjlBNDUxMzdhZjFDQTc2NzVlRDkwYTUzNTUiLCJ0c01pbGxpcyI6MTcyNDY3MzIyNDY1NSwic3RhdHVzIjoxfV0sInNpZ25hdHVyZSI6IjB4N2JmYzQ0MjQ0ZGM0MTdhMjg0YzEwODUwZGEzNTE2YzUwNWEwNjJmYjIyYmI1ODU0ODg2YWEyOTk3OWUwMmYxOTdlZWMyYzk2ZDVkOTQ4ZDBhMWQ2NTBlYzIzNGRhMDVjMGY5M2JlNWUyMDkxNjFlYzJjY2JjMWU5YzllNzQyOGIxYiJ9LHsibm9kZUlkIjoiMHg5OEY5RDkxMEFlZjlCM0I5QTQ1MTM3YWYxQ0E3Njc1ZUQ5MGE1MzU1IiwidHNNaWxsaXMiOjE3MjQ2NzMyNDAwMjQsInJhbmRvbUhleCI6IjBkOWExNmE4OTljYWQwZWZjODgzZjM0NWQwZjgwYjdmYTE1YTY1NmYiLCJwaW5nUmVzdWx0cyI6W3sibm9kZUlkIjoiMHg4ZTEyZEUxMkMzNWVBQmYzNWI1NmIwNEU1M0M0RTQ2OGU0NjcyN0U4IiwidHNNaWxsaXMiOjE3MjQ2NzMyMjY5NDMsInN0YXR1cyI6MX0seyJub2RlSWQiOiIweGZEQUVhZjdhZkNGYmI0ZTRkMTZEQzY2YkQyMDM5ZmQ2MDA0Q0ZjZTgiLCJ0c01pbGxpcyI6MTcyNDY3MzIyNjk0Nywic3RhdHVzIjoxfV0sInNpZ25hdHVyZSI6IjB4YmE2Mjk2OTZlZWU4MDQ4ZDE2OTA3MDNhZmVjYWY4ZmJjM2Y4NDMxOWQ0OTFhZGIzY2YzZGYzMzExMTllMDAyOTA1MTc3MjAyNzkxNzEzNTMzMmU0MGZiMzI2OTM5Y2JhN2Y2NDc2NmYyYjY5MzQwZTZlNGYwZmIzNjM2OThmYzkxYiJ9XX0="); // fake token
+    t.setFee("1"); // tbd
+    t.setSignature(BitUtil.base16ToBytes("EE")); // fake signature
+    console.log("tx as json", JSON.stringify(t.toObject()));
+
+    const txAsBytes = t.serializeBinary();
+    console.log("tx as base16", BitUtil.bytesToBase16(txAsBytes));
+    console.log("tx hash", BitUtil.bytesToBase16(HashUtil.sha256AsBytes(txAsBytes)));
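+    // illustrative note (not required by the test): the tx hash above is plain
+    // sha256 over the protobuf-serialized bytes, so any node can recompute it as
+    //   BitUtil.bytesToBase16(HashUtil.sha256AsBytes(t.serializeBinary()))
+    // and compare against a received value; a hypothetical sanity check:
+    //   expect(BitUtil.bytesToBase16(HashUtil.sha256AsBytes(txAsBytes))).to.have.lengthOf(64);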
+
+    // build block
+
+    // transactions
+    const to = new TransactionObj();
+    to.setTx(t);
+    const vd = new TxValidatorData();
+    vd.setVote(1);
+    const ad = new TxAttestorData();
+    ad.setVote(1);
+    to.setValidatordata(vd);
+    to.setAttestordataList([ad]);
+
+    // signers
+    const s1 = new Signer();
+    s1.setNode('0x1111');
+    s1.setRole(1);
+    s1.setSig('CC');
+    const s2 = new Signer();
+    s2.setNode('0x2222');
+    s2.setRole(1);
+    s2.setSig('EE');
+
+    const b = new Block();
+    b.setTs(DateUtil.currentTimeSeconds());
+    b.setTxobjList([to]);
+    b.setSignersList([s1, s2]);
+    b.setAttesttoken(BitUtil.base16ToBytes("C1CC")); // fake attest token
+    console.log("block as json", JSON.stringify(b.toObject()));
+
+    const blockAsBytes = b.serializeBinary();
+    console.log("block as base16", BitUtil.bytesToBase16(blockAsBytes));
+    console.log("block hash", BitUtil.bytesToBase16(HashUtil.sha256AsBytes(blockAsBytes)));
+
+    // PARSE it back into objects
+    console.log("parsing ------------------------- ");
+    let t2 = Transaction.deserializeBinary(txAsBytes);
+    console.log("tx2 as json", JSON.stringify(t2.toObject()));
+
+    let b2 = Block.deserializeBinary(blockAsBytes);
+    console.log("block2 as json", JSON.stringify(b2.toObject()));
+  });
+
+  it('test for setting data as string (do not use this)', async function () {
+    const t = new Transaction();
+    let originalData = "AABB";
+    console.log('assign data ', originalData);
+    let encoded = BitUtil.bytesToBase64(BitUtil.base16ToBytes("AABB"));
+    console.log('encoded for assignment ', encoded);
+    t.setData(encoded);
+    console.log("t as bin", BitUtil.bytesToBase16(t.serializeBinary()));
+    let protoEncodedAndDecoded: any = Transaction.deserializeBinary(t.serializeBinary()).getData();
+    console.log('expected assigned data to be ', originalData, "but got", protoEncodedAndDecoded, '=', BitUtil.bytesToBase16(protoEncodedAndDecoded));
+  });
+
+})
diff --git a/tsconfig.json b/tsconfig.json
index 9028e45..92a6fc6 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -5,12 +5,14 @@
   "compilerOptions": {
     "target": "es2017",
     "lib": ["es2017", "esnext.asynciterable"],
-    "typeRoots": ["./node_modules/@types", "./src/types"],
+//  "typeRoots": ["./node_modules/@types", "./src/types"],
+    "typeRoots": ["./node_modules/@types", "./src/types", "./src/generated"],
     "allowSyntheticDefaultImports": true,
     "experimentalDecorators": true,
     "emitDecoratorMetadata": true,
     "forceConsistentCasingInFileNames": true,
-    "moduleResolution": "node",
+//  "moduleResolution": "node",
+    "moduleResolution": "nodenext",
     "module": "commonjs",
     "pretty": true,
     "sourceMap": true,
@@ -18,7 +20,10 @@
     "allowJs": true,
     "noEmit": false,
     "resolveJsonModule": true,
-    "esModuleInterop": true
+    "esModuleInterop": true,
+
+//  "skipLibCheck": false
+    "skipLibCheck": true
   },
   "include": ["./src/**/*", "./src/**/*.json"],
   "exclude": ["node_modules", "tests"]
diff --git a/zips/do.sh zipped.zip b/zips/do.sh zipped.zip
deleted file mode 100644
index 07627f5..0000000
Binary files a/zips/do.sh zipped.zip and /dev/null differ
diff --git a/zips/docker-dir-for-vnodes.zip b/zips/docker-dir-for-vnodes.zip
deleted file mode 100644
index d52e5de..0000000
Binary files a/zips/docker-dir-for-vnodes.zip and /dev/null differ
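Taken together, the pieces in this patch line up: blocks and transactions are protobuf-serialized, hex-encoded for transport, and content-addressed by sha256. A minimal sketch of that composition (packBlock is a hypothetical helper name; the object/object_hash field names mirror the validatorNode.ts hunk above):

```ts
import { Block } from './src/generated/push/block_pb' // generated via `yarn build:proto`
import { BitUtil } from './src/utilz/bitUtil'
import { HashUtil } from './src/utilz/hashUtil'

// Produce the base16 payload + hash pair that the publish path stores.
function packBlock(b: Block): { object: string; object_hash: string } {
  const bytes = b.serializeBinary()
  return {
    object: BitUtil.bytesToBase16(bytes),
    object_hash: BitUtil.bytesToBase16(HashUtil.sha256AsBytes(bytes))
  }
}
```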