chore: upgrade libp2p and related deps (#1482)

* chore: update noise

* update: package.lock

* update: @chainsafe/libp2p-gossipsub

* rm unwanted libp2p interface deps & bump up libp2p

* refactor code for new deps

* update: new package.lock

* set up prettier, refactor eslint and rm trailing commas

* update package.lock

* fix build

* import type for interface

* fix imports for merge

* update typedoc exports

* add: CustomEvent import

* use new libp2p interface

* add aegir as dev dep for tests
Danish Arora 2023-08-16 20:18:13 +05:30 committed by GitHub
parent 7b6ead14ac
commit 87717981eb
153 changed files with 15132 additions and 15182 deletions
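
The bulk of the non-formatting changes below follow one pattern: the standalone @libp2p/interface-* packages are replaced by subpath exports of the consolidated @libp2p/interface package that ships with libp2p ^0.46.x. A minimal sketch of the import migration, distilled from the hunks below (illustrative, not an exhaustive mapping):

// Before (libp2p up to 0.45.x): one npm package per interface
// import type { Stream } from "@libp2p/interface-connection";
// import type { PeerId } from "@libp2p/interface-peer-id";
// import type { Peer, PeerStore } from "@libp2p/interface-peer-store";
// import type { IdentifyResult } from "@libp2p/interface-libp2p";

// After (libp2p ^0.46.x): subpath exports of the consolidated package
import type { IdentifyResult, Libp2p } from "@libp2p/interface";
import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer, PeerStore } from "@libp2p/interface/peer-store";
// Registrar types now come from the internal interface package
import type { IncomingStreamData } from "@libp2p/interface-internal/registrar";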

View File

@ -1,6 +1,9 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": ["./tsconfig.json"]
},
"env": { "es6": true },
"ignorePatterns": ["node_modules", "build", "coverage", "proto"],
"plugins": ["import", "eslint-comments", "functional"],
@ -13,6 +16,7 @@
],
"globals": { "BigInt": true, "console": true, "WebAssembly": true },
"rules": {
"comma-dangle": ["error", "never"],
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"eslint-comments/disable-enable-pair": [
@ -58,10 +62,19 @@
"overrides": [
{
"files": ["*.spec.ts", "**/test_utils/*.ts", "*.js", "*.cjs"],
"env": {
"node": true
},
"rules": {
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-explicit-any": "off",
"no-console": "off"
"no-console": "off",
"import/no-extraneous-dependencies": [
"error",
{
"devDependencies": true
}
]
}
},
{

.prettierrc.json (new file, +3)
View File

@ -0,0 +1,3 @@
{
"trailingComma": "none"
}
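
This single Prettier setting, together with the "comma-dangle": ["error", "never"] ESLint rule added above, accounts for most of the one-character removals in the remaining hunks. A small hypothetical snippet showing the effect:

// Hypothetical example: with "trailingComma": "none", Prettier drops the comma
// after the last property, element, or argument of a multiline construct.
const encoderOptions = {
  contentTopic: "/toy-chat/2/example/proto", // unchanged: not the last property
  ephemeral: true // previously this line would have ended with a trailing comma
};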

View File

@ -38,8 +38,8 @@ const main = async (url, branch) => {
repo: url,
branch: branch,
dotfiles: true,
silent: false,
silent: false
});
};
main(repoUrl, branch);
void main(repoUrl, branch);
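
The added void marks the call's returned promise as intentionally unawaited; this is commonly done to satisfy lint rules such as @typescript-eslint/no-floating-promises, though the motivation is not stated in this commit. A hypothetical alternative with explicit error handling:

// Hypothetical alternative to `void main(...)`: surface rejections instead of discarding them.
main(repoUrl, branch).catch((err) => {
  console.error(err);
  process.exit(1);
});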

View File

@ -70,7 +70,7 @@ async function readWorkspace(packagePath) {
name: json.name,
private: !!json.private,
version: json.version,
workspace: packagePath,
workspace: packagePath
};
}

package-lock.json (generated, 27786 changes): file diff suppressed because it is too large

View File

@ -2,12 +2,12 @@ module.exports = {
root: true,
parserOptions: {
ecmaVersion: 2021,
sourceType: "module",
sourceType: "module"
},
env: {
browser: true,
es2021: true,
node: true,
node: true
},
extends: ["eslint:recommended"],
rules: {
@ -17,8 +17,8 @@ module.exports = {
semi: ["error", "always"],
"no-unused-vars": [
"error",
{ vars: "all", args: "after-used", ignoreRestSiblings: false },
{ vars: "all", args: "after-used", ignoreRestSiblings: false }
],
"no-console": "warn",
},
"no-console": "warn"
}
};

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"],
files: ["src/lib/**/!(node).spec.ts"],
preprocessors: {
"src/lib/**/!(node).spec.ts": ["webpack"],
"src/lib/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true,
client: {
mocha: {
timeout: 6000, // Default is 2s
},
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }],
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js",
}),
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"],
},
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map",
},
devtool: "inline-source-map"
}
});
};

View File

@ -85,11 +85,6 @@
"uuid": "^9.0.0"
},
"devDependencies": {
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@multiformats/multiaddr": "^12.0.0",
"@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0",
@ -119,7 +114,7 @@
},
"peerDependencies": {
"@multiformats/multiaddr": "^12.0.0",
"libp2p": "^0.45.9"
"libp2p": "^0.46.3"
},
"peerDependenciesMeta": {
"@multiformats/multiaddr": {

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -4,7 +4,7 @@ export { createEncoder, createDecoder } from "./lib/message/version_0.js";
export type {
Encoder,
Decoder,
DecodedMessage,
DecodedMessage
} from "./lib/message/version_0.js";
export * as message from "./lib/message/index.js";
@ -22,7 +22,7 @@ export {
PageDirection,
wakuStore,
StoreCodec,
createCursor,
createCursor
} from "./lib/store/index.js";
export { waitForRemotePeer } from "./lib/wait_for_remote_peer.js";

View File

@ -1,12 +1,12 @@
import type { Stream } from "@libp2p/interface-connection";
import type { Libp2p } from "@libp2p/interface-libp2p";
import type { PeerId } from "@libp2p/interface-peer-id";
import { Peer, PeerStore } from "@libp2p/interface-peer-store";
import type { Libp2p } from "@libp2p/interface";
import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { IBaseProtocol, Libp2pComponents } from "@waku/interfaces";
import {
getPeersForProtocol,
selectConnection,
selectPeerForProtocol,
selectPeerForProtocol
} from "@waku/utils/libp2p";
/**
@ -19,13 +19,13 @@ export class BaseProtocol implements IBaseProtocol {
constructor(
public multicodec: string,
private components: Libp2pComponents,
private components: Libp2pComponents
) {
this.addLibp2pEventListener = components.events.addEventListener.bind(
components.events,
components.events
);
this.removeLibp2pEventListener = components.events.removeEventListener.bind(
components.events,
components.events
);
}
@ -46,13 +46,13 @@ export class BaseProtocol implements IBaseProtocol {
const { peer } = await selectPeerForProtocol(
this.peerStore,
[this.multicodec],
peerId,
peerId
);
return peer;
}
protected async newStream(peer: Peer): Promise<Stream> {
const connections = this.components.connectionManager.getConnections(
peer.id,
peer.id
);
const connection = selectConnection(connections);
if (!connection) {

View File

@ -1,6 +1,6 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info";
import type { Peer } from "@libp2p/interface-peer-store";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Peer } from "@libp2p/interface/peer-store";
import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events";
import {
ConnectionManagerOptions,
@ -9,7 +9,7 @@ import {
IPeersByDiscoveryEvents,
IRelay,
KeepAliveOptions,
PeersByDiscoveryResult,
PeersByDiscoveryResult
} from "@waku/interfaces";
import { Libp2p, Tags } from "@waku/interfaces";
import debug from "debug";
@ -41,7 +41,7 @@ export class ConnectionManager
libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions,
relay?: IRelay,
options?: ConnectionManagerOptions,
options?: ConnectionManagerOptions
): ConnectionManager {
let instance = ConnectionManager.instances.get(peerId);
if (!instance) {
@ -49,7 +49,7 @@ export class ConnectionManager
libp2p,
keepAliveOptions,
relay,
options,
options
);
ConnectionManager.instances.set(peerId, instance);
}
@ -92,12 +92,12 @@ export class ConnectionManager
return {
DISCOVERED: {
[Tags.BOOTSTRAP]: peersDiscoveredByBootstrap,
[Tags.PEER_EXCHANGE]: peersDiscoveredByPeerExchange,
[Tags.PEER_EXCHANGE]: peersDiscoveredByPeerExchange
},
CONNECTED: {
[Tags.BOOTSTRAP]: peersConnectedByBootstrap,
[Tags.PEER_EXCHANGE]: peersConnectedByPeerExchange,
},
[Tags.PEER_EXCHANGE]: peersConnectedByPeerExchange
}
};
}
@ -105,7 +105,7 @@ export class ConnectionManager
libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions,
relay?: IRelay,
options?: Partial<ConnectionManagerOptions>,
options?: Partial<ConnectionManagerOptions>
) {
super();
this.libp2p = libp2p;
@ -113,7 +113,7 @@ export class ConnectionManager
maxDialAttemptsForPeer: DEFAULT_MAX_DIAL_ATTEMPTS_FOR_PEER,
maxBootstrapPeersAllowed: DEFAULT_MAX_BOOTSTRAP_PEERS_ALLOWED,
maxParallelDials: DEFAULT_MAX_PARALLEL_DIALS,
...options,
...options
};
this.keepAliveManager = new KeepAliveManager(keepAliveOptions, relay);
@ -126,7 +126,7 @@ export class ConnectionManager
// which means that before the ConnectionManager is initialized, some peers may have been discovered
// we will dial the peers in peerStore ONCE before we start to listen to the `peer:discovery` events within the ConnectionManager
this.dialPeerStorePeers().catch((error) =>
log(`Unexpected error while dialing peer store peers`, error),
log(`Unexpected error while dialing peer store peers`, error)
);
}
@ -159,15 +159,15 @@ export class ConnectionManager
this.keepAliveManager.stopAll();
this.libp2p.removeEventListener(
"peer:connect",
this.onEventHandlers["peer:connect"],
this.onEventHandlers["peer:connect"]
);
this.libp2p.removeEventListener(
"peer:disconnect",
this.onEventHandlers["peer:disconnect"],
this.onEventHandlers["peer:disconnect"]
);
this.libp2p.removeEventListener(
"peer:discovery",
this.onEventHandlers["peer:discovery"],
this.onEventHandlers["peer:discovery"]
);
}
@ -198,7 +198,7 @@ export class ConnectionManager
log(
`Error dialing peer ${peerId.toString()} - ${
(error as any).message
}`,
}`
);
}
this.dialErrorsForPeer.set(peerId.toString(), error);
@ -225,14 +225,14 @@ export class ConnectionManager
}
log(
`Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`,
`Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`
);
this.dialErrorsForPeer.delete(peerId.toString());
await this.libp2p.peerStore.delete(peerId);
} catch (error) {
throw new Error(
`Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`,
`Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`
);
}
}
@ -245,7 +245,7 @@ export class ConnectionManager
log(`Dropped connection with peer ${peerId.toString()}`);
} catch (error) {
log(
`Error dropping connection with peer ${peerId.toString()} - ${error}`,
`Error dropping connection with peer ${peerId.toString()} - ${error}`
);
}
}
@ -266,14 +266,14 @@ export class ConnectionManager
private startPeerDiscoveryListener(): void {
this.libp2p.addEventListener(
"peer:discovery",
this.onEventHandlers["peer:discovery"],
this.onEventHandlers["peer:discovery"]
);
}
private startPeerConnectionListener(): void {
this.libp2p.addEventListener(
"peer:connect",
this.onEventHandlers["peer:connect"],
this.onEventHandlers["peer:connect"]
);
}
@ -292,7 +292,7 @@ export class ConnectionManager
*/
this.libp2p.addEventListener(
"peer:disconnect",
this.onEventHandlers["peer:disconnect"],
this.onEventHandlers["peer:disconnect"]
);
}
@ -315,7 +315,7 @@ export class ConnectionManager
const { id: peerId } = evt.detail;
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP,
Tags.BOOTSTRAP
);
this.dispatchEvent(
@ -324,9 +324,9 @@ export class ConnectionManager
? EPeersByDiscoveryEvents.PEER_DISCOVERY_BOOTSTRAP
: EPeersByDiscoveryEvents.PEER_DISCOVERY_PEER_EXCHANGE,
{
detail: peerId,
},
),
detail: peerId
}
)
);
try {
@ -343,7 +343,7 @@ export class ConnectionManager
this.keepAliveManager.start(peerId, this.libp2p.services.ping);
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP,
Tags.BOOTSTRAP
);
if (isBootstrap) {
@ -361,9 +361,9 @@ export class ConnectionManager
new CustomEvent<PeerId>(
EPeersByDiscoveryEvents.PEER_CONNECT_BOOTSTRAP,
{
detail: peerId,
},
),
detail: peerId
}
)
);
}
} else {
@ -371,9 +371,9 @@ export class ConnectionManager
new CustomEvent<PeerId>(
EPeersByDiscoveryEvents.PEER_CONNECT_PEER_EXCHANGE,
{
detail: peerId,
},
),
detail: peerId
}
)
);
}
})();
@ -382,7 +382,7 @@ export class ConnectionManager
return (evt: CustomEvent<PeerId>): void => {
this.keepAliveManager.stop(evt.detail);
};
},
}
};
/**

View File

@ -35,27 +35,27 @@ export class FilterSubscribeRpc {
static createSubscribeRequest(
pubsubTopic: string,
contentTopics: string[],
contentTopics: string[]
): FilterSubscribeRpc {
return new FilterSubscribeRpc({
requestId: uuid(),
filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBE,
pubsubTopic,
contentTopics,
contentTopics
});
}
static createUnsubscribeRequest(
pubsubTopic: string,
contentTopics: string[],
contentTopics: string[]
): FilterSubscribeRpc {
return new FilterSubscribeRpc({
requestId: uuid(),
filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE,
pubsubTopic,
contentTopics,
contentTopics
});
}
@ -65,7 +65,7 @@ export class FilterSubscribeRpc {
filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE_ALL,
pubsubTopic,
contentTopics: [],
contentTopics: []
});
}
@ -75,7 +75,7 @@ export class FilterSubscribeRpc {
filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBER_PING,
pubsubTopic: "",
contentTopics: [],
contentTopics: []
});
}

View File

@ -1,7 +1,7 @@
import { Stream } from "@libp2p/interface-connection";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { Peer } from "@libp2p/interface-peer-store";
import type { IncomingStreamData } from "@libp2p/interface-registrar";
import { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer } from "@libp2p/interface/peer-store";
import type { IncomingStreamData } from "@libp2p/interface-internal/registrar";
import type {
Callback,
ContentTopic,
@ -16,7 +16,7 @@ import type {
ProtocolCreateOptions,
ProtocolOptions,
PubSubTopic,
Unsubscribe,
Unsubscribe
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { groupByContentTopic, toAsyncIterator } from "@waku/utils";
@ -31,7 +31,7 @@ import { DefaultPubSubTopic } from "../constants.js";
import {
FilterPushRpc,
FilterSubscribeResponse,
FilterSubscribeRpc,
FilterSubscribeRpc
} from "./filter_rpc.js";
const log = debug("waku:filter:v2");
@ -43,7 +43,7 @@ type SubscriptionCallback<T extends IDecodedMessage> = {
const FilterCodecs = {
SUBSCRIBE: "/vac/waku/filter-subscribe/2.0.0-beta1",
PUSH: "/vac/waku/filter-push/2.0.0-beta1",
PUSH: "/vac/waku/filter-push/2.0.0-beta1"
};
class Subscription {
@ -59,7 +59,7 @@ class Subscription {
constructor(
pubSubTopic: PubSubTopic,
remotePeer: Peer,
newStream: (peer: Peer) => Promise<Stream>,
newStream: (peer: Peer) => Promise<Stream>
) {
this.peer = remotePeer;
this.pubSubTopic = pubSubTopic;
@ -69,7 +69,7 @@ class Subscription {
async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
callback: Callback<T>
): Promise<void> {
const decodersArray = Array.isArray(decoders) ? decoders : [decoders];
const decodersGroupedByCT = groupByContentTopic(decodersArray);
@ -79,7 +79,7 @@ class Subscription {
const request = FilterSubscribeRpc.createSubscribeRequest(
this.pubSubTopic,
contentTopics,
contentTopics
);
try {
@ -88,7 +88,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source),
async (source) => await all(source)
);
const { statusCode, requestId, statusDesc } =
@ -96,7 +96,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
`Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
);
}
@ -104,7 +104,7 @@ class Subscription {
"Subscribed to peer ",
this.peer.id.toString(),
"for content topics",
contentTopics,
contentTopics
);
} catch (e) {
throw new Error(
@ -113,7 +113,7 @@ class Subscription {
" for content topics: " +
contentTopics +
": " +
e,
e
);
}
@ -125,7 +125,7 @@ class Subscription {
// Decoder that decode to different implementations of `IDecodedMessage`
const subscriptionCallback = {
decoders,
callback,
callback
} as unknown as SubscriptionCallback<IDecodedMessage>;
// The callback and decoder may override previous values, this is on
@ -138,7 +138,7 @@ class Subscription {
const stream = await this.newStream(this.peer);
const unsubscribeRequest = FilterSubscribeRpc.createUnsubscribeRequest(
this.pubSubTopic,
contentTopics,
contentTopics
);
try {
@ -163,7 +163,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source),
async (source) => await all(source)
);
const { statusCode, requestId, statusDesc } =
@ -171,7 +171,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
`Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
);
}
@ -186,7 +186,7 @@ class Subscription {
const stream = await this.newStream(this.peer);
const request = FilterSubscribeRpc.createUnsubscribeAllRequest(
this.pubSubTopic,
this.pubSubTopic
);
try {
@ -195,7 +195,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source),
async (source) => await all(source)
);
const { statusCode, requestId, statusDesc } =
@ -203,7 +203,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
`Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
);
}
@ -231,7 +231,7 @@ class Filter extends BaseProtocol implements IReceiver {
private getActiveSubscription(
pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr,
peerIdStr: PeerIdStr
): Subscription | undefined {
return this.activeSubscriptions.get(`${pubSubTopic}_${peerIdStr}`);
}
@ -239,7 +239,7 @@ class Filter extends BaseProtocol implements IReceiver {
private setActiveSubscription(
pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr,
subscription: Subscription,
subscription: Subscription
): Subscription {
this.activeSubscriptions.set(`${pubSubTopic}_${peerIdStr}`, subscription);
return subscription;
@ -259,7 +259,7 @@ class Filter extends BaseProtocol implements IReceiver {
async createSubscription(
pubSubTopic?: string,
peerId?: PeerId,
peerId?: PeerId
): Promise<Subscription> {
const _pubSubTopic =
pubSubTopic ?? this.options.pubSubTopic ?? DefaultPubSubTopic;
@ -271,7 +271,7 @@ class Filter extends BaseProtocol implements IReceiver {
this.setActiveSubscription(
_pubSubTopic,
peer.id.toString(),
new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer)),
new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer))
);
return subscription;
@ -279,7 +279,7 @@ class Filter extends BaseProtocol implements IReceiver {
public toSubscriptionIterator<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions | undefined,
opts?: ProtocolOptions | undefined
): Promise<IAsyncIterator<T>> {
return toAsyncIterator(this, decoders, opts);
}
@ -302,7 +302,7 @@ class Filter extends BaseProtocol implements IReceiver {
async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
opts?: ProtocolOptions,
opts?: ProtocolOptions
): Promise<Unsubscribe> {
const subscription = await this.createSubscription(undefined, opts?.peerId);
@ -310,8 +310,8 @@ class Filter extends BaseProtocol implements IReceiver {
const contentTopics = Array.from(
groupByContentTopic(
Array.isArray(decoders) ? decoders : [decoders],
).keys(),
Array.isArray(decoders) ? decoders : [decoders]
).keys()
);
return async () => {
@ -341,7 +341,7 @@ class Filter extends BaseProtocol implements IReceiver {
const peerIdStr = streamData.connection.remotePeer.toString();
const subscription = this.getActiveSubscription(
pubsubTopic,
peerIdStr,
peerIdStr
);
if (!subscription) {
@ -357,7 +357,7 @@ class Filter extends BaseProtocol implements IReceiver {
},
(e) => {
log("Error with receiving pipe", e);
},
}
);
} catch (e) {
log("Error decoding message", e);
@ -366,7 +366,7 @@ class Filter extends BaseProtocol implements IReceiver {
}
export function wakuFilter(
init: Partial<ProtocolCreateOptions> = {},
init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => IFilter {
return (libp2p: Libp2p) => new Filter(libp2p, init);
}
@ -374,7 +374,7 @@ export function wakuFilter(
async function pushMessage<T extends IDecodedMessage>(
subscriptionCallback: SubscriptionCallback<T>,
pubSubTopic: PubSubTopic,
message: WakuMessage,
message: WakuMessage
): Promise<void> {
const { decoders, callback } = subscriptionCallback;
@ -388,7 +388,7 @@ async function pushMessage<T extends IDecodedMessage>(
const decodePromises = decoders.map((dec) =>
dec
.fromProtoObj(pubSubTopic, message as IProtoMessage)
.then((decoded) => decoded || Promise.reject("Decoding failed")),
.then((decoded) => decoded || Promise.reject("Decoding failed"))
);
const decodedMessage = await Promise.any(decodePromises);

View File

@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { IRelay } from "@waku/interfaces";
import type { KeepAliveOptions } from "@waku/interfaces";
import debug from "debug";
@ -44,7 +44,7 @@ export class KeepAliveManager {
if (relay && relayPeriodSecs !== 0) {
const encoder = createEncoder({
contentTopic: RelayPingContentTopic,
ephemeral: true,
ephemeral: true
});
const interval = setInterval(() => {
log("Sending Waku Relay ping message");
@ -73,7 +73,7 @@ export class KeepAliveManager {
public stopAll(): void {
for (const timer of [
...Object.values(this.pingKeepAliveTimers),
...Object.values(this.relayKeepAliveTimers),
...Object.values(this.relayKeepAliveTimers)
]) {
clearInterval(timer);
}

View File

@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import {
IEncoder,
ILightPush,
@ -7,7 +7,7 @@ import {
ProtocolCreateOptions,
ProtocolOptions,
SendError,
SendResult,
SendResult
} from "@waku/interfaces";
import { PushResponse } from "@waku/proto";
import { isSizeValid } from "@waku/utils";
@ -41,7 +41,7 @@ class LightPush extends BaseProtocol implements ILightPush {
async send(
encoder: IEncoder,
message: IMessage,
opts?: ProtocolOptions,
opts?: ProtocolOptions
): Promise<SendResult> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -56,7 +56,7 @@ class LightPush extends BaseProtocol implements ILightPush {
log("Failed to send waku light push: message is bigger that 1MB");
return {
recipients,
error: SendError.SIZE_TOO_BIG,
error: SendError.SIZE_TOO_BIG
};
}
@ -65,7 +65,7 @@ class LightPush extends BaseProtocol implements ILightPush {
log("Failed to encode to protoMessage, aborting push");
return {
recipients,
error: SendError.ENCODE_FAILED,
error: SendError.ENCODE_FAILED
};
}
const query = PushRpc.createRequest(protoMessage, pubSubTopic);
@ -74,7 +74,7 @@ class LightPush extends BaseProtocol implements ILightPush {
lp.encode,
stream,
lp.decode,
async (source) => await all(source),
async (source) => await all(source)
);
try {
const bytes = new Uint8ArrayList();
@ -100,13 +100,13 @@ class LightPush extends BaseProtocol implements ILightPush {
}
return {
error,
recipients,
recipients
};
}
}
export function wakuLightPush(
init: Partial<ProtocolCreateOptions> = {},
init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => ILightPush {
return (libp2p: Libp2p) => new LightPush(libp2p, init);
}

View File

@ -7,15 +7,15 @@ export class PushRpc {
static createRequest(
message: proto.WakuMessage,
pubSubTopic: string,
pubSubTopic: string
): PushRpc {
return new PushRpc({
requestId: uuid(),
request: {
message: message,
pubsubTopic: pubSubTopic,
pubsubTopic: pubSubTopic
},
response: undefined,
response: undefined
});
}

View File

@ -13,14 +13,14 @@ describe("Waku Message version 0", function () {
fc.uint8Array({ minLength: 1 }),
async (contentTopic, pubSubTopic, payload) => {
const encoder = createEncoder({
contentTopic,
contentTopic
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!,
protoResult!
)) as DecodedMessage;
expect(result.contentTopic).to.eq(contentTopic);
@ -29,8 +29,8 @@ describe("Waku Message version 0", function () {
expect(result.ephemeral).to.be.false;
expect(result.payload).to.deep.eq(payload);
expect(result.timestamp).to.not.be.undefined;
},
),
}
)
);
});
@ -43,19 +43,19 @@ describe("Waku Message version 0", function () {
async (contentTopic, pubSubTopic, payload) => {
const encoder = createEncoder({
contentTopic,
ephemeral: true,
ephemeral: true
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!,
protoResult!
)) as DecodedMessage;
expect(result.ephemeral).to.be.true;
},
),
}
)
);
});
@ -69,7 +69,7 @@ describe("Waku Message version 0", function () {
// Encode the length of the payload
// Not a relevant real life example
const metaSetter = (
msg: IProtoMessage & { meta: undefined },
msg: IProtoMessage & { meta: undefined }
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -80,14 +80,14 @@ describe("Waku Message version 0", function () {
const encoder = createEncoder({
contentTopic,
ephemeral: true,
metaSetter,
metaSetter
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!,
protoResult!
)) as DecodedMessage;
const expectedMeta = metaSetter({
@ -97,12 +97,12 @@ describe("Waku Message version 0", function () {
ephemeral: undefined,
meta: undefined,
rateLimitProof: undefined,
version: undefined,
version: undefined
});
expect(result.meta).to.deep.eq(expectedMeta);
},
),
}
)
);
});
});

View File

@ -6,7 +6,7 @@ import type {
IMessage,
IMetaSetter,
IProtoMessage,
IRateLimitProof,
IRateLimitProof
} from "@waku/interfaces";
import { proto_message as proto } from "@waku/proto";
import debug from "debug";
@ -20,7 +20,7 @@ export { proto };
export class DecodedMessage implements IDecodedMessage {
constructor(
public pubSubTopic: string,
protected proto: proto.WakuMessage,
protected proto: proto.WakuMessage
) {}
get ephemeral(): boolean {
@ -73,7 +73,7 @@ export class Encoder implements IEncoder {
constructor(
public contentTopic: string,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter,
public metaSetter?: IMetaSetter
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -94,7 +94,7 @@ export class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined,
rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral,
ephemeral: this.ephemeral
};
if (this.metaSetter) {
@ -118,7 +118,7 @@ export class Encoder implements IEncoder {
export function createEncoder({
contentTopic,
ephemeral,
metaSetter,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(contentTopic, ephemeral, metaSetter);
}
@ -140,13 +140,13 @@ export class Decoder implements IDecoder<DecodedMessage> {
timestamp: protoMessage.timestamp ?? undefined,
meta: protoMessage.meta ?? undefined,
rateLimitProof: protoMessage.rateLimitProof ?? undefined,
ephemeral: protoMessage.ephemeral ?? false,
ephemeral: protoMessage.ephemeral ?? false
});
}
async fromProtoObj(
pubSubTopic: string,
proto: IProtoMessage,
proto: IProtoMessage
): Promise<DecodedMessage | undefined> {
// https://rfc.vac.dev/spec/14/
// > If omitted, the value SHOULD be interpreted as version 0.
@ -155,7 +155,7 @@ export class Decoder implements IDecoder<DecodedMessage> {
"Failed to decode due to incorrect version, expected:",
Version,
", actual:",
proto.version,
proto.version
);
return Promise.resolve(undefined);
}

View File

@ -4,7 +4,7 @@ export const DefaultWantedNumber = 1;
export enum Fleet {
Prod = "prod",
Test = "test",
Test = "test"
}
/**
@ -19,7 +19,7 @@ export enum Fleet {
*/
export function getPredefinedBootstrapNodes(
fleet: Fleet = Fleet.Prod,
wantedNumber: number = DefaultWantedNumber,
wantedNumber: number = DefaultWantedNumber
): string[] {
if (wantedNumber <= 0) {
return [];
@ -51,8 +51,8 @@ export const fleets = {
"node-01.do-ams3.wakuv2.prod":
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmL5okWopX7NqZWBUKVqW8iUxCEmd5GMHLVPwCgzYzQv3e",
"node-01.gc-us-central1-a.wakuv2.prod":
"/dns4/node-01.gc-us-central1-a.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmVkKntsECaYfefR1V2yCR79CegLATuTPE6B9TxgxBiiiA",
},
"/dns4/node-01.gc-us-central1-a.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmVkKntsECaYfefR1V2yCR79CegLATuTPE6B9TxgxBiiiA"
}
},
"wakuv2.test": {
"waku-websocket": {
@ -61,8 +61,8 @@ export const fleets = {
"node-01.do-ams3.wakuv2.test":
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
"node-01.gc-us-central1-a.wakuv2.test":
"/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS",
},
},
},
"/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS"
}
}
}
};

View File

@ -1,7 +1,7 @@
export function pushOrInitMapSet<K, V>(
map: Map<K, Set<V>>,
key: K,
newValue: V,
newValue: V
): void {
let arr = map.get(key);
if (typeof arr === "undefined") {

View File

@ -6,7 +6,7 @@ const OneMillion = BigInt(1_000_000);
export enum PageDirection {
BACKWARD = "backward",
FORWARD = "forward",
FORWARD = "forward"
}
export interface Params {
@ -43,7 +43,7 @@ export class HistoryRpc {
const pagingInfo = {
pageSize: BigInt(params.pageSize),
cursor: params.cursor,
direction,
direction
} as proto.PagingInfo;
let startTime, endTime;
@ -63,9 +63,9 @@ export class HistoryRpc {
contentFilters,
pagingInfo,
startTime,
endTime,
endTime
},
response: undefined,
response: undefined
});
}
@ -80,7 +80,7 @@ export class HistoryRpc {
}
function directionToProto(
pageDirection: PageDirection,
pageDirection: PageDirection
): proto.PagingInfo.Direction {
switch (pageDirection) {
case PageDirection.BACKWARD:

View File

@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import { sha256 } from "@noble/hashes/sha256";
import {
Cursor,
@ -7,7 +7,7 @@ import {
IDecoder,
IStore,
Libp2p,
ProtocolCreateOptions,
ProtocolCreateOptions
} from "@waku/interfaces";
import { proto_store as proto } from "@waku/proto";
import { isDefined } from "@waku/utils";
@ -106,7 +106,7 @@ class Store extends BaseProtocol implements IStore {
async queryOrderedCallback<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: QueryOptions,
options?: QueryOptions
): Promise<void> {
let abort = false;
for await (const promises of this.queryGenerator(decoders, options)) {
@ -129,7 +129,7 @@ class Store extends BaseProtocol implements IStore {
if (msg && !abort) {
abort = Boolean(await callback(msg));
}
}),
})
);
}
}
@ -155,9 +155,9 @@ class Store extends BaseProtocol implements IStore {
async queryCallbackOnPromise<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (
message: Promise<T | undefined>,
message: Promise<T | undefined>
) => Promise<void | boolean> | boolean | void,
options?: QueryOptions,
options?: QueryOptions
): Promise<void> {
let abort = false;
let promises: Promise<void>[] = [];
@ -192,7 +192,7 @@ class Store extends BaseProtocol implements IStore {
*/
async *queryGenerator<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: QueryOptions,
options?: QueryOptions
): AsyncGenerator<Promise<T | undefined>[]> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -207,7 +207,7 @@ class Store extends BaseProtocol implements IStore {
decoders.forEach((dec) => {
if (decodersAsMap.has(dec.contentTopic)) {
throw new Error(
"API does not support different decoder per content topic",
"API does not support different decoder per content topic"
);
}
decodersAsMap.set(dec.contentTopic, dec);
@ -219,15 +219,15 @@ class Store extends BaseProtocol implements IStore {
{
pubSubTopic: pubSubTopic,
pageDirection: PageDirection.BACKWARD,
pageSize: DefaultPageSize,
pageSize: DefaultPageSize
},
options,
{ contentTopics, startTime, endTime },
{ contentTopics, startTime, endTime }
);
log("Querying history with the following options", {
...options,
peerId: options?.peerId?.toString(),
peerId: options?.peerId?.toString()
});
const peer = await this.getPeer(options?.peerId);
@ -236,7 +236,7 @@ class Store extends BaseProtocol implements IStore {
this.newStream.bind(this, peer),
queryOpts,
decodersAsMap,
options?.cursor,
options?.cursor
)) {
yield messages;
}
@ -247,14 +247,14 @@ async function* paginate<T extends IDecodedMessage>(
streamFactory: () => Promise<Stream>,
queryOpts: Params,
decoders: Map<string, IDecoder<T>>,
cursor?: Cursor,
cursor?: Cursor
): AsyncGenerator<Promise<T | undefined>[]> {
if (
queryOpts.contentTopics.toString() !==
Array.from(decoders.keys()).toString()
) {
throw new Error(
"Internal error, the decoders should match the query's content topics",
"Internal error, the decoders should match the query's content topics"
);
}
@ -267,7 +267,7 @@ async function* paginate<T extends IDecodedMessage>(
log(
"Querying store peer",
`for (${queryOpts.pubSubTopic})`,
queryOpts.contentTopics,
queryOpts.contentTopics
);
const stream = await streamFactory();
@ -277,7 +277,7 @@ async function* paginate<T extends IDecodedMessage>(
lp.encode,
stream,
lp.decode,
async (source) => await all(source),
async (source) => await all(source)
);
const bytes = new Uint8ArrayList();
@ -300,7 +300,7 @@ async function* paginate<T extends IDecodedMessage>(
if (!response.messages || !response.messages.length) {
log(
"Stopping pagination due to store `response.messages` field missing or empty",
"Stopping pagination due to store `response.messages` field missing or empty"
);
break;
}
@ -314,7 +314,7 @@ async function* paginate<T extends IDecodedMessage>(
if (decoder) {
return decoder.fromProtoObj(
queryOpts.pubSubTopic,
toProtoMessage(protoMsg),
toProtoMessage(protoMsg)
);
}
}
@ -326,7 +326,7 @@ async function* paginate<T extends IDecodedMessage>(
// If the server does not return cursor then there is an issue,
// Need to abort, or we end up in an infinite loop
log(
"Stopping pagination due to `response.pagingInfo.cursor` missing from store response",
"Stopping pagination due to `response.pagingInfo.cursor` missing from store response"
);
break;
}
@ -348,7 +348,7 @@ async function* paginate<T extends IDecodedMessage>(
export async function createCursor(
message: IDecodedMessage,
pubsubTopic: string = DefaultPubSubTopic,
pubsubTopic: string = DefaultPubSubTopic
): Promise<Cursor> {
if (
!message ||
@ -369,12 +369,12 @@ export async function createCursor(
digest,
pubsubTopic,
senderTime: messageTime,
receiverTime: messageTime,
receiverTime: messageTime
};
}
export function wakuStore(
init: Partial<ProtocolCreateOptions> = {},
init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => IStore {
return (libp2p: Libp2p) => new Store(libp2p, init);
}

View File

@ -7,7 +7,7 @@ describe("to proto message", () => {
it("Fields are not dropped", () => {
const wire: WakuMessageProto = {
payload: new Uint8Array(),
contentTopic: "foo",
contentTopic: "foo"
};
const protoMessage = toProtoMessage(wire);

View File

@ -8,7 +8,7 @@ const EmptyMessage: IProtoMessage = {
timestamp: undefined,
meta: undefined,
rateLimitProof: undefined,
ephemeral: undefined,
ephemeral: undefined
};
export function toProtoMessage(wire: WakuMessageProto): IProtoMessage {

View File

@ -1,4 +1,4 @@
import type { IdentifyResult } from "@libp2p/interface-libp2p";
import type { IdentifyResult } from "@libp2p/interface";
import type { IBaseProtocol, IRelay, Waku } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import debug from "debug";
@ -28,7 +28,7 @@ const log = debug("waku:wait-for-remote-peer");
export async function waitForRemotePeer(
waku: Waku,
protocols?: Protocols[],
timeoutMs?: number,
timeoutMs?: number
): Promise<void> {
protocols = protocols ?? getEnabledProtocols(waku);
@ -64,7 +64,7 @@ export async function waitForRemotePeer(
await rejectOnTimeout(
Promise.all(promises),
timeoutMs,
"Timed out waiting for a remote peer.",
"Timed out waiting for a remote peer."
);
} else {
await Promise.all(promises);
@ -114,7 +114,7 @@ const awaitTimeout = (ms: number, rejectReason: string): Promise<void> =>
async function rejectOnTimeout<T>(
promise: Promise<T>,
timeoutMs: number,
rejectReason: string,
rejectReason: string
): Promise<void> {
await Promise.race([promise, awaitTimeout(timeoutMs, rejectReason)]);
}

View File

@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection";
import { isPeerId, PeerId } from "@libp2p/interface-peer-id";
import type { Stream } from "@libp2p/interface/connection";
import { isPeerId, PeerId } from "@libp2p/interface/peer-id";
import { multiaddr, Multiaddr, MultiaddrInput } from "@multiformats/multiaddr";
import type {
IFilter,
@ -7,7 +7,7 @@ import type {
IRelay,
IStore,
Libp2p,
Waku,
Waku
} from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import debug from "debug";
@ -56,7 +56,7 @@ export class WakuNode implements Waku {
store?: (libp2p: Libp2p) => IStore,
lightPush?: (libp2p: Libp2p) => ILightPush,
filter?: (libp2p: Libp2p) => IFilter,
relay?: (libp2p: Libp2p) => IRelay,
relay?: (libp2p: Libp2p) => IRelay
) {
this.libp2p = libp2p;
@ -86,14 +86,14 @@ export class WakuNode implements Waku {
peerId,
libp2p,
{ pingKeepAlive, relayKeepAlive },
this.relay,
this.relay
);
log(
"Waku node created",
peerId,
`relay: ${!!this.relay}, store: ${!!this.store}, light push: ${!!this
.lightPush}, filter: ${!!this.filter}`,
.lightPush}, filter: ${!!this.filter}`
);
}
@ -105,7 +105,7 @@ export class WakuNode implements Waku {
*/
async dial(
peer: PeerId | MultiaddrInput,
protocols?: Protocols[],
protocols?: Protocols[]
): Promise<Stream> {
const _protocols = protocols ?? [];
const peerId = mapToPeerIdOrMultiaddr(peer);
@ -121,11 +121,11 @@ export class WakuNode implements Waku {
if (_protocols.includes(Protocols.Relay)) {
if (this.relay) {
this.relay.gossipSub.multicodecs.forEach((codec: string) =>
codecs.push(codec),
codecs.push(codec)
);
} else {
log(
"Relay codec not included in dial codec: protocol not mounted locally",
"Relay codec not included in dial codec: protocol not mounted locally"
);
}
}
@ -134,7 +134,7 @@ export class WakuNode implements Waku {
codecs.push(this.store.multicodec);
} else {
log(
"Store codec not included in dial codec: protocol not mounted locally",
"Store codec not included in dial codec: protocol not mounted locally"
);
}
}
@ -143,7 +143,7 @@ export class WakuNode implements Waku {
codecs.push(this.lightPush.multicodec);
} else {
log(
"Light Push codec not included in dial codec: protocol not mounted locally",
"Light Push codec not included in dial codec: protocol not mounted locally"
);
}
}
@ -152,7 +152,7 @@ export class WakuNode implements Waku {
codecs.push(this.filter.multicodec);
} else {
log(
"Filter codec not included in dial codec: protocol not mounted locally",
"Filter codec not included in dial codec: protocol not mounted locally"
);
}
}
@ -191,7 +191,7 @@ export class WakuNode implements Waku {
}
}
function mapToPeerIdOrMultiaddr(
peerId: PeerId | MultiaddrInput,
peerId: PeerId | MultiaddrInput
): PeerId | Multiaddr {
return isPeerId(peerId) ? peerId : multiaddr(peerId);
}

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"],
preprocessors: {
"src/**/!(node).spec.ts": ["webpack"],
"src/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
@ -14,8 +14,8 @@ module.exports = function (config) {
singleRun: true,
client: {
mocha: {
timeout: 6000, // Default is 2s
},
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
@ -27,31 +27,31 @@ module.exports = function (config) {
{
loader: "ts-loader",
options: {
configFile: "tsconfig.karma.json",
},
},
],
},
],
configFile: "tsconfig.karma.json"
}
}
]
}
]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js",
}),
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"],
},
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map",
},
devtool: "inline-source-map"
}
});
};

View File

@ -51,8 +51,6 @@
"node": ">=16"
},
"dependencies": {
"@libp2p/interface-peer-discovery": "^2.0.0",
"@libp2p/interfaces": "^3.3.2",
"@waku/enr": "0.0.16",
"@waku/utils": "0.0.10",
"debug": "^4.3.4",
@ -61,8 +59,6 @@
"uint8arrays": "^4.0.4"
},
"devDependencies": {
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/peer-id": "^2.0.4",
"@libp2p/peer-id-factory": "^2.0.4",
"@multiformats/multiaddr": "^12.0.0",

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -19,7 +19,7 @@ const singleBranch = `enrtree-branch:${branchDomainA}`;
const doubleBranch = `enrtree-branch:${branchDomainA},${branchDomainB}`;
const multiComponentBranch = [
`enrtree-branch:${branchDomainA},${partialBranchA}`,
`${partialBranchB},${branchDomainB}`,
`${partialBranchB},${branchDomainB}`
];
// Note: once td.when is asked to throw for an input it will always throw.
@ -72,7 +72,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(1);
@ -88,7 +88,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(0);
@ -102,7 +102,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
let peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(0);
@ -120,7 +120,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(0);
});
@ -129,7 +129,7 @@ describe("DNS Node Discovery", () => {
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]);
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(0);
});
@ -140,7 +140,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peersA = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peersA.length).to.eq(1);
@ -149,7 +149,7 @@ describe("DNS Node Discovery", () => {
mockDns.addThrow(`${branchDomainD}.${host}`);
const peersB = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peersB.length).to.eq(1);
expect(peersA[0].ip).to.eq(peersB[0].ip);
@ -169,12 +169,12 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1,
relay: 1
});
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
);
});
@ -184,12 +184,12 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1,
relay: 1,
relay: 1
});
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
);
});
@ -198,19 +198,19 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1,
store: 1
});
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd",
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
);
});
it("retrieves all peers (2) when cannot fulfill all requirements", async () => {
mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]);
mockDns.addRes(`${branchDomainA}.${host}`, [
mockData.enrWithWaku2RelayStore,
mockData.enrWithWaku2RelayStore
]);
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
@ -218,45 +218,45 @@ describe("DNS Node Discovery w/ capabilities", () => {
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1,
relay: 2,
filter: 1,
filter: 1
});
expect(peers.length).to.eq(2);
const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
);
expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
);
});
it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () {
mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch);
mockDns.addRes(`${branchDomainA}.${host}`, [
mockData.enrWithWaku2RelayStore,
mockData.enrWithWaku2RelayStore
]);
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
mockDns.addRes(`${partialBranchA}${partialBranchB}.${host}`, [
mockData.enrWithWaku2Store,
mockData.enrWithWaku2Store
]);
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 2,
relay: 2,
relay: 2
});
expect(peers.length).to.eq(3);
const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
);
expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
);
expect(peerIds).to.contain(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd",
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
);
});
});
@ -278,7 +278,7 @@ describe("DNS Node Discovery [live data]", function () {
relay: maxQuantity,
store: maxQuantity,
filter: maxQuantity,
lightPush: maxQuantity,
lightPush: maxQuantity
});
expect(peers.length).to.eq(maxQuantity);
@ -301,7 +301,7 @@ describe("DNS Node Discovery [live data]", function () {
relay: maxQuantity,
store: maxQuantity,
filter: maxQuantity,
lightPush: maxQuantity,
lightPush: maxQuantity
});
expect(peers.length).to.eq(maxQuantity);

View File

@ -6,7 +6,7 @@ import { DnsOverHttps } from "./dns_over_https.js";
import { ENRTree } from "./enrtree.js";
import {
fetchNodesUntilCapabilitiesFulfilled,
yieldNodesUntilCapabilitiesFulfilled,
yieldNodesUntilCapabilitiesFulfilled
} from "./fetch_nodes.js";
const log = debug("waku:discovery:dns");
@ -34,7 +34,7 @@ export class DnsNodeDiscovery {
private readonly _errorTolerance: number = 10;
public static async dnsOverHttp(
dnsClient?: DnsClient,
dnsClient?: DnsClient
): Promise<DnsNodeDiscovery> {
if (!dnsClient) {
dnsClient = await DnsOverHttps.create();
@ -51,29 +51,29 @@ export class DnsNodeDiscovery {
*/
async getPeers(
enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): Promise<IEnr[]> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
const context: SearchContext = {
domain,
publicKey,
visits: {},
visits: {}
};
const peers = await fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount,
this._errorTolerance,
() => this._search(domain, context),
() => this._search(domain, context)
);
log(
"retrieved peers: ",
peers.map((peer) => {
return {
id: peer.peerId?.toString(),
multiaddrs: peer.multiaddrs?.map((ma) => ma.toString()),
multiaddrs: peer.multiaddrs?.map((ma) => ma.toString())
};
}),
})
);
return peers;
}
@ -88,20 +88,20 @@ export class DnsNodeDiscovery {
*/
async *getNextPeer(
enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): AsyncGenerator<IEnr> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
const context: SearchContext = {
domain,
publicKey,
visits: {},
visits: {}
};
for await (const peer of yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount,
this._errorTolerance,
() => this._search(domain, context),
() => this._search(domain, context)
)) {
yield peer;
}
@ -113,7 +113,7 @@ export class DnsNodeDiscovery {
*/
private async _search(
subdomain: string,
context: SearchContext,
context: SearchContext
): Promise<ENR | null> {
try {
const entry = await this._getTXTRecord(subdomain, context);
@ -139,7 +139,7 @@ export class DnsNodeDiscovery {
}
} catch (error) {
log(
`Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`,
`Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`
);
return null;
}
@ -157,7 +157,7 @@ export class DnsNodeDiscovery {
*/
private async _getTXTRecord(
subdomain: string,
context: SearchContext,
context: SearchContext
): Promise<string> {
if (this._DNSTreeCache[subdomain]) {
return this._DNSTreeCache[subdomain];

View File

@ -18,7 +18,7 @@ export class DnsOverHttps implements DnsClient {
*/
public static async create(
endpoints?: Endpoint[],
retries?: number,
retries?: number
): Promise<DnsOverHttps> {
const _endpoints = endpoints ?? (await wellknown.endpoints("doh"));
@ -27,7 +27,7 @@ export class DnsOverHttps implements DnsClient {
private constructor(
private endpoints: Endpoint[],
private retries: number = 3,
private retries: number = 3
) {}
/**
@ -42,12 +42,12 @@ export class DnsOverHttps implements DnsClient {
try {
const res = await query(
{
question: { type: "TXT", name: domain },
question: { type: "TXT", name: domain }
},
{
endpoints: this.endpoints,
retries: this.retries,
},
retries: this.retries
}
);
answers = res.answers;
} catch (error) {

View File

@ -19,7 +19,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree root entry must start with 'enrtree-root:'",
"ENRTree root entry must start with 'enrtree-root:'"
);
}
});
@ -56,7 +56,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree tree entry must start with 'enrtree:'",
"ENRTree tree entry must start with 'enrtree:'"
);
}
});
@ -75,7 +75,7 @@ describe("ENRTree", () => {
const expected = [
"D2SNLTAGWNQ34NTQTPHNZDECFU",
"67BLTJEU5R2D5S3B4QKJSBRFCY",
"A2HDMZBB4JIU53VTEGC4TG6P4A",
"A2HDMZBB4JIU53VTEGC4TG6P4A"
];
const branches = ENRTree.parseBranch(dns.enrBranch);
@ -88,7 +88,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree branch entry must start with 'enrtree-branch:'",
"ENRTree branch entry must start with 'enrtree-branch:'"
);
}
});

View File

@ -29,7 +29,7 @@ export class ENRTree {
static parseAndVerifyRoot(root: string, publicKey: string): string {
if (!root.startsWith(this.ROOT_PREFIX))
throw new Error(
`ENRTree root entry must start with '${this.ROOT_PREFIX}'`,
`ENRTree root entry must start with '${this.ROOT_PREFIX}'`
);
const rootValues = ENRTree.parseRootValues(root);
@ -43,13 +43,13 @@ export class ENRTree {
const signedComponentBuffer = utf8ToBytes(signedComponent);
const signatureBuffer = fromString(rootValues.signature, "base64url").slice(
0,
64,
64
);
const isVerified = verifySignature(
signatureBuffer,
keccak256(signedComponentBuffer),
new Uint8Array(decodedPublicKey),
new Uint8Array(decodedPublicKey)
);
if (!isVerified) throw new Error("Unable to verify ENRTree root signature");
@ -59,7 +59,7 @@ export class ENRTree {
static parseRootValues(txt: string): ENRRootValues {
const matches = txt.match(
/^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/,
/^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/
);
if (!Array.isArray(matches))
@ -89,7 +89,7 @@ export class ENRTree {
static parseTree(tree: string): ENRTreeValues {
if (!tree.startsWith(this.TREE_PREFIX))
throw new Error(
`ENRTree tree entry must start with '${this.TREE_PREFIX}'`,
`ENRTree tree entry must start with '${this.TREE_PREFIX}'`
);
const matches = tree.match(/^enrtree:\/\/([^@]+)@(.+)$/);
@ -115,7 +115,7 @@ export class ENRTree {
static parseBranch(branch: string): string[] {
if (!branch.startsWith(this.BRANCH_PREFIX))
throw new Error(
`ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`,
`ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`
);
return branch.split(this.BRANCH_PREFIX)[1].split(",");

View File

@ -15,8 +15,8 @@ async function createEnr(waku2: Waku2): Promise<ENR> {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
)
];
enr.waku2 = waku2;
@ -27,7 +27,7 @@ const Waku2None = {
relay: false,
store: false,
filter: false,
lightPush: false,
lightPush: false
};
describe("Fetch nodes until capabilities are fulfilled", function () {
@ -39,7 +39,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ relay: 1 },
0,
getNode,
getNode
);
expect(res.length).to.eq(1);
@ -62,7 +62,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1 },
1,
getNode,
getNode
);
expect(res.length).to.eq(1);
@ -76,7 +76,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const relayStoreNode = await createEnr({
...Waku2None,
relay: true,
store: true,
store: true
});
const retrievedNodes = [relayNode1, relayNode2, relayNode3, relayStoreNode];
@ -91,7 +91,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1, relay: 2 },
1,
getNode,
getNode
);
expect(res.length).to.eq(3);
@ -108,7 +108,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ filter: 1, relay: 1 },
5,
getNode,
getNode
);
expect(res.length).to.eq(1);

View File

@ -13,13 +13,13 @@ const log = debug("waku:discovery:fetch_nodes");
export async function fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number,
getNode: () => Promise<IEnr | null>,
getNode: () => Promise<IEnr | null>
): Promise<IEnr[]> {
const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0,
store: wantedNodeCapabilityCount.store ?? 0,
filter: wantedNodeCapabilityCount.filter ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0
};
const maxSearches =
@ -29,7 +29,7 @@ export async function fetchNodesUntilCapabilitiesFulfilled(
relay: 0,
store: 0,
filter: 0,
lightPush: 0,
lightPush: 0
};
let totalSearches = 0;
@ -64,13 +64,13 @@ export async function fetchNodesUntilCapabilitiesFulfilled(
export async function* yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number,
getNode: () => Promise<IEnr | null>,
getNode: () => Promise<IEnr | null>
): AsyncGenerator<IEnr> {
const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0,
store: wantedNodeCapabilityCount.store ?? 0,
filter: wantedNodeCapabilityCount.filter ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0
};
const maxSearches =
@ -80,7 +80,7 @@ export async function* yieldNodesUntilCapabilitiesFulfilled(
relay: 0,
store: 0,
filter: 0,
lightPush: 0,
lightPush: 0
};
let totalSearches = 0;
@ -108,7 +108,7 @@ export async function* yieldNodesUntilCapabilitiesFulfilled(
function isSatisfied(
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount,
actual: NodeCapabilityCount
): boolean {
return (
actual.relay >= wanted.relay &&
@ -146,7 +146,7 @@ function addCapabilities(node: Waku2, total: NodeCapabilityCount): void {
function helpsSatisfyCapabilities(
node: Waku2,
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount,
actual: NodeCapabilityCount
): boolean {
if (isSatisfied(wanted, actual)) {
throw "Internal Error: Waku2 wanted capabilities are already fulfilled";
@ -168,12 +168,12 @@ function helpsSatisfyCapabilities(
*/
function missingCapabilities(
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount,
actual: NodeCapabilityCount
): Waku2 {
return {
relay: actual.relay < wanted.relay,
store: actual.store < wanted.store,
filter: actual.filter < wanted.filter,
lightPush: actual.lightPush < wanted.lightPush,
lightPush: actual.lightPush < wanted.lightPush
};
}
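
For orientation, a minimal usage sketch of the capability-driven fetch changed above (the import path and the `getNode` stub are assumptions, not part of this commit; in the real flow `getNode` walks the ENR tree over DNS):

```typescript
// Sketch only: assumes fetchNodesUntilCapabilitiesFulfilled is re-exported from
// the dns-discovery package root.
import { fetchNodesUntilCapabilitiesFulfilled } from "@waku/dns-discovery";
import type { IEnr } from "@waku/interfaces";

async function findBootstrapNodes(
  getNode: () => Promise<IEnr | null>
): Promise<IEnr[]> {
  // Keep fetching ENRs until two relay-capable and one store-capable node are
  // found, with an error tolerance of 3 (third argument).
  return fetchNodesUntilCapabilitiesFulfilled({ relay: 2, store: 1 }, 3, getNode);
}
```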

View File

@ -1,11 +1,11 @@
import { CustomEvent, EventEmitter } from "@libp2p/interface/events";
import type {
PeerDiscovery,
PeerDiscoveryEvents,
} from "@libp2p/interface-peer-discovery";
import { peerDiscovery as symbol } from "@libp2p/interface-peer-discovery";
import type { PeerInfo } from "@libp2p/interface-peer-info";
import type { PeerStore } from "@libp2p/interface-peer-store";
import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events";
PeerDiscoveryEvents
} from "@libp2p/interface/peer-discovery";
import { peerDiscovery as symbol } from "@libp2p/interface/peer-discovery";
import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { PeerStore } from "@libp2p/interface/peer-store";
import type { IEnr } from "@waku/interfaces";
import debug from "debug";
@ -17,7 +17,7 @@ const log = debug("waku:peer-discovery-dns");
const enrTree = {
TEST: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@test.waku.nodes.status.im",
PROD: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@prod.waku.nodes.status.im",
PROD: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@prod.waku.nodes.status.im"
};
const DEFAULT_BOOTSTRAP_TAG_NAME = "bootstrap";
@ -93,7 +93,7 @@ export class PeerDiscoveryDns
this.nextPeer = dns.getNextPeer.bind(
dns,
enrUrls,
wantedNodeCapabilityCount,
wantedNodeCapabilityCount
);
}
@ -112,9 +112,9 @@ export class PeerDiscoveryDns
tags: {
[DEFAULT_BOOTSTRAP_TAG_NAME]: {
value: this._options.tagValue ?? DEFAULT_BOOTSTRAP_TAG_VALUE,
ttl: this._options.tagTTL ?? DEFAULT_BOOTSTRAP_TAG_TTL,
},
},
ttl: this._options.tagTTL ?? DEFAULT_BOOTSTRAP_TAG_TTL
}
}
};
let isPeerChanged = false;
@ -135,7 +135,7 @@ export class PeerDiscoveryDns
if (isPeerChanged) {
this.dispatchEvent(
new CustomEvent<PeerInfo>("peer", { detail: peerInfo }),
new CustomEvent<PeerInfo>("peer", { detail: peerInfo })
);
}
}
@ -159,7 +159,7 @@ export class PeerDiscoveryDns
export function wakuDnsDiscovery(
enrUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): (components: DnsDiscoveryComponents) => PeerDiscoveryDns {
return (components: DnsDiscoveryComponents) =>
new PeerDiscoveryDns(components, { enrUrls, wantedNodeCapabilityCount });

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"],
preprocessors: {
"src/**/*.ts": ["webpack"],
"src/**/*.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true,
client: {
mocha: {
timeout: 6000, // Default is 2s
},
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }],
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js",
}),
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"],
},
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map",
},
devtool: "inline-source-map"
}
});
};

View File

@ -61,8 +61,6 @@
"js-sha3": "^0.8.0"
},
"devDependencies": {
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/peer-id-factory": "^2.0.4",
"@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0",

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -1,4 +1,4 @@
import { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { ENRKey, ENRValue } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
@ -9,7 +9,7 @@ import { getPublicKeyFromPeerId } from "./peer_id.js";
export class EnrCreator {
static fromPublicKey(
publicKey: Uint8Array,
kvs: Record<ENRKey, ENRValue> = {},
kvs: Record<ENRKey, ENRValue> = {}
): Promise<ENR> {
// EIP-778 specifies that the key must be in compressed format, 33 bytes
if (publicKey.length !== 33) {
@ -18,13 +18,13 @@ export class EnrCreator {
return ENR.create({
...kvs,
id: utf8ToBytes("v4"),
secp256k1: publicKey,
secp256k1: publicKey
});
}
static async fromPeerId(
peerId: PeerId,
kvs: Record<ENRKey, ENRValue> = {},
kvs: Record<ENRKey, ENRValue> = {}
): Promise<ENR> {
switch (peerId.type) {
case "secp256k1":

View File

@ -12,15 +12,15 @@ import sha3 from "js-sha3";
*/
export async function sign(
message: Uint8Array,
privateKey: Uint8Array,
privateKey: Uint8Array
): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true,
der: false,
der: false
});
return concat(
[signature, new Uint8Array([recoveryId])],
signature.length + 1,
signature.length + 1
);
}
@ -42,7 +42,7 @@ export function compressPublicKey(publicKey: Uint8Array): Uint8Array {
export function verifySignature(
signature: Uint8Array,
message: Uint8Array | string,
publicKey: Uint8Array,
publicKey: Uint8Array
): boolean {
try {
const _signature = secp.Signature.fromCompact(signature.slice(0, 64));

View File

@ -10,7 +10,7 @@ export class EnrDecoder {
static fromString(encoded: string): Promise<ENR> {
if (!encoded.startsWith(ENR.RECORD_PREFIX)) {
throw new Error(
`"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`,
`"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`
);
}
return EnrDecoder.fromRLP(fromString(encoded.slice(4), "base64url"));
@ -64,7 +64,7 @@ function checkValues(values: Uint8Array[]): {
}
if (!seq || Array.isArray(seq)) {
throw new Error(
"Decoded ENR invalid sequence number: must be a byte array",
"Decoded ENR invalid sequence number: must be a byte array"
);
}
@ -75,7 +75,7 @@ function checkSignature(
seq: Uint8Array,
kvs: Uint8Array[],
enr: ENR,
signature: Uint8Array,
signature: Uint8Array
): void {
const rlpEncodedBytes = hexToBytes(RLP.encode([seq, ...kvs]));
if (!enr.verify(rlpEncodedBytes, signature)) {

View File

@ -9,7 +9,7 @@ import { ENR } from "./enr.js";
export class EnrEncoder {
static async toValues(
enr: ENR,
privateKey?: Uint8Array,
privateKey?: Uint8Array
): Promise<(ENRKey | ENRValue | number[])[]> {
// sort keys and flatten into [k, v, k, v, ...]
const content: Array<ENRKey | ENRValue | number[]> = Array.from(enr.keys())
@ -20,7 +20,7 @@ export class EnrEncoder {
content.unshift(new Uint8Array([Number(enr.seq)]));
if (privateKey) {
content.unshift(
await enr.sign(hexToBytes(RLP.encode(content)), privateKey),
await enr.sign(hexToBytes(RLP.encode(content)), privateKey)
);
} else {
if (!enr.signature) {
@ -33,7 +33,7 @@ export class EnrEncoder {
static async toBytes(enr: ENR, privateKey?: Uint8Array): Promise<Uint8Array> {
const encoded = hexToBytes(
RLP.encode(await EnrEncoder.toValues(enr, privateKey)),
RLP.encode(await EnrEncoder.toValues(enr, privateKey))
);
if (encoded.length >= MAX_RECORD_SIZE) {
throw new Error("ENR must be less than 300 bytes");

View File

@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import { createSecp256k1PeerId } from "@libp2p/peer-id-factory";
import { multiaddr } from "@multiformats/multiaddr";
import * as secp from "@noble/secp256k1";
@ -14,7 +14,7 @@ import { EnrEncoder } from "./encoder.js";
import {
ENR,
TransportProtocol,
TransportProtocolPerIpVersion,
TransportProtocolPerIpVersion
} from "./enr.js";
import { getPrivateKeyFromPeerId } from "./peer_id.js";
@ -29,15 +29,15 @@ describe("ENR", function () {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
)
];
enr.waku2 = {
relay: true,
store: false,
filter: true,
lightPush: false,
lightPush: false
};
const txt = await EnrEncoder.toString(enr, privateKey);
@ -53,19 +53,19 @@ describe("ENR", function () {
expect(enr2.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr2.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss",
"/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss",
"/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
);
expect(enr2.waku2).to.deep.equal({
relay: true,
store: false,
filter: true,
lightPush: false,
lightPush: false
});
});
@ -87,13 +87,13 @@ describe("ENR", function () {
expect(enr.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss",
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
);
});
@ -107,7 +107,7 @@ describe("ENR", function () {
expect(enr.ip).to.be.equal("134.209.139.210");
expect(enr.publicKey).to.not.be.undefined;
expect(enr.peerId?.toString()).to.be.equal(
"16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
"16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ"
);
});
@ -159,7 +159,7 @@ describe("ENR", function () {
const enr = await ENR.create(
{ id: utf8ToBytes("v3") },
BigInt(0),
new Uint8Array(),
new Uint8Array()
);
enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here");
@ -174,7 +174,7 @@ describe("ENR", function () {
const enr = await ENR.create(
{ id: utf8ToBytes("v4") },
BigInt(0),
new Uint8Array(),
new Uint8Array()
);
enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here");
@ -200,7 +200,7 @@ describe("ENR", function () {
beforeEach(async function () {
const seq = BigInt(1);
privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291",
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
);
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
record.setLocationMultiaddr(multiaddr("/ip4/127.0.0.1/udp/30303"));
@ -210,7 +210,7 @@ describe("ENR", function () {
it("should properly compute the node id", () => {
expect(record.nodeId).to.equal(
"a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7",
"a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7"
);
});
@ -245,7 +245,7 @@ describe("ENR", function () {
beforeEach(async () => {
privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291",
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
);
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
});
@ -262,14 +262,14 @@ describe("ENR", function () {
record.set("udp", tuples0[1][1]);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(),
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
).to.equal(multi0.toString());
// set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/udp/30300");
record.setLocationMultiaddr(multi1);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(),
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
).to.equal(multi1.toString());
// and get the underlying records
const tuples1 = multi1.tuples();
@ -290,14 +290,14 @@ describe("ENR", function () {
record.set("tcp", tuples0[1][1]);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(),
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
).to.equal(multi0.toString());
// set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/tcp/30300");
record.setLocationMultiaddr(multi1);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(),
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
).to.equal(multi1.toString());
// and get the underlying records
const tuples1 = multi1.tuples();
@ -312,7 +312,7 @@ describe("ENR", function () {
const tcp = 8080;
const udp = 8080;
const wsMultiaddr = multiaddr(
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss",
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss"
);
let peerId: PeerId;
let enr: ENR;
@ -331,43 +331,43 @@ describe("ENR", function () {
it("should properly create location multiaddrs - udp4", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4),
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4)
).to.deep.equal(multiaddr(`/ip4/${ip4}/udp/${udp}`));
});
it("should properly create location multiaddrs - tcp4", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4),
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4)
).to.deep.equal(multiaddr(`/ip4/${ip4}/tcp/${tcp}`));
});
it("should properly create location multiaddrs - udp6", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6),
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6)
).to.deep.equal(multiaddr(`/ip6/${ip6}/udp/${udp}`));
});
it("should properly create location multiaddrs - tcp6", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6),
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6)
).to.deep.equal(multiaddr(`/ip6/${ip6}/tcp/${tcp}`));
});
it("should properly create location multiaddrs - udp", () => {
// default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`),
multiaddr(`/ip4/${ip4}/udp/${udp}`)
);
// if ip6 is set, use it
enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/udp/${udp}`),
multiaddr(`/ip6/${ip6}/udp/${udp}`)
);
// if ip6 does not exist, use ip4
enr.ip6 = undefined;
enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`),
multiaddr(`/ip4/${ip4}/udp/${udp}`)
);
enr.ip6 = ip6;
});
@ -375,18 +375,18 @@ describe("ENR", function () {
it("should properly create location multiaddrs - tcp", () => {
// default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`),
multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
);
// if ip6 is set, use it
enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`),
multiaddr(`/ip6/${ip6}/tcp/${tcp}`)
);
// if ip6 does not exist, use ip4
enr.ip6 = undefined;
enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`),
multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
);
enr.ip6 = ip6;
});
@ -397,19 +397,19 @@ describe("ENR", function () {
expect(peerInfo.id.toString()).to.equal(peerId.toString());
expect(peerInfo.multiaddrs.length).to.equal(5);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString(),
multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString()
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString(),
multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString()
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/udp/${udp}`).toString(),
multiaddr(`/ip4/${ip4}/udp/${udp}`).toString()
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/udp/${udp}`).toString(),
multiaddr(`/ip6/${ip6}/udp/${udp}`).toString()
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
wsMultiaddr.toString(),
wsMultiaddr.toString()
);
});
});
@ -428,7 +428,7 @@ describe("ENR", function () {
relay: false,
store: false,
filter: false,
lightPush: false,
lightPush: false
};
});

View File

@ -1,12 +1,12 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Multiaddr } from "@multiformats/multiaddr";
import type {
ENRKey,
ENRValue,
IEnr,
NodeId,
SequenceNumber,
SequenceNumber
} from "@waku/interfaces";
import debug from "debug";
@ -21,13 +21,13 @@ const log = debug("waku:enr");
export enum TransportProtocol {
TCP = "tcp",
UDP = "udp",
UDP = "udp"
}
export enum TransportProtocolPerIpVersion {
TCP4 = "tcp4",
UDP4 = "udp4",
TCP6 = "tcp6",
UDP6 = "udp6",
UDP6 = "udp6"
}
export class ENR extends RawEnr implements IEnr {
@ -37,7 +37,7 @@ export class ENR extends RawEnr implements IEnr {
static async create(
kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1),
signature?: Uint8Array,
signature?: Uint8Array
): Promise<ENR> {
const enr = new ENR(kvs, seq, signature);
try {
@ -61,7 +61,7 @@ export class ENR extends RawEnr implements IEnr {
}
}
getLocationMultiaddr: (
protocol: TransportProtocol | TransportProtocolPerIpVersion,
protocol: TransportProtocol | TransportProtocolPerIpVersion
) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this);
setLocationMultiaddr(multiaddr: Multiaddr): void {
@ -93,7 +93,7 @@ export class ENR extends RawEnr implements IEnr {
for (const protocol of Object.values(TransportProtocolPerIpVersion)) {
const ma = this.getLocationMultiaddr(
protocol as TransportProtocolPerIpVersion,
protocol as TransportProtocolPerIpVersion
);
if (ma) multiaddrs.push(ma);
}
@ -109,7 +109,7 @@ export class ENR extends RawEnr implements IEnr {
return {
id,
multiaddrs: this.getAllLocationMultiaddrs(),
protocols: [],
protocols: []
};
}
@ -122,7 +122,7 @@ export class ENR extends RawEnr implements IEnr {
* @param protocol
*/
getFullMultiaddr(
protocol: TransportProtocol | TransportProtocolPerIpVersion,
protocol: TransportProtocol | TransportProtocolPerIpVersion
): Multiaddr | undefined {
if (this.peerId) {
const locationMultiaddr = this.getLocationMultiaddr(protocol);

View File

@ -5,7 +5,7 @@ import { multiaddrFromFields } from "./multiaddr_from_fields.js";
export function locationMultiaddrFromEnrFields(
enr: IEnr,
protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6",
protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6"
): Multiaddr | undefined {
switch (protocol) {
case "udp":
@ -42,6 +42,6 @@ export function locationMultiaddrFromEnrFields(
isIpv6 ? "ip6" : "ip4",
protoName,
ipVal,
protoVal,
protoVal
);
}

View File

@ -6,12 +6,12 @@ export function multiaddrFromFields(
ipFamily: string,
protocol: string,
ipBytes: Uint8Array,
protocolBytes: Uint8Array,
protocolBytes: Uint8Array
): Multiaddr {
let ma = multiaddr("/" + ipFamily + "/" + convertToString(ipFamily, ipBytes));
ma = ma.encapsulate(
multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes)),
multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes))
);
return ma;

View File

@ -8,11 +8,11 @@ describe("ENR multiaddrs codec", function () {
const multiaddrs = [
multiaddr("/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
)
];
const bytes = encodeMultiaddrs(multiaddrs);
@ -20,13 +20,13 @@ describe("ENR multiaddrs codec", function () {
const multiaddrsAsStr = result.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss",
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
);
});
});

View File

@ -12,7 +12,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
const sizeDataView = new DataView(
bytes.buffer,
index,
MULTIADDR_LENGTH_SIZE,
MULTIADDR_LENGTH_SIZE
);
const size = sizeDataView.getUint16(0);
index += MULTIADDR_LENGTH_SIZE;
@ -28,7 +28,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
export function encodeMultiaddrs(multiaddrs: Multiaddr[]): Uint8Array {
const totalLength = multiaddrs.reduce(
(acc, ma) => acc + MULTIADDR_LENGTH_SIZE + ma.bytes.length,
0,
0
);
const bytes = new Uint8Array(totalLength);
const dataView = new DataView(bytes.buffer);

View File

@ -1,10 +1,10 @@
import { unmarshalPrivateKey, unmarshalPublicKey } from "@libp2p/crypto/keys";
import { supportedKeys } from "@libp2p/crypto/keys";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import { peerIdFromKeys } from "@libp2p/peer-id";
export function createPeerIdFromPublicKey(
publicKey: Uint8Array,
publicKey: Uint8Array
): Promise<PeerId> {
const _publicKey = new supportedKeys.secp256k1.Secp256k1PublicKey(publicKey);
return peerIdFromKeys(_publicKey.bytes, undefined);
@ -20,7 +20,7 @@ export function getPublicKeyFromPeerId(peerId: PeerId): Uint8Array {
// Only used in tests
export async function getPrivateKeyFromPeerId(
peerId: PeerId,
peerId: PeerId
): Promise<Uint8Array> {
if (peerId.type !== "secp256k1") {
throw new Error("Unsupported peer id type");

View File

@ -1,7 +1,7 @@
import type { Multiaddr } from "@multiformats/multiaddr";
import {
convertToBytes,
convertToString,
convertToString
} from "@multiformats/multiaddr/convert";
import type { ENRKey, ENRValue, SequenceNumber, Waku2 } from "@waku/interfaces";
import { bytesToUtf8 } from "@waku/utils/bytes";
@ -17,7 +17,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
protected constructor(
kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1),
signature?: Uint8Array,
signature?: Uint8Array
) {
super(Object.entries(kvs));
this.seq = seq;
@ -147,7 +147,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
this,
"waku2",
waku2,
(w) => new Uint8Array([encodeWaku2(w)]),
(w) => new Uint8Array([encodeWaku2(w)])
);
}
}
@ -155,7 +155,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
function getStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
proto: string
): string | undefined {
const raw = map.get(key);
if (!raw) return;
@ -165,7 +165,7 @@ function getStringValue(
function getNumberAsStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
proto: string
): number | undefined {
const raw = map.get(key);
if (!raw) return;
@ -176,7 +176,7 @@ function setStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
value: string | undefined,
value: string | undefined
): void {
deleteUndefined(map, key, value, convertToBytes.bind({}, proto));
}
@ -185,7 +185,7 @@ function setNumberAsStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
value: number | undefined,
value: number | undefined
): void {
setStringValue(map, key, proto, value?.toString(10));
}
@ -194,7 +194,7 @@ function deleteUndefined<K, V, W>(
map: Map<K, W>,
key: K,
value: V | undefined,
transform: (v: V) => W,
transform: (v: V) => W
): void {
if (value !== undefined) {
map.set(key, transform(value));

View File

@ -5,10 +5,10 @@ import { bytesToHex } from "@waku/utils/bytes";
import { keccak256 } from "./crypto.js";
export async function sign(
privKey: Uint8Array,
msg: Uint8Array,
msg: Uint8Array
): Promise<Uint8Array> {
return secp.sign(keccak256(msg), privKey, {
der: false,
der: false
});
}

View File

@ -11,7 +11,7 @@ const waku2FieldEncodings = {
allTrue: 15,
allFalse: 0,
relayAndFilterTrue: 5,
storeAndLightPushTrue: 10,
storeAndLightPushTrue: 10
};
describe("ENR waku2 codec", function () {
@ -22,7 +22,7 @@ describe("ENR waku2 codec", function () {
relay: false,
store: false,
filter: false,
lightPush: false,
lightPush: false
};
});

View File

@ -19,7 +19,7 @@ export function decodeWaku2(byte: number): Waku2 {
relay: false,
store: false,
filter: false,
lightPush: false,
lightPush: false
};
if (byte % 2) waku2.relay = true;
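
Based on the test constants above (allTrue = 15, relayAndFilterTrue = 5, storeAndLightPushTrue = 10) and the `byte % 2` check, the waku2 field packs relay, store, filter and lightPush into bits 0-3. A small sketch of decoding such a byte (the import path is an assumption):

```typescript
// Hypothetical usage; decodeWaku2 lives in the ENR package's waku2 codec module.
import { decodeWaku2 } from "@waku/enr";

// 5 = 0b0101 -> relay (bit 0) and filter (bit 2) are set.
const caps = decodeWaku2(5);
// caps: { relay: true, store: false, filter: true, lightPush: false }
```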

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -47,19 +47,12 @@
"node": ">=16"
},
"devDependencies": {
"@chainsafe/libp2p-gossipsub": "^9.1.0",
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@chainsafe/libp2p-gossipsub": "^10.0.0",
"@multiformats/multiaddr": "^12.0.0",
"cspell": "^7.0.0",
"npm-run-all": "^4.1.5",
"typescript": "^5.0.4",
"libp2p": "^0.45.9"
"libp2p": "^0.46.3"
},
"typedoc": {
"entryPoint": "./src/index.ts"

View File

@ -1,10 +1,10 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { Peer } from "@libp2p/interface-peer-store";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer } from "@libp2p/interface/peer-store";
import type { EventEmitter } from "@libp2p/interfaces/events";
export enum Tags {
BOOTSTRAP = "bootstrap",
PEER_EXCHANGE = "peer-exchange",
PEER_EXCHANGE = "peer-exchange"
}
export interface ConnectionManagerOptions {
@ -28,7 +28,7 @@ export enum EPeersByDiscoveryEvents {
PEER_DISCOVERY_BOOTSTRAP = "peer:discovery:bootstrap",
PEER_DISCOVERY_PEER_EXCHANGE = "peer:discovery:peer-exchange",
PEER_CONNECT_BOOTSTRAP = "peer:connected:bootstrap",
PEER_CONNECT_PEER_EXCHANGE = "peer:connected:peer-exchange",
PEER_CONNECT_PEER_EXCHANGE = "peer:connected:peer-exchange"
}
export interface IPeersByDiscoveryEvents {

View File

@ -1,5 +1,5 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Multiaddr } from "@multiformats/multiaddr";
export type ENRKey = string;

View File

@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { IDecodedMessage, IDecoder } from "./message.js";
import type { ContentTopic } from "./misc.js";
@ -12,7 +12,7 @@ export type ContentFilter = {
export interface IFilterSubscription {
subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
callback: Callback<T>
): Promise<void>;
unsubscribe(contentTopics: ContentTopic[]): Promise<void>;
@ -26,6 +26,6 @@ export type IFilter = IReceiver &
IBaseProtocol & {
createSubscription(
pubSubTopic?: string,
peerId?: PeerId,
peerId?: PeerId
): Promise<IFilterSubscription>;
};

View File

@ -1,5 +1,5 @@
import type { GossipSub } from "@chainsafe/libp2p-gossipsub";
import type { Libp2p as BaseLibp2p } from "@libp2p/interface-libp2p";
import type { Libp2p as BaseLibp2p } from "@libp2p/interface";
import type { Libp2pInit } from "libp2p";
import type { identifyService } from "libp2p/identify";
import type { PingService } from "libp2p/ping";

View File

@ -73,6 +73,6 @@ export interface IDecoder<T extends IDecodedMessage> {
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
fromProtoObj: (
pubSubTopic: string,
proto: IProtoMessage,
proto: IProtoMessage
) => Promise<T | undefined>;
}

View File

@ -1,6 +1,6 @@
import type { ConnectionManager } from "@libp2p/interface-connection-manager";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { PeerStore } from "@libp2p/interface-peer-store";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerStore } from "@libp2p/interface/peer-store";
import type { ConnectionManager } from "@libp2p/interface-internal/connection-manager";
import { IEnr } from "./enr.js";
import { IBaseProtocol } from "./protocols.js";

View File

@ -1,6 +1,6 @@
import type { Libp2p } from "@libp2p/interface-libp2p";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { Peer, PeerStore } from "@libp2p/interface-peer-store";
import type { Libp2p } from "@libp2p/interface";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { Libp2pOptions } from "libp2p";
import type { IDecodedMessage } from "./message.js";
@ -9,7 +9,7 @@ export enum Protocols {
Relay = "relay",
Store = "store",
LightPush = "lightpush",
Filter = "filter",
Filter = "filter"
}
export interface IBaseProtocol {
@ -62,7 +62,7 @@ export type ProtocolOptions = {
};
export type Callback<T extends IDecodedMessage> = (
msg: T,
msg: T
) => void | Promise<void>;
export enum SendError {
@ -70,7 +70,7 @@ export enum SendError {
ENCODE_FAILED = "Failed to encode",
DECODE_FAILED = "Failed to decode",
SIZE_TOO_BIG = "Size is too big",
NO_RPC_RESPONSE = "No RPC response",
NO_RPC_RESPONSE = "No RPC response"
}
export interface SendResult {

View File

@ -9,11 +9,11 @@ export type ActiveSubscriptions = Map<PubSubTopic, ContentTopic[]>;
export interface IReceiver {
toSubscriptionIterator: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions,
opts?: ProtocolOptions
) => Promise<IAsyncIterator<T>>;
subscribe: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
opts?: ProtocolOptions,
opts?: ProtocolOptions
) => Unsubscribe | Promise<Unsubscribe>;
}

View File

@ -11,7 +11,7 @@ import type { ISender } from "./sender.js";
* @property start - Function to start the relay, returning a Promise that resolves when initialization is complete.
* @property getMeshPeers - Function to retrieve the mesh peers for a given topic or all topics if none is specified. Returns an array of peer IDs as strings.
*/
interface IRelayAPI {
export interface IRelayAPI {
readonly gossipSub: GossipSub;
start: () => Promise<void>;
getMeshPeers: (topic?: TopicStr) => PeerIdStr[];
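
As a rough illustration of the now-exported interface, a sketch under the assumptions that `IRelayAPI` is re-exported from `@waku/interfaces` and that the default Waku pubsub topic is used:

```typescript
// Sketch only: logs the gossipsub mesh for one topic once the relay has started.
import type { IRelayAPI } from "@waku/interfaces"; // assumed re-export

async function logMeshPeers(relay: IRelayAPI): Promise<void> {
  await relay.start(); // resolves when gossipsub initialization is complete
  const peers = relay.getMeshPeers("/waku/2/default-waku/proto"); // assumed topic
  console.log(`mesh peers: ${peers.length}`, peers);
}
```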

View File

@ -5,6 +5,6 @@ export interface ISender {
send: (
encoder: IEncoder,
message: IMessage,
opts?: ProtocolOptions,
opts?: ProtocolOptions
) => Promise<SendResult>;
}

View File

@ -3,7 +3,7 @@ import type { IBaseProtocol, ProtocolOptions } from "./protocols.js";
export enum PageDirection {
BACKWARD = "backward",
FORWARD = "forward",
FORWARD = "forward"
}
export interface TimeFilter {
@ -49,17 +49,17 @@ export interface IStore extends IBaseProtocol {
queryOrderedCallback: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions,
options?: StoreQueryOptions
) => Promise<void>;
queryCallbackOnPromise: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (
message: Promise<T | undefined>,
message: Promise<T | undefined>
) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions,
options?: StoreQueryOptions
) => Promise<void>;
queryGenerator: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: StoreQueryOptions,
options?: StoreQueryOptions
) => AsyncGenerator<Promise<T | undefined>[]>;
}

View File

@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection";
import type { PeerId } from "@libp2p/interface-peer-id";
import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import type { Multiaddr } from "@multiformats/multiaddr";
import { IConnectionManager } from "./connection_manager.js";

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -10,7 +10,7 @@ Symmetric encryption uses a unique key to encrypt and decrypt messages.
import {
createDecoder,
createEncoder,
generateSymmetricKey,
generateSymmetricKey
} from "@waku/message-encryption/symmetric";
// Generate a random key
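
To make the snippet above concrete, a minimal end-to-end sketch of the symmetric variant (content topic and variable names are illustrative, not part of this commit):

```typescript
import {
  createDecoder,
  createEncoder,
  generateSymmetricKey
} from "@waku/message-encryption/symmetric";

const symKey = generateSymmetricKey();

const contentTopic = "/example-app/1/chat/proto"; // illustrative topic
const encoder = createEncoder({ contentTopic, symKey });
const decoder = createDecoder(contentTopic, symKey);
// The encoder is passed to send(); the decoder to subscribe() or store queries.
```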
@ -40,7 +40,7 @@ import {
createDecoder,
createEncoder,
generatePrivateKey,
getPublicKey,
getPublicKey
} from "@waku/message-encryption/ecies";
// Generate a random private key
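
And the asymmetric (ECIES) counterpart, again as a sketch with an illustrative content topic:

```typescript
import {
  createDecoder,
  createEncoder,
  generatePrivateKey,
  getPublicKey
} from "@waku/message-encryption/ecies";

const privateKey = generatePrivateKey();
const publicKey = getPublicKey(privateKey);

const contentTopic = "/example-app/1/private-chat/proto"; // illustrative topic
const encoder = createEncoder({ contentTopic, publicKey }); // encrypt to the recipient's public key
const decoder = createDecoder(contentTopic, privateKey); // decrypt with the matching private key
```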

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"],
preprocessors: {
"src/**/*.ts": ["webpack"],
"src/**/*.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true,
client: {
mocha: {
timeout: 6000, // Default is 2s
},
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }],
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js",
}),
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"],
},
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map",
},
devtool: "inline-source-map"
}
});
};

View File

@ -80,12 +80,6 @@
"js-sha3": "^0.8.0"
},
"devDependencies": {
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.0.2",

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -2,9 +2,9 @@ export const Symmetric = {
keySize: 32,
ivSize: 12,
tagSize: 16,
algorithm: { name: "AES-GCM", length: 128 },
algorithm: { name: "AES-GCM", length: 128 }
};
export const Asymmetric = {
keySize: 32,
keySize: 32
};

View File

@ -13,7 +13,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const counters = new Uint8Array([ctr >> 24, ctr >> 16, ctr >> 8, ctr]);
const countersSecret = concat(
[counters, secret],
counters.length + secret.length,
counters.length + secret.length
);
const willBeHashResult = sha256(countersSecret);
willBeResult = willBeResult.then((result) =>
@ -21,9 +21,9 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const _hashResult = new Uint8Array(hashResult);
return concat(
[result, _hashResult],
result.length + _hashResult.length,
result.length + _hashResult.length
);
}),
})
);
written += 32;
ctr += 1;
@ -34,7 +34,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
function aesCtrEncrypt(
counter: Uint8Array,
key: ArrayBufferLike,
data: ArrayBufferLike,
data: ArrayBufferLike
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["encrypt"])
@ -42,8 +42,8 @@ function aesCtrEncrypt(
getSubtle().encrypt(
{ name: "AES-CTR", counter: counter, length: 128 },
cryptoKey,
data,
),
data
)
)
.then((bytes) => new Uint8Array(bytes));
}
@ -51,7 +51,7 @@ function aesCtrEncrypt(
function aesCtrDecrypt(
counter: Uint8Array,
key: ArrayBufferLike,
data: ArrayBufferLike,
data: ArrayBufferLike
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["decrypt"])
@ -59,15 +59,15 @@ function aesCtrDecrypt(
getSubtle().decrypt(
{ name: "AES-CTR", counter: counter, length: 128 },
cryptoKey,
data,
),
data
)
)
.then((bytes) => new Uint8Array(bytes));
}
function hmacSha256Sign(
key: ArrayBufferLike,
msg: ArrayBufferLike,
msg: ArrayBufferLike
): PromiseLike<Uint8Array> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
return getSubtle()
@ -79,12 +79,12 @@ function hmacSha256Sign(
function hmacSha256Verify(
key: ArrayBufferLike,
msg: ArrayBufferLike,
sig: ArrayBufferLike,
sig: ArrayBufferLike
): Promise<boolean> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
const _key = getSubtle().importKey("raw", key, algorithm, false, ["verify"]);
return _key.then((cryptoKey) =>
getSubtle().verify(algorithm, cryptoKey, sig, msg),
getSubtle().verify(algorithm, cryptoKey, sig, msg)
);
}
@ -99,11 +99,11 @@ function hmacSha256Verify(
function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
if (privateKeyA.length !== 32) {
throw new Error(
`Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`,
`Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`
);
} else if (publicKeyB.length !== 65) {
throw new Error(
`Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`,
`Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`
);
} else if (publicKeyB[0] !== 4) {
throw new Error("Bad public key, a valid public key would begin with 4");
@ -123,7 +123,7 @@ function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
*/
export async function encrypt(
publicKeyTo: Uint8Array,
msg: Uint8Array,
msg: Uint8Array
): Promise<Uint8Array> {
const ephemPrivateKey = randomBytes(32);
@ -143,7 +143,7 @@ export async function encrypt(
return concat(
[ephemPublicKey, ivCipherText, hmac],
ephemPublicKey.length + ivCipherText.length + hmac.length,
ephemPublicKey.length + ivCipherText.length + hmac.length
);
}
@ -159,15 +159,15 @@ const metaLength = 1 + 64 + 16 + 32;
*/
export async function decrypt(
privateKey: Uint8Array,
encrypted: Uint8Array,
encrypted: Uint8Array
): Promise<Uint8Array> {
if (encrypted.length <= metaLength) {
throw new Error(
`Invalid Ciphertext. Data is too small. It should ba at least ${metaLength} bytes`,
`Invalid Ciphertext. Data is too small. It should ba at least ${metaLength} bytes`
);
} else if (encrypted[0] !== 4) {
throw new Error(
`Not a valid ciphertext. It should begin with 4 but actually begin with ${encrypted[0]}`,
`Not a valid ciphertext. It should begin with 4 but actually begin with ${encrypted[0]}`
);
} else {
// deserialize
@ -182,7 +182,7 @@ export async function decrypt(
const px = derive(privateKey, ephemPublicKey);
const hash = await kdf(px, 32);
const [encryptionKey, macKey] = await sha256(hash.slice(16)).then(
(macKey) => [hash.slice(0, 16), macKey],
(macKey) => [hash.slice(0, 16), macKey]
);
if (!(await hmacSha256Verify(macKey, cipherAndIv, msgMac))) {

View File

@ -9,7 +9,7 @@ import { Asymmetric, Symmetric } from "../constants.js";
declare const self: Record<string, any> | undefined;
const crypto: { node?: any; web?: any } = {
node: nodeCrypto,
web: typeof self === "object" && "crypto" in self ? self.crypto : undefined,
web: typeof self === "object" && "crypto" in self ? self.crypto : undefined
};
export function getSubtle(): SubtleCrypto {
@ -19,7 +19,7 @@ export function getSubtle(): SubtleCrypto {
return crypto.node.webcrypto.subtle;
} else {
throw new Error(
"The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)",
"The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)"
);
}
}
@ -59,15 +59,15 @@ export const getPublicKey = secp.getPublicKey;
*/
export async function sign(
message: Uint8Array,
privateKey: Uint8Array,
privateKey: Uint8Array
): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true,
der: false,
der: false
});
return concat(
[signature, new Uint8Array([recoveryId])],
signature.length + 1,
signature.length + 1
);
}

View File

@ -5,12 +5,12 @@ import { getSubtle, randomBytes } from "./index.js";
export async function encrypt(
iv: Uint8Array,
key: Uint8Array,
clearText: Uint8Array,
clearText: Uint8Array
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["encrypt"])
.then((cryptoKey) =>
getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText),
getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText)
)
.then((cipher) => new Uint8Array(cipher));
}
@ -18,16 +18,12 @@ export async function encrypt(
export async function decrypt(
iv: Uint8Array,
key: Uint8Array,
cipherText: Uint8Array,
cipherText: Uint8Array
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["decrypt"])
.then((cryptoKey) =>
getSubtle().decrypt(
{ iv, ...Symmetric.algorithm },
cryptoKey,
cipherText,
),
getSubtle().decrypt({ iv, ...Symmetric.algorithm }, cryptoKey, cipherText)
)
.then((clear) => new Uint8Array(clear));
}

View File

@ -1,6 +1,6 @@
import {
DecodedMessage as DecodedMessageV0,
proto,
proto
} from "@waku/core/lib/message/version_0";
import type { IDecodedMessage } from "@waku/interfaces";
@ -15,7 +15,7 @@ export class DecodedMessage
proto: proto.WakuMessage,
decodedPayload: Uint8Array,
public signature?: Uint8Array,
public signaturePublicKey?: Uint8Array,
public signaturePublicKey?: Uint8Array
) {
super(pubSubTopic, proto);
this._decodedPayload = decodedPayload;

View File

@ -18,7 +18,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({
contentTopic,
publicKey,
publicKey
});
const bytes = await encoder.toWire({ payload });
@ -34,8 +34,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined;
},
),
}
)
);
});
@ -54,7 +54,7 @@ describe("Ecies Encryption", function () {
contentTopic,
payload,
alicePrivateKey,
bobPrivateKey,
bobPrivateKey
) => {
const alicePublicKey = getPublicKey(alicePrivateKey);
const bobPublicKey = getPublicKey(bobPrivateKey);
@ -62,7 +62,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({
contentTopic,
publicKey: bobPublicKey,
sigPrivKey: alicePrivateKey,
sigPrivKey: alicePrivateKey
});
const bytes = await encoder.toWire({ payload });
@ -78,8 +78,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(alicePublicKey);
},
),
}
)
);
});
@ -93,7 +93,7 @@ describe("Ecies Encryption", function () {
async (pubSubTopic, contentTopic, payload, privateKey) => {
const publicKey = getPublicKey(privateKey);
const metaSetter = (
msg: IProtoMessage & { meta: undefined },
msg: IProtoMessage & { meta: undefined }
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -104,7 +104,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({
contentTopic,
publicKey,
metaSetter,
metaSetter
});
const bytes = await encoder.toWire({ payload });
@ -121,12 +121,12 @@ describe("Ecies Encryption", function () {
ephemeral: undefined,
meta: undefined,
rateLimitProof: undefined,
version: undefined,
version: undefined
});
expect(result.meta).to.deep.equal(expectedMeta);
},
),
}
)
);
});
});
@ -136,7 +136,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void {
createEncoder({
contentTopic: undefined as unknown as string,
publicKey: new Uint8Array(),
publicKey: new Uint8Array()
});
};

View File

@ -5,7 +5,7 @@ import type {
IDecoder,
IEncoder,
IMessage,
IProtoMessage,
IProtoMessage
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import debug from "debug";
@ -15,14 +15,14 @@ import {
decryptAsymmetric,
encryptAsymmetric,
postCipher,
preCipher,
preCipher
} from "./waku_payload.js";
import {
generatePrivateKey,
getPublicKey,
OneMillion,
Version,
Version
} from "./index.js";
export { generatePrivateKey, getPublicKey };
@ -36,7 +36,7 @@ class Encoder implements IEncoder {
private publicKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter,
public metaSetter?: IMetaSetter
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -63,7 +63,7 @@ class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined,
rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral,
ephemeral: this.ephemeral
};
if (this.metaSetter) {
@ -99,28 +99,28 @@ export function createEncoder({
publicKey,
sigPrivKey,
ephemeral = false,
metaSetter,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
contentTopic,
publicKey,
sigPrivKey,
ephemeral,
metaSetter,
metaSetter
);
}
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(
contentTopic: string,
private privateKey: Uint8Array,
private privateKey: Uint8Array
) {
super(contentTopic);
}
async fromProtoObj(
pubSubTopic: string,
protoMessage: IProtoMessage,
protoMessage: IProtoMessage
): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload;
@ -129,7 +129,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:",
Version,
", actual:",
protoMessage.version,
protoMessage.version
);
return;
}
@ -141,7 +141,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) {
log(
`Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`,
e,
e
);
return;
}
@ -164,7 +164,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage,
res.payload,
res.sig?.signature,
res.sig?.publicKey,
res.sig?.publicKey
);
}
}
@ -184,7 +184,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
privateKey: Uint8Array,
privateKey: Uint8Array
): Decoder {
return new Decoder(contentTopic, privateKey);
}

View File

@ -1,7 +1,7 @@
import {
generatePrivateKey,
generateSymmetricKey,
getPublicKey,
getPublicKey
} from "./crypto/index.js";
import { DecodedMessage } from "./decoded_message.js";

View File

@ -16,7 +16,7 @@ describe("Symmetric Encryption", function () {
async (pubSubTopic, contentTopic, payload, symKey) => {
const encoder = createEncoder({
contentTopic,
symKey,
symKey
});
const bytes = await encoder.toWire({ payload });
@ -32,8 +32,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined;
},
),
}
)
);
});
@ -51,7 +51,7 @@ describe("Symmetric Encryption", function () {
const encoder = createEncoder({
contentTopic,
symKey,
sigPrivKey,
sigPrivKey
});
const bytes = await encoder.toWire({ payload });
@ -67,8 +67,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(sigPubKey);
},
),
}
)
);
});
@ -81,7 +81,7 @@ describe("Symmetric Encryption", function () {
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
async (pubSubTopic, contentTopic, payload, symKey) => {
const metaSetter = (
msg: IProtoMessage & { meta: undefined },
msg: IProtoMessage & { meta: undefined }
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -92,7 +92,7 @@ describe("Symmetric Encryption", function () {
const encoder = createEncoder({
contentTopic,
symKey,
metaSetter,
metaSetter
});
const bytes = await encoder.toWire({ payload });
@ -109,12 +109,12 @@ describe("Symmetric Encryption", function () {
ephemeral: undefined,
meta: undefined,
rateLimitProof: undefined,
version: undefined,
version: undefined
});
expect(result.meta).to.deep.equal(expectedMeta);
},
),
}
)
);
});
});
@ -124,7 +124,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void {
createEncoder({
contentTopic: undefined as unknown as string,
symKey: new Uint8Array(),
symKey: new Uint8Array()
});
};

View File

@ -5,7 +5,7 @@ import type {
IEncoder,
IMessage,
IMetaSetter,
IProtoMessage,
IProtoMessage
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import debug from "debug";
@ -15,7 +15,7 @@ import {
decryptSymmetric,
encryptSymmetric,
postCipher,
preCipher,
preCipher
} from "./waku_payload.js";
import { generateSymmetricKey, OneMillion, Version } from "./index.js";
@ -31,7 +31,7 @@ class Encoder implements IEncoder {
private symKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter,
public metaSetter?: IMetaSetter
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -58,7 +58,7 @@ class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined,
rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral,
ephemeral: this.ephemeral
};
if (this.metaSetter) {
@ -95,7 +95,7 @@ export function createEncoder({
symKey,
sigPrivKey,
ephemeral = false,
metaSetter,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(contentTopic, symKey, sigPrivKey, ephemeral, metaSetter);
}
@ -103,14 +103,14 @@ export function createEncoder({
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(
contentTopic: string,
private symKey: Uint8Array,
private symKey: Uint8Array
) {
super(contentTopic);
}
async fromProtoObj(
pubSubTopic: string,
protoMessage: IProtoMessage,
protoMessage: IProtoMessage
): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload;
@ -119,7 +119,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:",
Version,
", actual:",
protoMessage.version,
protoMessage.version
);
return;
}
@ -131,7 +131,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) {
log(
`Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`,
e,
e
);
return;
}
@ -154,7 +154,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage,
res.payload,
res.sig?.signature,
res.sig?.publicKey,
res.sig?.publicKey
);
}
}
@ -174,7 +174,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
symKey: Uint8Array,
symKey: Uint8Array
): Decoder {
return new Decoder(contentTopic, symKey);
}

View File

@ -8,7 +8,7 @@ import {
encryptAsymmetric,
encryptSymmetric,
postCipher,
preCipher,
preCipher
} from "./waku_payload.js";
describe("Waku Payload", () => {
@ -24,8 +24,8 @@ describe("Waku Payload", () => {
const res = await decryptAsymmetric(enc, privKey);
expect(res).deep.equal(message);
},
),
}
)
);
});
@ -39,8 +39,8 @@ describe("Waku Payload", () => {
const res = await decryptSymmetric(enc, key);
expect(res).deep.equal(message);
},
),
}
)
);
});
@ -52,9 +52,9 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal(
message,
"Payload was not encrypted then decrypted correctly",
"Payload was not encrypted then decrypted correctly"
);
}),
})
);
});
@ -71,14 +71,14 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal(
message,
"Payload was not encrypted then decrypted correctly",
"Payload was not encrypted then decrypted correctly"
);
expect(res?.sig?.publicKey).deep.equal(
sigPubKey,
"signature Public key was not recovered from encrypted then decrypted signature",
"signature Public key was not recovered from encrypted then decrypted signature"
);
},
),
}
)
);
});
});

View File

@ -21,7 +21,7 @@ function getSizeOfPayloadSizeField(message: Uint8Array): number {
function getPayloadSize(
message: Uint8Array,
sizeOfPayloadSizeField: number,
sizeOfPayloadSizeField: number
): number {
let payloadSizeBytes = message.slice(1, 1 + sizeOfPayloadSizeField);
// int 32 == 4 bytes
@ -29,7 +29,7 @@ function getPayloadSize(
// If less than 4 bytes pad right (Little Endian).
payloadSizeBytes = concat(
[payloadSizeBytes, new Uint8Array(4 - sizeOfPayloadSizeField)],
4,
4
);
}
const payloadSizeDataView = new DataView(payloadSizeBytes.buffer);
@ -50,7 +50,7 @@ function isMessageSigned(message: Uint8Array): boolean {
*/
export async function encryptAsymmetric(
data: Uint8Array,
publicKey: Uint8Array | string,
publicKey: Uint8Array | string
): Promise<Uint8Array> {
return ecies.encrypt(hexToBytes(publicKey), data);
}
@ -63,7 +63,7 @@ export async function encryptAsymmetric(
*/
export async function decryptAsymmetric(
payload: Uint8Array,
privKey: Uint8Array,
privKey: Uint8Array
): Promise<Uint8Array> {
return ecies.decrypt(privKey, payload);
}
@ -79,7 +79,7 @@ export async function decryptAsymmetric(
*/
export async function encryptSymmetric(
data: Uint8Array,
key: Uint8Array | string,
key: Uint8Array | string
): Promise<Uint8Array> {
const iv = symmetric.generateIv();
@ -99,7 +99,7 @@ export async function encryptSymmetric(
*/
export async function decryptSymmetric(
payload: Uint8Array,
key: Uint8Array | string,
key: Uint8Array | string
): Promise<Uint8Array> {
const ivStart = payload.length - Symmetric.ivSize;
const cipher = payload.slice(0, ivStart);
@ -135,7 +135,7 @@ function computeSizeOfPayloadSizeField(payload: Uint8Array): number {
function validateDataIntegrity(
value: Uint8Array,
expectedSize: number,
expectedSize: number
): boolean {
if (value.length !== expectedSize) {
return false;
@ -157,7 +157,7 @@ function getHash(message: Uint8Array, isSigned: boolean): Uint8Array {
function ecRecoverPubKey(
messageHash: Uint8Array,
signature: Uint8Array,
signature: Uint8Array
): Uint8Array | undefined {
const recoveryDataView = new DataView(signature.slice(64).buffer);
const recovery = recoveryDataView.getUint8(0);
@ -175,7 +175,7 @@ function ecRecoverPubKey(
*/
export async function preCipher(
messagePayload: Uint8Array,
sigPrivKey?: Uint8Array,
sigPrivKey?: Uint8Array
): Promise<Uint8Array> {
let envelope = new Uint8Array([0]); // No flags
envelope = addPayloadSizeField(envelope, messagePayload);
@ -216,7 +216,7 @@ export async function preCipher(
* @internal
*/
export function postCipher(
message: Uint8Array,
message: Uint8Array
): { payload: Uint8Array; sig?: Signature } | undefined {
const sizeOfPayloadSizeField = getSizeOfPayloadSizeField(message);
if (sizeOfPayloadSizeField === 0) return;
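
For illustration, a rough sketch of the pipeline these @internal helpers implement (build the envelope, encrypt, then reverse both steps on receipt); the relative import mirrors the spec file above, while the 32-byte key size and the utf8ToBytes/bytesToUtf8 helpers from "@waku/utils/bytes" are assumptions.

// Sketch: function names match the signatures above; key length and byte helpers are assumed.
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";

import {
  decryptSymmetric,
  encryptSymmetric,
  postCipher,
  preCipher
} from "./waku_payload.js";

async function roundTrip(): Promise<void> {
  // Web Crypto (browser or recent Node) is assumed for key generation.
  const key = crypto.getRandomValues(new Uint8Array(32)); // assumed 32-byte symmetric key
  const data = utf8ToBytes("hello waku");

  // preCipher builds the versioned envelope: flags, payload-size field, payload,
  // and an optional signature (none here, since no sigPrivKey is passed).
  const envelope = await preCipher(data);

  // encryptSymmetric appends a random IV to the ciphertext, which
  // decryptSymmetric strips off again (see ivStart above).
  const encrypted = await encryptSymmetric(envelope, key);
  const decrypted = await decryptSymmetric(encrypted, key);

  // postCipher parses the envelope back into payload (plus signature, if any).
  const res = postCipher(decrypted);
  console.log(res && bytesToUtf8(res.payload)); // "hello waku"
}

void roundTrip();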

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"],
preprocessors: {
"src/**/!(node).spec.ts": ["webpack"],
"src/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true,
client: {
mocha: {
timeout: 6000, // Default is 2s
},
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }],
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js",
}),
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"],
},
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map",
},
devtool: "inline-source-map"
}
});
};

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -18,7 +18,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined,
rateLimitProof: undefined,
timestamp: undefined,
version: undefined,
version: undefined
};
const hash = messageHash(pubSubTopic, message);
@ -38,7 +38,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined,
rateLimitProof: undefined,
timestamp: undefined,
version: undefined,
version: undefined
};
const hash = messageHash(pubSubTopic, message);
@ -58,7 +58,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined,
rateLimitProof: undefined,
timestamp: undefined,
version: undefined,
version: undefined
};
const hash = messageHash(pubSubTopic, message);

View File

@ -8,7 +8,7 @@ import { concat, utf8ToBytes } from "@waku/utils/bytes";
*/
export function messageHash(
pubsubTopic: string,
message: IProtoMessage,
message: IProtoMessage
): Uint8Array {
const pubsubTopicBytes = utf8ToBytes(pubsubTopic);
const contentTopicBytes = utf8ToBytes(message.contentTopic);
@ -20,7 +20,7 @@ export function messageHash(
pubsubTopicBytes,
message.payload,
contentTopicBytes,
message.meta,
message.meta
]);
} else {
bytes = concat([pubsubTopicBytes, message.payload, contentTopicBytes]);
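
For illustration, a minimal sketch of calling messageHash with the same message shape used by the test vectors above; the "@waku/message-hash" package name, the bytesToHex helper and all values are placeholders or assumptions, not data from this diff.

// Sketch: package name, bytesToHex and every value here are placeholders/assumptions.
import type { IProtoMessage } from "@waku/interfaces";
import { bytesToHex, utf8ToBytes } from "@waku/utils/bytes";
import { messageHash } from "@waku/message-hash";

const pubsubTopic = "/waku/2/default-waku/proto";
const message: IProtoMessage = {
  payload: utf8ToBytes("placeholder payload"),
  contentTopic: "/example/1/test/proto",
  meta: utf8ToBytes("placeholder meta"), // when set, meta is included in the hash
  ephemeral: undefined,
  rateLimitProof: undefined,
  timestamp: undefined,
  version: undefined
};

// Deterministic hash over pubsub topic, payload, content topic and (optional) meta.
console.log(bytesToHex(messageHash(pubsubTopic, message)));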

View File

@ -1,6 +1,6 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json",
},
project: "./tsconfig.dev.json"
}
};

View File

@ -48,25 +48,18 @@
"node": ">=16"
},
"dependencies": {
"@libp2p/interface-peer-discovery": "^2.0.0",
"@libp2p/interfaces": "^3.3.2",
"@waku/core": "0.0.22",
"@waku/enr": "0.0.16",
"@waku/interfaces": "0.0.17",
"@waku/proto": "0.0.5",
"@waku/utils": "0.0.10",
"@waku/interfaces": "0.0.17",
"debug": "^4.3.4",
"it-all": "^3.0.2",
"it-length-prefixed": "^9.0.1",
"it-pipe": "^3.0.1"
},
"devDependencies": {
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.0.2",

View File

@ -11,14 +11,14 @@ export default {
input,
output: {
dir: "bundle",
format: "esm",
format: "esm"
},
plugins: [
commonjs(),
json(),
nodeResolve({
browser: true,
preferBuiltins: false,
}),
],
preferBuiltins: false
})
]
};

View File

@ -1,11 +1,11 @@
export {
wakuPeerExchange,
PeerExchangeCodec,
WakuPeerExchange,
WakuPeerExchange
} from "./waku_peer_exchange.js";
export {
wakuPeerExchangeDiscovery,
PeerExchangeDiscovery,
Options,
DEFAULT_PEER_EXCHANGE_TAG_NAME,
DEFAULT_PEER_EXCHANGE_TAG_NAME
} from "./waku_peer_exchange_discovery.js";

View File

@ -11,9 +11,9 @@ export class PeerExchangeRPC {
const { numPeers } = params;
return new PeerExchangeRPC({
query: {
numPeers: numPeers,
numPeers: numPeers
},
response: undefined,
response: undefined
});
}

Some files were not shown because too many files have changed in this diff.