From bb3550995248414afdde0bb3ee48a1a4e582925b Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 21 Apr 2023 11:04:00 +0100 Subject: [PATCH 1/8] feat!: make peerstore atomic Instead of having separate books for addresses, protocols etc, just have simple save/merge/patch methods for updating peer data. This means we can update peer data in one call instead of needing to make multiple async calls. Creation of new peer records is now centralised so events can be emitted cleanly and there's no chance of another async call creating the peer record before you so locking can be removed. --- .gitignore | 2 + README.md | 170 ------ package.json | 12 +- src/address-book.ts | 367 ------------ src/errors.ts | 3 +- src/index.ts | 205 +++---- src/key-book.ts | 140 ----- src/metadata-book.ts | 244 -------- src/pb/peer.proto | 17 +- src/pb/peer.ts | 224 ++++++-- src/pb/tags.proto | 11 - src/pb/tags.ts | 145 ----- src/proto-book.ts | 234 -------- src/store.ts | 302 ++++------ src/utils/bytes-to-peer.ts | 41 ++ src/utils/dedupe-addresses.ts | 38 ++ src/utils/dedupe-metadata.ts | 25 + src/utils/dedupe-tags.ts | 21 + src/utils/peer-data-to-datastore-peer.ts | 118 ++++ src/utils/peer-id-to-datastore-key.ts | 15 + test/address-book.spec.ts | 689 ----------------------- test/index.spec.ts | 161 ++++++ test/key-book.spec.ts | 129 ----- test/merge.spec.ts | 145 +++++ test/metadata-book.spec.ts | 358 ------------ test/patch.spec.ts | 135 +++++ test/peer-store.spec.ts | 324 ----------- test/proto-book.spec.ts | 388 ------------- test/save.spec.ts | 173 ++++++ test/utils/dedupe-addresses.spec.ts | 75 +++ test/utils/dedupe-metadata.spec.ts | 35 ++ test/utils/dedupe-tags.spec.ts | 35 ++ 32 files changed, 1419 insertions(+), 3562 deletions(-) delete mode 100644 src/address-book.ts delete mode 100644 src/key-book.ts delete mode 100644 src/metadata-book.ts delete mode 100644 src/pb/tags.proto delete mode 100644 src/pb/tags.ts delete mode 100644 src/proto-book.ts create mode 100644 src/utils/bytes-to-peer.ts create mode 100644 src/utils/dedupe-addresses.ts create mode 100644 src/utils/dedupe-metadata.ts create mode 100644 src/utils/dedupe-tags.ts create mode 100644 src/utils/peer-data-to-datastore-peer.ts create mode 100644 src/utils/peer-id-to-datastore-key.ts delete mode 100644 test/address-book.spec.ts create mode 100644 test/index.spec.ts delete mode 100644 test/key-book.spec.ts create mode 100644 test/merge.spec.ts delete mode 100644 test/metadata-book.spec.ts create mode 100644 test/patch.spec.ts delete mode 100644 test/peer-store.spec.ts delete mode 100644 test/proto-book.spec.ts create mode 100644 test/save.spec.ts create mode 100644 test/utils/dedupe-addresses.spec.ts create mode 100644 test/utils/dedupe-metadata.spec.ts create mode 100644 test/utils/dedupe-tags.spec.ts diff --git a/.gitignore b/.gitignore index 9a3cb9f..7ad9e67 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,9 @@ node_modules +build dist .docs .coverage node_modules package-lock.json yarn.lock +.vscode diff --git a/README.md b/README.md index b55bf4f..6d7dffe 100644 --- a/README.md +++ b/README.md @@ -11,26 +11,6 @@ - [Install](#install) - [Browser ` ``` -## Description - -Libp2p's PeerStore is responsible for keeping an updated register with the relevant information of the known peers. It should be the single source of truth for all peer data, where a subsystem can learn about peers' data and where someone can listen for updates. The PeerStore comprises four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. 
- -The PeerStore manages the high level operations on its inner books. Moreover, the PeerStore should be responsible for notifying interested parties of relevant events, through its Event Emitter. - -### Submitting records to the PeerStore - -Several libp2p subsystems will perform operations that might gather relevant information about peers. - -#### Identify - -- The Identify protocol automatically runs on every connection when multiplexing is enabled. The protocol will put the multiaddrs and protocols provided by the peer to the PeerStore. -- In the background, the Identify Service is also waiting for protocol change notifications of peers via the IdentifyPush protocol. Peers may leverage the `identify-push` message to communicate protocol changes to all connected peers, so that their PeerStore can be updated with the updated protocols. -- While it is currently not supported in js-libp2p, future iterations may also support the [IdentifyDelta protocol](https://github.com/libp2p/specs/pull/176). -- Taking into account that the Identify protocol records are directly from the peer, they should be considered the source of truth and weighted accordingly. - -#### Peer Discovery - -- Libp2p discovery protocols aim to discover new peers in the network. In a typical discovery protocol, addresses of the peer are discovered along with its peer id. Once this happens, a libp2p discovery protocol should emit a `peer` event with the information of the discovered peer and this information will be added to the PeerStore by libp2p. - -#### Dialer - -- Libp2p API supports dialing a peer given a `multiaddr`, and no prior knowledge of the peer. If the node is able to establish a connection with the peer, it and its multiaddr is added to the PeerStore. -- When a connection is being upgraded, more precisely after its encryption, or even in a discovery protocol, a libp2p node can get to know other parties public keys. In this scenario, libp2p will add the peer's public key to its `KeyBook`. - -#### DHT - -- On some DHT operations, such as finding providers for a given CID, nodes may exchange peer data as part of the query. This passive peer discovery should result in the DHT emitting the `peer` event in the same way [Peer Discovery](#peerdiscovery) does. - -### Retrieving records from the PeerStore - -When data in the PeerStore is updated the PeerStore will emit events based on the changes, to allow applications and other subsystems to take action on those changes. Any subsystem interested in these notifications should subscribe the [`PeerStore events`][peer-store-events]. - -#### Peer - -- Each time a new peer is discovered, the PeerStore should emit a [`peer` event][peer-store-events], so that interested parties can leverage this peer and establish a connection with it. - -#### Protocols - -- When the known protocols of a peer change, the PeerStore emits a [`change:protocols` event][peer-store-events]. - -#### Multiaddrs - -- When the known listening `multiaddrs` of a peer change, the PeerStore emits a [`change:multiaddrs` event][peer-store-events]. - -### PeerStore implementation - -The PeerStore wraps four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. Moreover, it provides a high level API for those components, as well as data events. - -### Components - -#### Address Book - -The `addressBook` keeps the known multiaddrs of a peer. The multiaddrs of each peer may change over time and the Address Book must account for this. 
- -`Map` - -A `peerId.toString()` identifier mapping to a `Address` object, which should have the following structure: - -```js -{ - multiaddr: -} -``` - -#### Key Book - -The `keyBook` tracks the public keys of the peers by keeping their [`PeerId`][peer-id]. - -`Map>` - -A `peerId.toString()` identifier mapping to a `Set` of protocol identifier strings. - -#### Metadata Book - -The `metadataBook` keeps track of the known metadata of a peer. Its metadata is stored in a key value fashion, where a key identifier (`string`) represents a metadata value (`Uint8Array`). - -`Map>` - -A `peerId.toString()` identifier mapping to the peer metadata Map. - -### API - -For the complete API documentation, you should check the [API.md](https://libp2p.github.io/js-libp2p-peer-store). - -Access to its underlying books: - -- `peerStore.addressBook.*` -- `peerStore.keyBook.*` -- `peerStore.metadataBook.*` -- `peerStore.protoBook.*` - -### Events - -- `peer` - emitted when a new peer is added. -- `change:multiaddrs` - emitted when a known peer has a different set of multiaddrs. -- `change:protocols` - emitted when a known peer supports a different set of protocols. -- `change:pubkey` - emitted when a peer's public key is known. -- `change:metadata` - emitted when known metadata of a peer changes. - -## Data Persistence - -The data stored in the PeerStore can be persisted if configured appropriately. Keeping a record of the peers already discovered by the peer, as well as their known data aims to improve the efficiency of peers joining the network after being offline. - -The libp2p node will need to receive a [datastore](https://github.com/ipfs/interface-datastore), in order to persist this data across restarts. A [datastore](https://github.com/ipfs/interface-datastore) stores its data in a key-value fashion. As a result, we need coherent keys so that we do not overwrite data. - -The PeerStore should not continuously update the datastore whenever data is changed. Instead, it should only store new data after reaching a certain threshold of "dirty" peers, as well as when the node is stopped, in order to batch writes to the datastore. - -The peer id will be appended to the datastore key for each data namespace. The namespaces were defined as follows: - -**AddressBook** - -All the known peer addresses are stored with a key pattern as follows: - -`/peers/addrs/` - -**ProtoBook** - -All the known peer protocols are stored with a key pattern as follows: - -`/peers/protos/` - -**KeyBook** - -All public keys are stored under the following pattern: - -` /peers/keys/` - -**MetadataBook** - -Metadata is stored under the following key pattern: - -`/peers/metadata//` - -## Future Considerations - -- If multiaddr TTLs are added, the PeerStore may schedule jobs to delete all addresses that exceed the TTL to prevent AddressBook bloating -- Further API methods will probably need to be added in the context of multiaddr validity and confidence. -- When improving libp2p configuration for specific runtimes, we should take into account the PeerStore recommended datastore. -- When improving libp2p configuration, we should think about a possible way of allowing the configuration of Bootstrap to be influenced by the persisted peers, as a way to decrease the load on Bootstrap nodes. 
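> The sections removed above documented the old per-book API (`addressBook`, `keyBook`, `protoBook`, `metadataBook`). The replacement surface added by this patch is the atomic `save`/`patch`/`merge` trio plus a single `peer:update` event (see `src/index.ts` further down). A minimal usage sketch follows — it assumes the package is imported as `@libp2p/peer-store`, and that the `PeerData` shape from `@libp2p/interface-peer-store` accepts `multiaddrs` and `protocols` fields (that interface is not part of this patch); the peer id factory and in-memory datastore come from the dev dependencies listed in `package.json`.

```ts
import { PersistentPeerStore } from '@libp2p/peer-store'
import { createEd25519PeerId } from '@libp2p/peer-id-factory'
import { MemoryDatastore } from 'datastore-core'
import { multiaddr } from '@multiformats/multiaddr'

// back the store with an in-memory datastore for the example
const peerStore = new PersistentPeerStore({
  peerId: await createEd25519PeerId(),
  datastore: new MemoryDatastore()
})

// one event replaces the old change:multiaddrs / change:protocols / change:metadata events
peerStore.addEventListener('peer:update', (evt) => {
  const { peer, previous } = evt.detail
  console.info('peer %s now has %d addresses (was %d)',
    peer.id.toString(), peer.addresses.length, previous?.addresses.length ?? 0)
})

const remotePeer = await createEd25519PeerId()

// patch replaces the supplied fields in a single atomic write
await peerStore.patch(remotePeer, {
  multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/4001')],
  protocols: ['/ipfs/ping/1.0.0']
})

// merge unions the new values with whatever is already stored
await peerStore.merge(remotePeer, {
  protocols: ['/ipfs/id/1.0.0']
})

const peer = await peerStore.get(remotePeer)
console.info(peer.protocols) // [ '/ipfs/id/1.0.0', '/ipfs/ping/1.0.0' ]
```

> The `patch`/`merge` split above is what the new `store.ts` in this patch implements: `patch` overwrites the supplied fields, while `merge` dedupes addresses, protocols, metadata and tags against the stored record before writing, and either path only dispatches `peer:update` (or `self:peer:update` for the local peer) when the stored bytes actually change.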
- ## API Docs - diff --git a/package.json b/package.json index 31c2337..8d6b2c1 100644 --- a/package.json +++ b/package.json @@ -131,7 +131,7 @@ "clean": "aegir clean", "lint": "aegir lint", "dep-check": "aegir dep-check -i protons", - "generate": "protons src/pb/peer.proto src/pb/tags.proto", + "generate": "protons src/pb/*.proto", "build": "aegir build", "test": "aegir test", "test:chrome": "aegir test -t browser --cov", @@ -144,17 +144,14 @@ "docs": "aegir docs" }, "dependencies": { + "@libp2p/crypto": "^1.0.15", + "@libp2p/interface-libp2p": "^1.3.1", "@libp2p/interface-peer-id": "^2.0.0", - "@libp2p/interface-peer-info": "^1.0.3", "@libp2p/interface-peer-store": "^1.2.2", - "@libp2p/interface-record": "^2.0.1", "@libp2p/interfaces": "^3.2.0", - "@libp2p/logger": "^2.0.0", "@libp2p/peer-id": "^2.0.0", - "@libp2p/peer-record": "^5.0.0", "@multiformats/multiaddr": "^12.0.0", "interface-datastore": "^8.0.0", - "mortice": "^3.0.0", "multiformats": "^11.0.0", "protons-runtime": "^5.0.0", "uint8arraylist": "^2.1.1", @@ -162,12 +159,11 @@ }, "devDependencies": { "@libp2p/peer-id-factory": "^2.0.0", - "@libp2p/utils": "^3.0.2", "aegir": "^38.1.6", "datastore-core": "^9.0.1", "delay": "^5.0.0", "p-defer": "^4.0.0", - "p-wait-for": "^5.0.0", + "p-event": "^5.0.1", "protons": "^7.0.2", "sinon": "^15.0.1" } diff --git a/src/address-book.ts b/src/address-book.ts deleted file mode 100644 index 0552d8c..0000000 --- a/src/address-book.ts +++ /dev/null @@ -1,367 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { isMultiaddr } from '@multiformats/multiaddr' -import { codes } from './errors.js' -import { PeerRecord, RecordEnvelope } from '@libp2p/peer-record' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Address, AddressFilter, Peer, PeerMultiaddrsChangeData, PeerStore } from '@libp2p/interface-peer-store' -import type { Store } from './store.js' -import type { Envelope } from '@libp2p/interface-record' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { PeerInfo } from '@libp2p/interface-peer-info' -import type { Multiaddr } from '@multiformats/multiaddr' - -const log = logger('libp2p:peer-store:address-book') -const EVENT_NAME = 'change:multiaddrs' - -async function allowAll (): Promise { - return true -} - -export class PeerStoreAddressBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - private readonly addressFilter: AddressFilter - - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store, addressFilter?: AddressFilter) { - this.dispatchEvent = dispatchEvent - this.store = store - this.addressFilter = addressFilter ?? allowAll - } - - /** - * ConsumePeerRecord adds addresses from a signed peer record contained in a record envelope. - * This will return a boolean that indicates if the record was successfully processed and added - * into the AddressBook. 
- */ - async consumePeerRecord (envelope: Envelope): Promise { - log.trace('consumePeerRecord await write lock') - const release = await this.store.lock.writeLock() - log.trace('consumePeerRecord got write lock') - - let peerId - let peer: Peer | undefined - let updatedPeer - - try { - let peerRecord - try { - peerRecord = PeerRecord.createFromProtobuf(envelope.payload) - } catch (err: any) { - log.error('invalid peer record received') - return false - } - - peerId = peerRecord.peerId - const multiaddrs = peerRecord.multiaddrs - - // Verify peerId - if (!peerId.equals(envelope.peerId)) { - log('signing key does not match PeerId in the PeerRecord') - return false - } - - // ensure the record has multiaddrs - if (multiaddrs == null || multiaddrs.length === 0) { - return false - } - - if (await this.store.has(peerId)) { - peer = await this.store.load(peerId) - - if (peer.peerRecordEnvelope != null) { - const storedEnvelope = await RecordEnvelope.createFromProtobuf(peer.peerRecordEnvelope) - const storedRecord = PeerRecord.createFromProtobuf(storedEnvelope.payload) - - // ensure seq is greater than, or equal to, the last received - if (storedRecord.seqNumber >= peerRecord.seqNumber) { - log('sequence number was lower or equal to existing sequence number - stored: %d received: %d', storedRecord.seqNumber, peerRecord.seqNumber) - return false - } - } - } - - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter, true) - - // Replace unsigned addresses by the new ones from the record - // TODO: Once we have ttls for the addresses, we should merge these in - updatedPeer = await this.store.patchOrCreate(peerId, { - addresses, - peerRecordEnvelope: envelope.marshal().subarray() - }) - - log('stored provided peer record for %p', peerRecord.peerId) - } finally { - log.trace('consumePeerRecord release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(({ multiaddr }) => multiaddr), - oldMultiaddrs: peer == null ? [] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - return true - } - - async getRawEnvelope (peerId: PeerId): Promise { - log.trace('getRawEnvelope await read lock') - const release = await this.store.lock.readLock() - log.trace('getRawEnvelope got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.peerRecordEnvelope - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('getRawEnvelope release read lock') - release() - } - } - - /** - * Get an Envelope containing a PeerRecord for the given peer. - * Returns undefined if no record exists. 
- */ - async getPeerRecord (peerId: PeerId): Promise { - const raw = await this.getRawEnvelope(peerId) - - if (raw == null) { - return undefined - } - - return await RecordEnvelope.createFromProtobuf(raw) - } - - async get (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('get wait for read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.addresses - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return [] - } - - async set (peerId: PeerId, multiaddrs: Multiaddr[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(multiaddrs)) { - log.error('multiaddrs must be an array of Multiaddrs') - throw new CodeError('multiaddrs must be an array of Multiaddrs', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let hasPeer = false - let peer: Peer | undefined - let updatedPeer - - try { - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter) - - // No valid addresses found - if (addresses.length === 0) { - return - } - - try { - peer = await this.store.load(peerId) - hasPeer = true - - if (new Set([ - ...addresses.map(({ multiaddr }) => multiaddr.toString()), - ...peer.addresses.map(({ multiaddr }) => multiaddr.toString()) - ]).size === peer.addresses.length && addresses.length === peer.addresses.length) { - // not changing anything, no need to update - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { addresses }) - - log('set multiaddrs for %p', peerId) - } finally { - log.trace('set multiaddrs for %p', peerId) - log('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - oldMultiaddrs: peer == null ? 
[] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - // Notify the existence of a new peer - if (!hasPeer) { - this.dispatchEvent(new CustomEvent('peer', { - detail: { - id: peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - protocols: updatedPeer.protocols - } - })) - } - } - - async add (peerId: PeerId, multiaddrs: Multiaddr[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(multiaddrs)) { - log.error('multiaddrs must be an array of Multiaddrs') - throw new CodeError('multiaddrs must be an array of Multiaddrs', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('add await write lock') - const release = await this.store.lock.writeLock() - log.trace('add got write lock') - - let hasPeer = false - let peer: Peer | undefined - let updatedPeer - - try { - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter) - - // No valid addresses found - if (addresses.length === 0) { - return - } - - try { - peer = await this.store.load(peerId) - hasPeer = true - - if (new Set([ - ...addresses.map(({ multiaddr }) => multiaddr.toString()), - ...peer.addresses.map(({ multiaddr }) => multiaddr.toString()) - ]).size === peer.addresses.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { addresses }) - - log('added multiaddrs for %p', peerId) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - oldMultiaddrs: peer == null ? [] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - // Notify the existence of a new peer - if (!hasPeer) { - this.dispatchEvent(new CustomEvent('peer', { - detail: { - id: peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - protocols: updatedPeer.protocols - } - })) - } - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - addresses: [] - }) - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: [], - oldMultiaddrs: peer == null ? 
[] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - } - } -} - -async function filterMultiaddrs (peerId: PeerId, multiaddrs: Multiaddr[], addressFilter: AddressFilter, isCertified: boolean = false): Promise { - const output: Address[] = [] - - await Promise.all( - multiaddrs.map(async multiaddr => { - if (!isMultiaddr(multiaddr)) { - log.error('multiaddr must be an instance of Multiaddr') - throw new CodeError('multiaddr must be an instance of Multiaddr', codes.ERR_INVALID_PARAMETERS) - } - - const include = await addressFilter(peerId, multiaddr) - - if (!include) { - return - } - - output.push({ - multiaddr, - isCertified - }) - }) - ) - - return output -} diff --git a/src/errors.ts b/src/errors.ts index 60efb24..48c52e7 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -1,5 +1,4 @@ export const codes = { - ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS', - ERR_NOT_FOUND: 'ERR_NOT_FOUND' + ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS' } diff --git a/src/index.ts b/src/index.ts index 4970254..103c58d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,63 +1,81 @@ -import { logger } from '@libp2p/logger' import { EventEmitter } from '@libp2p/interfaces/events' -import { PeerStoreAddressBook } from './address-book.js' -import { PeerStoreKeyBook } from './key-book.js' -import { PeerStoreMetadataBook } from './metadata-book.js' -import { PeerStoreProtoBook } from './proto-book.js' -import { PersistentStore, Store } from './store.js' -import type { PeerStore, AddressBook, KeyBook, MetadataBook, ProtoBook, PeerStoreEvents, PeerStoreInit, Peer, TagOptions } from '@libp2p/interface-peer-store' +import { PeerUpdate, PersistentStore } from './store.js' +import type { PeerStore, Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' -import { CodeError } from '@libp2p/interfaces/errors' -import { Tag, Tags } from './pb/tags.js' import type { Datastore } from 'interface-datastore' - -const log = logger('libp2p:peer-store') +import type { Multiaddr } from '@multiformats/multiaddr' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from './errors.js' export interface PersistentPeerStoreComponents { peerId: PeerId datastore: Datastore } +export interface AddressFilter { + (peerId: PeerId, multiaddr: Multiaddr): Promise +} + +export interface PersistentPeerStoreInit { + addressFilter?: AddressFilter +} + +interface PeerStoreEvents { + /** + * This event is emitted when the stored data for a peer changes. + * + * If the peer store already contained data about the peer it will be set + * as the `previous` key on the event detail. + * + * @example + * + * ```js + * peerStore.addEventListener('peer:update', (event) => { + * const { peer, previous } = event.detail + * // ... + * }) + * ``` + */ + 'peer:update': CustomEvent + + /** + * Similar to the 'peer:update' event, this event is dispatched when the + * updated peer is the current node. + * + * @example + * + * ```js + * peerStore.addEventListener('self:peer:update', (event) => { + * const { peer, previous } = event.detail + * // ... 
+ * }) + * ``` + */ + 'self:peer:update': CustomEvent +} + /** * An implementation of PeerStore that stores data in a Datastore */ export class PersistentPeerStore extends EventEmitter implements PeerStore { - public addressBook: AddressBook - public keyBook: KeyBook - public metadataBook: MetadataBook - public protoBook: ProtoBook - private readonly components: PersistentPeerStoreComponents - private readonly store: Store + private readonly store: PersistentStore - constructor (components: PersistentPeerStoreComponents, init: PeerStoreInit = {}) { + constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { super() this.components = components this.store = new PersistentStore(components) - this.addressBook = new PeerStoreAddressBook(this.dispatchEvent.bind(this), this.store, init.addressFilter) - this.keyBook = new PeerStoreKeyBook(this.dispatchEvent.bind(this), this.store) - this.metadataBook = new PeerStoreMetadataBook(this.dispatchEvent.bind(this), this.store) - this.protoBook = new PeerStoreProtoBook(this.dispatchEvent.bind(this), this.store) } async forEach (fn: (peer: Peer) => void): Promise { - log.trace('getPeers await read lock') - const release = await this.store.lock.readLock() - log.trace('getPeers got read lock') - - try { - for await (const peer of this.store.all()) { - if (peer.id.equals(this.components.peerId)) { - // Skip self peer if present - continue - } - - fn(peer) + for await (const peer of this.store.all()) { + if (peer.id.equals(this.components.peerId)) { + // Skip self peer if present + continue } - } finally { - log.trace('getPeers release read lock') - release() + + fn(peer) } } @@ -71,114 +89,55 @@ export class PersistentPeerStore extends EventEmitter implement return output } - /** - * Delete the information of the given peer in every book - */ async delete (peerId: PeerId): Promise { - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - try { - await this.store.delete(peerId) - } finally { - log.trace('delete release write lock') - release() + if (this.components.peerId.equals(peerId)) { + throw new CodeError('Cannot delete self peer', codes.ERR_INVALID_PARAMETERS) } - } - /** - * Get the stored information of a given peer - */ - async get (peerId: PeerId): Promise { - log.trace('get await read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - return await this.store.load(peerId) - } finally { - log.trace('get release read lock') - release() - } + await this.store.delete(peerId) } - /** - * Returns true if we have a record of the peer - */ async has (peerId: PeerId): Promise { - log.trace('has await read lock') - const release = await this.store.lock.readLock() - log.trace('has got read lock') - - try { - return await this.store.has(peerId) - } finally { - log.trace('has release read lock') - release() - } + return await this.store.has(peerId) } - async tagPeer (peerId: PeerId, tag: string, options: TagOptions = {}): Promise { - const providedValue = options.value ?? 0 - const value = Math.round(providedValue) - const ttl = options.ttl ?? 
undefined + async get (peerId: PeerId): Promise { + return await this.store.load(peerId) + } - if (value !== providedValue || value < 0 || value > 100) { - throw new CodeError('Tag value must be between 0-100', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - } + async save (id: PeerId, data: PeerData): Promise { + const result = await this.store.save(id, data) - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] + this.#emitIfUpdated(id, result) - if (buf != null) { - tags = Tags.decode(buf).tags - } + return result.peer + } - // do not allow duplicate tags - tags = tags.filter(t => t.name !== tag) + async patch (id: PeerId, data: PeerData): Promise { + const result = await this.store.patch(id, data) - tags.push({ - name: tag, - value, - expiry: ttl == null ? undefined : BigInt(Date.now() + ttl) - }) + this.#emitIfUpdated(id, result) - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags }).subarray()) + return result.peer } - async unTagPeer (peerId: PeerId, tag: string): Promise { - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] + async merge (id: PeerId, data: PeerData): Promise { + const result = await this.store.merge(id, data) - if (buf != null) { - tags = Tags.decode(buf).tags - } + this.#emitIfUpdated(id, result) - tags = tags.filter(t => t.name !== tag) - - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags }).subarray()) + return result.peer } - async getTags (peerId: PeerId): Promise> { - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] - - if (buf != null) { - tags = Tags.decode(buf).tags + #emitIfUpdated (id: PeerId, result: PeerUpdate): void { + if (!result.updated) { + return } - const now = BigInt(Date.now()) - const unexpiredTags = tags.filter(tag => tag.expiry == null || tag.expiry > now) - - if (unexpiredTags.length !== tags.length) { - // remove any expired tags - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags: unexpiredTags }).subarray()) + if (this.components.peerId.equals(id)) { + this.safeDispatchEvent('self:peer:update', { detail: result }) + } else { + this.safeDispatchEvent('peer:update', { detail: result }) } - - return unexpiredTags.map(t => ({ - name: t.name, - value: t.value ?? 
0 - })) } } diff --git a/src/key-book.ts b/src/key-book.ts deleted file mode 100644 index 078964b..0000000 --- a/src/key-book.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { equals as uint8arrayEquals } from 'uint8arrays/equals' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { PeerStore, KeyBook, PeerPublicKeyChangeData, Peer } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:key-book') - -const EVENT_NAME = 'change:pubkey' - -export class PeerStoreKeyBook implements KeyBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The KeyBook is responsible for keeping the known public keys of a peer - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - /** - * Set the Peer public key - */ - async set (peerId: PeerId, publicKey: Uint8Array): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!(publicKey instanceof Uint8Array)) { - log.error('publicKey must be an instance of Uint8Array to store data') - throw new CodeError('publicKey must be an instance of PublicKey', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let updatedKey = false - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - - if ((peer.pubKey != null) && uint8arrayEquals(peer.pubKey, publicKey)) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - pubKey: publicKey - }) - updatedKey = true - } finally { - log.trace('set release write lock') - release() - } - - if (updatedKey) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - publicKey, - oldPublicKey: peer == null ? undefined : peer.pubKey - } - })) - } - } - - /** - * Get Public key of the given PeerId, if stored - */ - async get (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('get await write lock') - const release = await this.store.lock.readLock() - log.trace('get got write lock') - - try { - const peer = await this.store.load(peerId) - - return peer.pubKey - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log('get release write lock') - release() - } - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - pubKey: undefined - }) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('delete release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - publicKey: undefined, - oldPublicKey: peer == null ? 
undefined : peer.pubKey - } - })) - } -} diff --git a/src/metadata-book.ts b/src/metadata-book.ts deleted file mode 100644 index 698a009..0000000 --- a/src/metadata-book.ts +++ /dev/null @@ -1,244 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { equals as uint8ArrayEquals } from 'uint8arrays/equals' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { PeerStore, MetadataBook, PeerMetadataChangeData, Peer } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:metadata-book') - -const EVENT_NAME = 'change:metadata' - -export class PeerStoreMetadataBook implements MetadataBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The MetadataBook is responsible for keeping metadata - * about known peers - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - /** - * Get the known data of a provided peer - */ - async get (peerId: PeerId): Promise> { - peerId = peerIdFromPeerId(peerId) - - log.trace('get await read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.metadata - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return new Map() - } - - /** - * Get specific metadata value, if it exists - */ - async getValue (peerId: PeerId, key: string): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('getValue await read lock') - const release = await this.store.lock.readLock() - log.trace('getValue got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.metadata.get(key) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('getValue release write lock') - release() - } - } - - async set (peerId: PeerId, metadata: Map): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!(metadata instanceof Map)) { - log.error('valid metadata must be provided to store data') - throw new CodeError('valid metadata must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.mergeOrCreate(peerId, { - metadata - }) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata, - oldMetadata: peer == null ? 
new Map() : peer.metadata - } - })) - } - - /** - * Set metadata key and value of a provided peer - */ - async setValue (peerId: PeerId, key: string, value: Uint8Array): Promise { - peerId = peerIdFromPeerId(peerId) - - if (typeof key !== 'string' || !(value instanceof Uint8Array)) { - log.error('valid key and value must be provided to store data') - throw new CodeError('valid key and value must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('setValue await write lock') - const release = await this.store.lock.writeLock() - log.trace('setValue got write lock') - - let peer: Peer | undefined - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - const existingValue = peer.metadata.get(key) - - if (existingValue != null && uint8ArrayEquals(value, existingValue)) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { - metadata: new Map([[key, value]]) - }) - } finally { - log.trace('setValue release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata: updatedPeer.metadata, - oldMetadata: peer == null ? new Map() : peer.metadata - } - })) - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - if (peer != null) { - await this.store.patch(peerId, { - metadata: new Map() - }) - } - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata: new Map(), - oldMetadata: peer.metadata - } - })) - } - } - - async deleteValue (peerId: PeerId, key: string): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('deleteValue await write lock') - const release = await this.store.lock.writeLock() - log.trace('deleteValue got write lock') - - let metadata - let peer: Peer | undefined - - try { - peer = await this.store.load(peerId) - metadata = peer.metadata - - metadata.delete(key) - - await this.store.patch(peerId, { - metadata - }) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('deleteValue release write lock') - release() - } - - if (metadata != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata, - oldMetadata: peer == null ? 
new Map() : peer.metadata - } - })) - } - } -} diff --git a/src/pb/peer.proto b/src/pb/peer.proto index 1c9cc16..01c3be1 100644 --- a/src/pb/peer.proto +++ b/src/pb/peer.proto @@ -7,14 +7,17 @@ message Peer { // The protocols the peer supports repeated string protocols = 2; - // Any peer metadata - repeated Metadata metadata = 3; - // The public key of the peer - optional bytes pub_key = 4; + optional bytes public_key = 4; // The most recently received signed PeerRecord optional bytes peer_record_envelope = 5; + + // Any peer metadata + map metadata = 6; + + // Any tags the peer has + map tags = 7; } // Address represents a single multiaddr @@ -25,7 +28,7 @@ message Address { optional bool isCertified = 2; } -message Metadata { - string key = 1; - bytes value = 2; +message Tag { + uint32 value = 1; // tag value 0-100 + optional uint64 expiry = 2; // ms timestamp after which the tag is no longer valid } diff --git a/src/pb/peer.ts b/src/pb/peer.ts index ac612d1..9ceb63f 100644 --- a/src/pb/peer.ts +++ b/src/pb/peer.ts @@ -11,12 +11,148 @@ import type { Uint8ArrayList } from 'uint8arraylist' export interface Peer { addresses: Address[] protocols: string[] - metadata: Metadata[] - pubKey?: Uint8Array + publicKey?: Uint8Array peerRecordEnvelope?: Uint8Array + metadata: Map + tags: Map } export namespace Peer { + export interface Peer$metadataEntry { + key: string + value: Uint8Array + } + + export namespace Peer$metadataEntry { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.key != null && obj.key !== '')) { + w.uint32(10) + w.string(obj.key) + } + + if ((obj.value != null && obj.value.byteLength > 0)) { + w.uint32(18) + w.bytes(obj.value) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + key: '', + value: new Uint8Array(0) + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.string() + break + case 2: + obj.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Peer$metadataEntry.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Peer$metadataEntry => { + return decodeMessage(buf, Peer$metadataEntry.codec()) + } + } + + export interface Peer$tagsEntry { + key: string + value?: Tag + } + + export namespace Peer$tagsEntry { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.key != null && obj.key !== '')) { + w.uint32(10) + w.string(obj.key) + } + + if (obj.value != null) { + w.uint32(18) + Tag.codec().encode(obj.value, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + key: '' + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.string() + break + case 2: + obj.value = Tag.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Peer$tagsEntry.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Peer$tagsEntry => { + return decodeMessage(buf, Peer$tagsEntry.codec()) + } + } + let _codec: Codec export const codec = (): Codec => { @@ -40,16 +176,9 @@ export namespace Peer { } } - if (obj.metadata != null) { - for (const value of obj.metadata) { - w.uint32(26) - Metadata.codec().encode(value, w) - } - } - - if (obj.pubKey != null) { + if (obj.publicKey != null) { w.uint32(34) - w.bytes(obj.pubKey) + w.bytes(obj.publicKey) } if (obj.peerRecordEnvelope != null) { @@ -57,6 +186,20 @@ export namespace Peer { w.bytes(obj.peerRecordEnvelope) } + if (obj.metadata != null && obj.metadata.size !== 0) { + for (const [key, value] of obj.metadata.entries()) { + w.uint32(50) + Peer.Peer$metadataEntry.codec().encode({ key, value }, w) + } + } + + if (obj.tags != null && obj.tags.size !== 0) { + for (const [key, value] of obj.tags.entries()) { + w.uint32(58) + Peer.Peer$tagsEntry.codec().encode({ key, value }, w) + } + } + if (opts.lengthDelimited !== false) { w.ldelim() } @@ -64,7 +207,8 @@ export namespace Peer { const obj: any = { addresses: [], protocols: [], - metadata: [] + metadata: new Map(), + tags: new Map() } const end = length == null ? reader.len : reader.pos + length @@ -79,15 +223,22 @@ export namespace Peer { case 2: obj.protocols.push(reader.string()) break - case 3: - obj.metadata.push(Metadata.codec().decode(reader, reader.uint32())) - break case 4: - obj.pubKey = reader.bytes() + obj.publicKey = reader.bytes() break case 5: obj.peerRecordEnvelope = reader.bytes() break + case 6: { + const entry = Peer.Peer$metadataEntry.codec().decode(reader, reader.uint32()) + obj.metadata.set(entry.key, entry.value) + break + } + case 7: { + const entry = Peer.Peer$tagsEntry.codec().decode(reader, reader.uint32()) + obj.tags.set(entry.key, entry.value) + break + } default: reader.skipType(tag & 7) break @@ -177,29 +328,29 @@ export namespace Address { } } -export interface Metadata { - key: string - value: Uint8Array +export interface Tag { + value: number + expiry?: bigint } -export namespace Metadata { - let _codec: Codec +export namespace Tag { + let _codec: Codec - export const codec = (): Codec => { + export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, w, opts = {}) => { + _codec = message((obj, w, opts = {}) => { if (opts.lengthDelimited !== false) { w.fork() } - if ((obj.key != null && obj.key !== '')) { - w.uint32(10) - w.string(obj.key) + if ((obj.value != null && obj.value !== 0)) { + w.uint32(8) + w.uint32(obj.value) } - if ((obj.value != null && obj.value.byteLength > 0)) { - w.uint32(18) - w.bytes(obj.value) + if (obj.expiry != null) { + w.uint32(16) + w.uint64(obj.expiry) } if (opts.lengthDelimited !== false) { @@ -207,8 +358,7 @@ export namespace Metadata { } }, (reader, length) => { const obj: any = { - key: '', - value: new Uint8Array(0) + value: 0 } const end = length == null ? 
reader.len : reader.pos + length @@ -218,10 +368,10 @@ export namespace Metadata { switch (tag >>> 3) { case 1: - obj.key = reader.string() + obj.value = reader.uint32() break case 2: - obj.value = reader.bytes() + obj.expiry = reader.uint64() break default: reader.skipType(tag & 7) @@ -236,11 +386,11 @@ export namespace Metadata { return _codec } - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Metadata.codec()) + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Tag.codec()) } - export const decode = (buf: Uint8Array | Uint8ArrayList): Metadata => { - return decodeMessage(buf, Metadata.codec()) + export const decode = (buf: Uint8Array | Uint8ArrayList): Tag => { + return decodeMessage(buf, Tag.codec()) } } diff --git a/src/pb/tags.proto b/src/pb/tags.proto deleted file mode 100644 index 90e172f..0000000 --- a/src/pb/tags.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -message Tags { - repeated Tag tags = 1; -} - -message Tag { - string name = 1; // e.g. 'priority' - optional uint32 value = 2; // tag value 0-100 - optional uint64 expiry = 3; // ms timestamp after which the tag is no longer valid -} diff --git a/src/pb/tags.ts b/src/pb/tags.ts deleted file mode 100644 index 0d68422..0000000 --- a/src/pb/tags.ts +++ /dev/null @@ -1,145 +0,0 @@ -/* eslint-disable import/export */ -/* eslint-disable complexity */ -/* eslint-disable @typescript-eslint/no-namespace */ -/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ -/* eslint-disable @typescript-eslint/no-empty-interface */ - -import { encodeMessage, decodeMessage, message } from 'protons-runtime' -import type { Codec } from 'protons-runtime' -import type { Uint8ArrayList } from 'uint8arraylist' - -export interface Tags { - tags: Tag[] -} - -export namespace Tags { - let _codec: Codec - - export const codec = (): Codec => { - if (_codec == null) { - _codec = message((obj, w, opts = {}) => { - if (opts.lengthDelimited !== false) { - w.fork() - } - - if (obj.tags != null) { - for (const value of obj.tags) { - w.uint32(10) - Tag.codec().encode(value, w) - } - } - - if (opts.lengthDelimited !== false) { - w.ldelim() - } - }, (reader, length) => { - const obj: any = { - tags: [] - } - - const end = length == null ? reader.len : reader.pos + length - - while (reader.pos < end) { - const tag = reader.uint32() - - switch (tag >>> 3) { - case 1: - obj.tags.push(Tag.codec().decode(reader, reader.uint32())) - break - default: - reader.skipType(tag & 7) - break - } - } - - return obj - }) - } - - return _codec - } - - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Tags.codec()) - } - - export const decode = (buf: Uint8Array | Uint8ArrayList): Tags => { - return decodeMessage(buf, Tags.codec()) - } -} - -export interface Tag { - name: string - value?: number - expiry?: bigint -} - -export namespace Tag { - let _codec: Codec - - export const codec = (): Codec => { - if (_codec == null) { - _codec = message((obj, w, opts = {}) => { - if (opts.lengthDelimited !== false) { - w.fork() - } - - if ((obj.name != null && obj.name !== '')) { - w.uint32(10) - w.string(obj.name) - } - - if (obj.value != null) { - w.uint32(16) - w.uint32(obj.value) - } - - if (obj.expiry != null) { - w.uint32(24) - w.uint64(obj.expiry) - } - - if (opts.lengthDelimited !== false) { - w.ldelim() - } - }, (reader, length) => { - const obj: any = { - name: '' - } - - const end = length == null ? 
reader.len : reader.pos + length - - while (reader.pos < end) { - const tag = reader.uint32() - - switch (tag >>> 3) { - case 1: - obj.name = reader.string() - break - case 2: - obj.value = reader.uint32() - break - case 3: - obj.expiry = reader.uint64() - break - default: - reader.skipType(tag & 7) - break - } - } - - return obj - }) - } - - return _codec - } - - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Tag.codec()) - } - - export const decode = (buf: Uint8Array | Uint8ArrayList): Tag => { - return decodeMessage(buf, Tag.codec()) - } -} diff --git a/src/proto-book.ts b/src/proto-book.ts deleted file mode 100644 index 9479e7e..0000000 --- a/src/proto-book.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { Peer, PeerProtocolsChangeData, PeerStore, ProtoBook } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:proto-book') - -const EVENT_NAME = 'change:protocols' - -export class PeerStoreProtoBook implements ProtoBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The ProtoBook is responsible for keeping the known supported - * protocols of a peer - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - async get (peerId: PeerId): Promise { - log.trace('get wait for read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.protocols - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return [] - } - - async set (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let peer - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - - if (new Set([ - ...protocols - ]).size === peer.protocols.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { - protocols - }) - - log('stored provided protocols for %p', peerId) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? 
[] : peer.protocols - } - })) - } - - async add (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('add await write lock') - const release = await this.store.lock.writeLock() - log.trace('add got write lock') - - let peer: Peer | undefined - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - - if (new Set([ - ...peer.protocols, - ...protocols - ]).size === peer.protocols.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { - protocols - }) - - log('added provided protocols for %p', peerId) - } finally { - log.trace('add release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? [] : peer.protocols - } - })) - } - - async remove (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('remove await write lock') - const release = await this.store.lock.writeLock() - log.trace('remove got write lock') - - let peer: Peer | undefined - let updatedPeer: Peer - - try { - try { - peer = await this.store.load(peerId) - const protocolSet = new Set(peer.protocols) - - for (const protocol of protocols) { - protocolSet.delete(protocol) - } - - if (peer.protocols.length === protocolSet.size) { - return - } - - protocols = Array.from(protocolSet) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { - protocols - }) - } finally { - log.trace('remove release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? 
[] : peer.protocols - } - })) - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - protocols: [] - }) - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: [], - oldProtocols: peer.protocols - } - })) - } - } -} diff --git a/src/store.ts b/src/store.ts index 9108fb9..e0e70b9 100644 --- a/src/store.ts +++ b/src/store.ts @@ -1,241 +1,171 @@ -import { logger } from '@libp2p/logger' import { peerIdFromBytes } from '@libp2p/peer-id' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { Key } from 'interface-datastore/key' import { base32 } from 'multiformats/bases/base32' -import { multiaddr } from '@multiformats/multiaddr' -import { Metadata, Peer as PeerPB } from './pb/peer.js' -import mortice from 'mortice' -import { equals as uint8arrayEquals } from 'uint8arrays/equals' -import type { Peer } from '@libp2p/interface-peer-store' +import { Peer as PeerPB } from './pb/peer.js' +import type { Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' import type { PersistentPeerStoreComponents } from './index.js' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { NAMESPACE_COMMON, peerIdToDatastoreKey } from './utils/peer-id-to-datastore-key.js' +import { toDatastorePeer } from './utils/peer-data-to-datastore-peer.js' +import { dedupeAddresses } from './utils/dedupe-addresses.js' +import { dedupeTags } from './utils/dedupe-tags.js' +import { dedupeMetadata } from './utils/dedupe-metadata.js' +import { bytesToPeer } from './utils/bytes-to-peer.js' +import { multiaddr } from '@multiformats/multiaddr' -const log = logger('libp2p:peer-store:store') - -const NAMESPACE_COMMON = '/peers/' - -export interface Store { - has: (peerId: PeerId) => Promise - save: (peer: Peer) => Promise - load: (peerId: PeerId) => Promise - delete: (peerId: PeerId) => Promise - merge: (peerId: PeerId, data: Partial) => Promise - mergeOrCreate: (peerId: PeerId, data: Partial) => Promise - patch: (peerId: PeerId, data: Partial) => Promise - patchOrCreate: (peerId: PeerId, data: Partial) => Promise - all: () => AsyncIterable - - lock: { - readLock: () => Promise<() => void> - writeLock: () => Promise<() => void> - } +/** + * Event detail emitted when peer data changes + */ +export interface PeerUpdate { + peer: Peer + previous?: Peer + updated: boolean } export class PersistentStore { private readonly components: PersistentPeerStoreComponents - public lock: any constructor (components: PersistentPeerStoreComponents) { this.components = components - this.lock = mortice({ - name: 'peer-store', - singleProcess: true - }) - } - - _peerIdToDatastoreKey (peerId: PeerId): Key { - if (peerId.type == null) { - log.error('peerId must be an instance of peer-id to store data') - throw new CodeError('peerId must be an instance of peer-id', codes.ERR_INVALID_PARAMETERS) - } - - const b32key = peerId.toCID().toString() - return new Key(`${NAMESPACE_COMMON}${b32key}`) } async has (peerId: PeerId): Promise { - return await 
this.components.datastore.has(this._peerIdToDatastoreKey(peerId)) + return await this.components.datastore.has(peerIdToDatastoreKey(peerId)) } async delete (peerId: PeerId): Promise { - await this.components.datastore.delete(this._peerIdToDatastoreKey(peerId)) + await this.components.datastore.delete(peerIdToDatastoreKey(peerId)) } async load (peerId: PeerId): Promise { - const buf = await this.components.datastore.get(this._peerIdToDatastoreKey(peerId)) - const peer = PeerPB.decode(buf) - const metadata = new Map() + const buf = await this.components.datastore.get(peerIdToDatastoreKey(peerId)) - for (const meta of peer.metadata) { - metadata.set(meta.key, meta.value) - } - - return { - ...peer, - id: peerId, - addresses: peer.addresses.map(({ multiaddr: ma, isCertified }) => { - return { - multiaddr: multiaddr(ma), - isCertified: isCertified ?? false - } - }), - metadata, - pubKey: peer.pubKey ?? undefined, - peerRecordEnvelope: peer.peerRecordEnvelope ?? undefined - } + return await bytesToPeer(peerId, buf) } - async save (peer: Peer): Promise { - if (peer.pubKey != null && peer.id.publicKey != null && !uint8arrayEquals(peer.pubKey, peer.id.publicKey)) { - log.error('peer publicKey bytes do not match peer id publicKey bytes') - throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS) - } - - // dedupe addresses - const addressSet = new Set() - const addresses = peer.addresses - .filter(address => { - if (addressSet.has(address.multiaddr.toString())) { - return false - } - - addressSet.add(address.multiaddr.toString()) - return true - }) - .sort((a, b) => { - return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) - }) - .map(({ multiaddr, isCertified }) => ({ - multiaddr: multiaddr.bytes, - isCertified - })) - - const metadata: Metadata[] = [] - - ;[...peer.metadata.keys()].sort().forEach(key => { - const value = peer.metadata.get(key) - - if (value != null) { - metadata.push({ key, value }) - } - }) + async save (peerId: PeerId, data: PeerData): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) - const buf = PeerPB.encode({ - addresses, - protocols: peer.protocols.sort(), - pubKey: peer.pubKey, - metadata, - peerRecordEnvelope: peer.peerRecordEnvelope - }) - - await this.components.datastore.put(this._peerIdToDatastoreKey(peer.id), buf.subarray()) + const peerPb: PeerPB = toDatastorePeer(peerId, data) - return await this.load(peer.id) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async patch (peerId: PeerId, data: Partial): Promise { - const peer = await this.load(peerId) + async patch (peerId: PeerId, data: Partial): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) + + const peer = toDatastorePeer(peerId, data) + + const peerPb: PeerPB = { + addresses: dedupeAddresses(...(peer.addresses ?? existingPeer?.addresses ?? [])), + protocols: [...new Set(peer.protocols ?? existingPeer?.protocols)], + publicKey: peer.publicKey ?? existingPeer?.id.publicKey, + peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, + metadata: peer.metadata ?? existingPeer?.metadata, + tags: peer.tags ?? 
existingPeer?.tags + } - return await this._patch(peerId, data, peer) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async patchOrCreate (peerId: PeerId, data: Partial): Promise { - let peer: Peer - - try { - peer = await this.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - - peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() } + async merge (peerId: PeerId, data: PeerData): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) + + const peer = toDatastorePeer(peerId, data) + const peerPb: PeerPB = { + addresses: dedupeAddresses(...(existingPeer?.addresses ?? []), ...peer.addresses), + protocols: [...new Set([...(existingPeer?.protocols ?? []), ...peer.protocols])], + publicKey: peer.publicKey ?? existingPeer?.id.publicKey, + peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, + metadata: dedupeMetadata(peer.metadata, existingPeer?.metadata), + tags: dedupeTags(peer.tags, existingPeer?.tags) } - return await this._patch(peerId, data, peer) - } - - async _patch (peerId: PeerId, data: Partial, peer: Peer): Promise { - return await this.save({ - ...peer, - ...data, - id: peerId - }) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async merge (peerId: PeerId, data: Partial): Promise { - const peer = await this.load(peerId) + async * all (): AsyncGenerator { + for await (const { key, value } of this.components.datastore.query({ + prefix: NAMESPACE_COMMON + })) { + // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} + const base32Str = key.toString().split('/')[2] + const buf = base32.decode(base32Str) - return await this._merge(peerId, data, peer) + yield bytesToPeer(peerIdFromBytes(buf), value) + } } - async mergeOrCreate (peerId: PeerId, data: Partial): Promise { - /** @type {Peer} */ - let peer - + async #findExistingPeer (peerId: PeerId): Promise<{ existingBuf?: Uint8Array, existingPeer?: Peer }> { try { - peer = await this.load(peerId) + const existingBuf = await this.components.datastore.get(peerIdToDatastoreKey(peerId)) + const existingPeer = await bytesToPeer(peerId, existingBuf) + + return { + existingBuf, + existingPeer + } } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { + if (err.code !== 'ERR_NOT_FOUND') { throw err } - - peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() } } - return await this._merge(peerId, data, peer) + return {} } - async _merge (peerId: PeerId, data: Partial, peer: Peer): Promise { - // if the peer has certified addresses, use those in - // favour of the supplied versions - const addresses = new Map() - - peer.addresses.forEach((addr) => { - addresses.set(addr.multiaddr.toString(), addr.isCertified) + async #saveIfDifferent (peerId: PeerId, peer: PeerPB, existingBuf?: Uint8Array, existingPeer?: Peer): Promise { + // sort fields before write so bytes are consistent + peer.addresses = peer.addresses.sort((a, b) => { + return multiaddr(a.multiaddr).toString().localeCompare(multiaddr(b.multiaddr).toString()) + }) + peer.protocols = peer.protocols.sort((a, b) => { + return a.localeCompare(b) }) + peer.metadata = sortMapByKeys(peer.metadata) + peer.tags = sortMapByKeys(peer.tags) - ;(data.addresses ?? 
[]).forEach(addr => { - const addrString = addr.multiaddr.toString() - const isAlreadyCertified = Boolean(addresses.get(addrString)) + const buf = PeerPB.encode(peer) - const isCertified = isAlreadyCertified || addr.isCertified + if (existingBuf != null && uint8ArrayEquals(buf, existingBuf)) { + return { + peer: await bytesToPeer(peerId, buf), + previous: existingPeer, + updated: false + } + } - addresses.set(addrString, isCertified) - }) + await this.components.datastore.put(peerIdToDatastoreKey(peerId), buf) - return await this.save({ - id: peerId, - addresses: Array.from(addresses.entries()).map(([addrStr, isCertified]) => { - return { - multiaddr: multiaddr(addrStr), - isCertified - } - }), - protocols: Array.from(new Set([ - ...(peer.protocols ?? []), - ...(data.protocols ?? []) - ])), - metadata: new Map([ - ...(peer.metadata?.entries() ?? []), - ...(data.metadata?.entries() ?? []) - ]), - pubKey: data.pubKey ?? (peer != null ? peer.pubKey : undefined), - peerRecordEnvelope: data.peerRecordEnvelope ?? (peer != null ? peer.peerRecordEnvelope : undefined) - }) + return { + peer: await bytesToPeer(peerId, buf), + previous: existingPeer, + updated: true + } } +} - async * all (): AsyncGenerator { - for await (const key of this.components.datastore.queryKeys({ - prefix: NAMESPACE_COMMON - })) { - // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} - const base32Str = key.toString().split('/')[2] - const buf = base32.decode(base32Str) - - yield this.load(peerIdFromBytes(buf)) - } +/** + * In JS maps are ordered by insertion order so create a new map with the + * keys inserted in alphabetical order. + */ +function sortMapByKeys (map: Map): Map { + const output = new Map() + + for (const key of [...map.keys()].sort((a, b) => { + return a.localeCompare(b) + })) { + output.set(key, map.get(key)) } + + return output } diff --git a/src/utils/bytes-to-peer.ts b/src/utils/bytes-to-peer.ts new file mode 100644 index 0000000..06021a6 --- /dev/null +++ b/src/utils/bytes-to-peer.ts @@ -0,0 +1,41 @@ +import { multiaddr } from '@multiformats/multiaddr' +import { Peer as PeerPB } from '../pb/peer.js' +import type { Peer, Tag } from '@libp2p/interface-peer-store' +import { createFromPubKey } from '@libp2p/peer-id-factory' +import { unmarshalPublicKey } from '@libp2p/crypto/keys' +import type { PeerId } from '@libp2p/interface-peer-id' + +export async function bytesToPeer (peerId: PeerId, buf: Uint8Array): Promise { + const peer = PeerPB.decode(buf) + + if (peer.publicKey != null && peerId.publicKey == null) { + peerId = await createFromPubKey(unmarshalPublicKey(peer.publicKey)) + } + + const tags = new Map() + + // remove any expired tags + const now = BigInt(Date.now()) + + for (const [key, tag] of peer.tags.entries()) { + if (tag.expiry != null && tag.expiry < now) { + continue + } + + tags.set(key, tag) + } + + return { + ...peer, + id: peerId, + addresses: peer.addresses.map(({ multiaddr: ma, isCertified }) => { + return { + multiaddr: multiaddr(ma), + isCertified: isCertified ?? false + } + }), + metadata: peer.metadata, + peerRecordEnvelope: peer.peerRecordEnvelope ?? 
undefined,
+    tags
+  }
+}
diff --git a/src/utils/dedupe-addresses.ts b/src/utils/dedupe-addresses.ts
new file mode 100644
index 0000000..be8f857
--- /dev/null
+++ b/src/utils/dedupe-addresses.ts
@@ -0,0 +1,38 @@
+import { isMultiaddr, multiaddr } from '@multiformats/multiaddr'
+import type { Address as AddressPB } from '../pb/peer.js'
+import type { Address } from '@libp2p/interface-peer-store'
+
+export function dedupeAddresses (...addresses: Array<Address | AddressPB | undefined>): AddressPB[] {
+  const addressMap = new Map()
+
+  addresses.forEach(addr => {
+    if (addr == null) {
+      return
+    }
+
+    if (addr.multiaddr instanceof Uint8Array) {
+      addr.multiaddr = multiaddr(addr.multiaddr)
+    }
+
+    if (!isMultiaddr(addr.multiaddr)) {
+      return
+    }
+
+    const isCertified = addr.isCertified ?? false
+    const maStr = addr.multiaddr.toString()
+    const existingAddr = addressMap.get(maStr)
+
+    if (existingAddr != null) {
+      addr.isCertified = existingAddr.isCertified === true || isCertified
+    } else {
+      addressMap.set(maStr, {
+        multiaddr: addr.multiaddr.bytes,
+        isCertified
+      })
+    }
+  })
+
+  return [...addressMap.values()].sort((a, b) => {
+    return a.multiaddr.toString().localeCompare(b.multiaddr.toString())
+  })
+}
diff --git a/src/utils/dedupe-metadata.ts b/src/utils/dedupe-metadata.ts
new file mode 100644
index 0000000..9999581
--- /dev/null
+++ b/src/utils/dedupe-metadata.ts
@@ -0,0 +1,25 @@
+import { CodeError } from '@libp2p/interfaces/errors'
+import { codes } from '../errors.js'
+
+/**
+ * a takes priority
+ */
+export function dedupeMetadata (a: Map<string, Uint8Array>, b?: Map<string, Uint8Array>): Map<string, Uint8Array> {
+  if (b == null) {
+    return a
+  }
+
+  const output = new Map([...b.entries(), ...a.entries()])
+
+  for (const key of output.keys()) {
+    if (typeof key !== 'string') {
+      throw new CodeError('Peer metadata keys must be strings', codes.ERR_INVALID_PARAMETERS)
+    }
+
+    if (!(output.get(key) instanceof Uint8Array)) {
+      throw new CodeError('Peer metadata values must be Uint8Arrays', codes.ERR_INVALID_PARAMETERS)
+    }
+  }
+
+  return output
+}
diff --git a/src/utils/dedupe-tags.ts b/src/utils/dedupe-tags.ts
new file mode 100644
index 0000000..89e38ae
--- /dev/null
+++ b/src/utils/dedupe-tags.ts
@@ -0,0 +1,21 @@
+import type { Tag as TagPB } from '../pb/peer.js'
+
+/**
+ * a takes priority
+ */
+export function dedupeTags (a: Map<string, TagPB>, b?: Map<string, TagPB>): Map<string, TagPB> {
+  if (b == null) {
+    return a
+  }
+
+  const output = new Map([...b.entries(), ...a.entries()])
+
+  for (const [key, tag] of output.entries()) {
+    output.set(key, {
+      value: tag.value ?? 0,
+      expiry: tag.expiry
+    })
+  }
+
+  return output
+}
diff --git a/src/utils/peer-data-to-datastore-peer.ts b/src/utils/peer-data-to-datastore-peer.ts
new file mode 100644
index 0000000..95212ba
--- /dev/null
+++ b/src/utils/peer-data-to-datastore-peer.ts
@@ -0,0 +1,118 @@
+
+import { CodeError } from '@libp2p/interfaces/errors'
+import { codes } from '../errors.js'
+import { isMultiaddr } from '@multiformats/multiaddr'
+import type { Peer as PeerPB } from '../pb/peer.js'
+import { equals as uint8arrayEquals } from 'uint8arrays/equals'
+import type { PeerData, TagOptions } from '@libp2p/interface-peer-store'
+import type { PeerId } from '@libp2p/interface-peer-id'
+
+export function toDatastorePeer (peerId: PeerId, data: PeerData): PeerPB {
+  if (data == null) {
+    throw new CodeError('Invalid PeerData', codes.ERR_INVALID_PARAMETERS)
+  }
+
+  if (data.publicKey != null && peerId.publicKey != null && !uint8arrayEquals(data.publicKey, peerId.publicKey)) {
+    throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS)
+  }
+
+  // merge addresses and multiaddrs, and dedupe
+  const addressSet = new Set()
+
+  const output: PeerPB = {
+    addresses: (data.addresses ?? [])
+      .concat((data.multiaddrs ?? 
[]).map(multiaddr => ({ multiaddr, isCertified: false })))
+      .filter(address => {
+        if (!isMultiaddr(address.multiaddr)) {
+          throw new CodeError('Invalid multiaddr', codes.ERR_INVALID_PARAMETERS)
+        }
+
+        if (addressSet.has(address.multiaddr.toString())) {
+          return false
+        }
+
+        addressSet.add(address.multiaddr.toString())
+        return true
+      })
+      .sort((a, b) => {
+        return a.multiaddr.toString().localeCompare(b.multiaddr.toString())
+      })
+      .map(({ multiaddr, isCertified }) => ({
+        multiaddr: multiaddr.bytes,
+        isCertified
+      })),
+    protocols: (data.protocols ?? []).sort(),
+    metadata: new Map(),
+    tags: new Map(),
+    publicKey: data.publicKey,
+    peerRecordEnvelope: data.peerRecordEnvelope
+  }
+
+  // remove invalid metadata
+  if (data.metadata != null) {
+    if (data.metadata instanceof Map) {
+      output.metadata = data.metadata
+    } else {
+      for (const [key, value] of Object.entries(data.metadata)) {
+        output.metadata.set(key, value)
+      }
+    }
+
+    for (const key of output.metadata.keys()) {
+      if (typeof key !== 'string') {
+        throw new CodeError('Peer metadata keys must be strings', codes.ERR_INVALID_PARAMETERS)
+      }
+
+      if (!(output.metadata.get(key) instanceof Uint8Array)) {
+        throw new CodeError('Peer metadata values must be Uint8Arrays', codes.ERR_INVALID_PARAMETERS)
+      }
+    }
+  }
+
+  if (data.tags != null) {
+    let tagOptions: Map<string, TagOptions>
+
+    if (data.tags instanceof Map) {
+      tagOptions = data.tags
+    } else {
+      tagOptions = new Map()
+
+      for (const [key, value] of Object.entries(data.tags)) {
+        tagOptions.set(key, value)
+      }
+    }
+
+    for (const [key, options] of tagOptions.entries()) {
+      const tag = {
+        name: key,
+        ttl: options.ttl,
+        value: options.value ?? 0
+      }
+
+      if (tag.value < 0 || tag.value > 100) {
+        throw new CodeError('Tag value must be between 0-100', codes.ERR_INVALID_PARAMETERS)
+      }
+
+      if (parseInt(`${tag.value}`, 10) !== tag.value) {
+        throw new CodeError('Tag value must be an integer', codes.ERR_INVALID_PARAMETERS)
+      }
+
+      if (tag.ttl != null) {
+        if (tag.ttl < 0) {
+          throw new CodeError('Tag ttl must be greater than 0', codes.ERR_INVALID_PARAMETERS)
+        }
+
+        if (parseInt(`${tag.ttl}`, 10) !== tag.ttl) {
+          throw new CodeError('Tag ttl must be an integer', codes.ERR_INVALID_PARAMETERS)
+        }
+      }
+
+      output.tags.set(tag.name, {
+        value: tag.value,
+        expiry: tag.ttl == null ? 
undefined : BigInt(Date.now() + tag.ttl) + }) + } + } + + return output +} diff --git a/src/utils/peer-id-to-datastore-key.ts b/src/utils/peer-id-to-datastore-key.ts new file mode 100644 index 0000000..2f577dd --- /dev/null +++ b/src/utils/peer-id-to-datastore-key.ts @@ -0,0 +1,15 @@ +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' +import { Key } from 'interface-datastore/key' +import { isPeerId, PeerId } from '@libp2p/interface-peer-id' + +export const NAMESPACE_COMMON = '/peers/' + +export function peerIdToDatastoreKey (peerId: PeerId): Key { + if (!isPeerId(peerId) || peerId.type == null) { + throw new CodeError('Invalid PeerId', codes.ERR_INVALID_PARAMETERS) + } + + const b32key = peerId.toCID().toString() + return new Key(`${NAMESPACE_COMMON}${b32key}`) +} diff --git a/test/address-book.spec.ts b/test/address-book.spec.ts deleted file mode 100644 index 35a7b25..0000000 --- a/test/address-book.spec.ts +++ /dev/null @@ -1,689 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 6] */ - -import { expect } from 'aegir/chai' -import { multiaddr } from '@multiformats/multiaddr' -import { arrayEquals } from '@libp2p/utils/array-equals' -import type { PeerId } from '@libp2p/interface-peer-id' -import pDefer from 'p-defer' -import { MemoryDatastore } from 'datastore-core/memory' -import { PersistentPeerStore } from '../src/index.js' -import { RecordEnvelope, PeerRecord } from '@libp2p/peer-record' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { AddressBook } from '@libp2p/interface-peer-store' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' - -const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') -const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') -const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') - -describe('addressBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('addressBook.set', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set('invalid peerId') - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no addresses provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set(peerId) - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no addresses should throw error') - }) - - it('throws invalid parameters error if invalid multiaddrs are provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set(peerId, ['invalid multiaddr']) - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid multiaddrs should throw error') - }) - - it('replaces the stored content by default and emit change event', async () => { - const defer = pDefer() - const supportedMultiaddrs = [addr1, addr2] - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - 
expect(multiaddrs).to.eql(supportedMultiaddrs) - defer.resolve() - }, { - once: true - }) - - await ab.set(peerId, supportedMultiaddrs) - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) - - return await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (same content) - await ab.set(peerId, supportedMultiaddrsB) - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) - - await defer.promise - }) - - it('does not emit on set if it is storing the exact same content', async () => { - const defer = pDefer() - - const supportedMultiaddrs = [addr1, addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrs) - - // set 2 (same content) - await ab.set(peerId, supportedMultiaddrs) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - }) - - describe('addressBook.add', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no addresses provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add(peerId) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no addresses provided should throw error') - }) - - it('throws invalid parameters error if invalid multiaddrs are provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add(peerId, ['invalid multiaddr']) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid multiaddr should throw error') - }) - - it('does not emit event if no addresses are added', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.reject() - }) - - await ab.add(peerId, []) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('emits peer event if new peer has addresses added', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.resolve() - }) - - await ab.add(await createEd25519PeerId(), [ - multiaddr('/ip4/42.14.53.21/tcp/3981') - ]) - await defer.promise - }) - - it('emits peer event if new peer has addresses set', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.resolve() - }) - - await 
ab.set(await createEd25519PeerId(), [ - multiaddr('/ip4/42.14.53.21/tcp/3981') - ]) - await defer.promise - }) - - it('adds the new content and emits change event', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr3] - const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) - - let changeTrigger = 2 - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { multiaddrs } = evt.detail - changeTrigger-- - if (changeTrigger === 0 && arrayEquals(multiaddrs, finalMultiaddrs)) { - defer.resolve() - } - }) - - // Replace - await ab.set(peerId, supportedMultiaddrsA) - let addresses = await ab.get(peerId) - let multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrsA) - - // Add - await ab.add(peerId, supportedMultiaddrsB) - addresses = await ab.get(peerId) - multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(finalMultiaddrs) - - return await defer.promise - }) - - it('emits on add if the content to add not exists', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1] - const supportedMultiaddrsB = [addr2] - const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (content already existing) - await ab.add(peerId, supportedMultiaddrsB) - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(finalMultiaddrs) - - await defer.promise - }) - - it('does not emit on add if the content to add already exists', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (content already existing) - await ab.add(peerId, supportedMultiaddrsB) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('does not add replicated content', async () => { - // set 1 - await ab.set(peerId, [addr1, addr1]) - - const addresses = await ab.get(peerId) - expect(addresses).to.have.lengthOf(1) - }) - }) - - describe('addressBook.get', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.get('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns empty if no multiaddrs are known for the provided peer', async () => { - const addresses = await ab.get(peerId) - - expect(addresses).to.be.empty() - }) - - it('returns the multiaddrs stored', async () => { - const supportedMultiaddrs = [addr1, addr2] - - await ab.set(peerId, supportedMultiaddrs) - - const addresses = await ab.get(peerId) - const multiaddrs = 
addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) - }) - }) - - describe('addressBook.delete', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.delete('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('does not emit an event if no records exist for the peer', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:multiaddrs', () => { - defer.reject() - }) - - await ab.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - - it('emits an event if the record exists', async () => { - const defer = pDefer() - - const supportedMultiaddrs = [addr1, addr2] - await ab.set(peerId, supportedMultiaddrs) - - // Listen after set - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { multiaddrs } = evt.detail - expect(multiaddrs.length).to.eql(0) - defer.resolve() - }) - - await ab.delete(peerId) - - return await defer.promise - }) - }) - - describe('certified records', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - describe('consumes a valid peer record and stores its data', () => { - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('no previous data in AddressBook', async () => { - const multiaddrs = [addr1, addr2] - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Validate AddressBook addresses - const addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('emits change:multiaddrs event when adding multiaddrs', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - return await defer.promise - }) - - it('emits change:multiaddrs event with same data currently in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, multiaddrs) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(false) - 
expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('emits change:multiaddrs event with previous partial data in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, [addr1]) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(1) - expect(addrs[0].isCertified).to.eql(false) - expect(addrs[0].multiaddr.equals(addr1)).to.eql(true) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('with previous different data in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrsUncertified = [addr3] - const multiaddrsCertified = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, multiaddrsUncertified) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrsUncertified.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(false) - expect(multiaddrsUncertified[index].equals(addr.multiaddr)).to.eql(true) - }) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs: multiaddrsCertified - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrsCertified.length) - addrs.forEach((addr, index) => { - 
expect(addr.isCertified).to.eql(true) - expect(multiaddrsCertified[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - }) - - describe('fails to consume invalid peer records', () => { - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('invalid peer record', async () => { - const invalidEnvelope = { - payload: uint8ArrayFromString('invalid-peerRecord') - } - - // @ts-expect-error invalid input - const consumed = await ab.consumePeerRecord(invalidEnvelope) - expect(consumed).to.eql(false) - }) - - it('peer that created the envelope is not the same as the peer record', async () => { - const multiaddrs = [addr1, addr2] - - // Create peer record - const peerId2 = await createEd25519PeerId() - const peerRecord = new PeerRecord({ - peerId: peerId2, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(false) - }) - - it('does not store an outdated record', async () => { - const multiaddrs = [addr1, addr2] - const peerRecord1 = new PeerRecord({ - peerId, - multiaddrs, - seqNumber: BigInt(Date.now()) - }) - const peerRecord2 = new PeerRecord({ - peerId, - multiaddrs, - seqNumber: BigInt(Date.now() - 1) - }) - const envelope1 = await RecordEnvelope.seal(peerRecord1, peerId) - const envelope2 = await RecordEnvelope.seal(peerRecord2, peerId) - - // Consume envelope1 (bigger seqNumber) - let consumed = await ab.consumePeerRecord(envelope1) - expect(consumed).to.eql(true) - - consumed = await ab.consumePeerRecord(envelope2) - expect(consumed).to.eql(false) - }) - - it('empty multiaddrs', async () => { - const peerRecord = new PeerRecord({ - peerId, - multiaddrs: [] - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(false) - }) - }) - }) -}) diff --git a/test/index.spec.ts b/test/index.spec.ts new file mode 100644 index 0000000..570afb4 --- /dev/null +++ b/test/index.spec.ts @@ -0,0 +1,161 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import delay from 'delay' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') + +describe('PersistentPeerStore', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + }) + + it('has an empty map of peers', async () => { + const peers = await peerStore.all() + expect(peers.length).to.equal(0) + }) + + describe('has', () => { + it('has peer data', async () => { + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + await peerStore.save(otherPeerId, { + multiaddrs: [ + addr1 + ] + }) + await expect(peerStore.has(otherPeerId)).to.eventually.be.true() + }) + }) + + describe('delete', () => { + it('deletes peer data', async () => { + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + await peerStore.save(otherPeerId, { + multiaddrs: [ + addr1 + ] + }) + 
await expect(peerStore.has(otherPeerId)).to.eventually.be.true() + await peerStore.delete(otherPeerId) + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + }) + + it('does not allow deleting the self peer', async () => { + await expect(peerStore.has(peerId)).to.eventually.be.false() + await peerStore.save(peerId, { + multiaddrs: [ + addr1 + ] + }) + + await expect(peerStore.delete(peerId)).to.eventually.be.rejected() + .with.property('code', 'ERR_INVALID_PARAMETERS') + }) + }) + + describe('tags', () => { + it('tags a peer', async () => { + const name = 'a-tag' + const peer = await peerStore.save(otherPeerId, { + tags: { + [name]: {} + } + }) + + expect(peer).to.have.property('tags') + .that.deep.equals(new Map([[name, { value: 0 }]]), 'Peer did not contain tag') + }) + + it('tags a peer with a value', async () => { + const name = 'a-tag' + const value = 50 + const peer = await peerStore.save(peerId, { + tags: { + [name]: { value } + } + }) + + expect(peer).to.have.property('tags') + .that.deep.equals(new Map([[name, { value }]]), 'Peer did not contain tag with a value') + }) + + it('tags a peer with a valid value', async () => { + const name = 'a-tag' + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: -1 } + } + }), 'PeerStore contain tag for peer where value was too small') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: 101 } + } + }), 'PeerStore contain tag for peer where value was too large') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: 5.5 } + } + }), 'PeerStore contain tag for peer where value was not an integer') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + }) + + it('tags a peer with an expiring value', async () => { + const name = 'a-tag' + const value = 50 + const peer = await peerStore.save(peerId, { + tags: { + [name]: { + value, + ttl: 50 + } + } + }) + + expect(peer).to.have.property('tags') + .that.has.key(name) + + await delay(100) + + const updatedPeer = await peerStore.get(peerId) + + expect(updatedPeer).to.have.property('tags') + .that.does.not.have.key(name) + }) + + it('untags a peer', async () => { + const name = 'a-tag' + const peer = await peerStore.save(peerId, { + tags: { + [name]: {} + } + }) + + expect(peer).to.have.property('tags') + .that.has.key(name) + + const updatedPeer = await peerStore.patch(peerId, { + tags: {} + }) + + expect(updatedPeer).to.have.property('tags') + .that.does.not.have.key(name) + }) + }) +}) diff --git a/test/key-book.spec.ts b/test/key-book.spec.ts deleted file mode 100644 index 1f25a37..0000000 --- a/test/key-book.spec.ts +++ /dev/null @@ -1,129 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { MemoryDatastore } from 'datastore-core/memory' -import { PersistentPeerStore } from '../src/index.js' -import pDefer from 'p-defer' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { KeyBook } from '@libp2p/interface-peer-store' - -describe('keyBook', () => { - let peerId: PeerId - let peerStore: PersistentPeerStore - let kb: KeyBook - let datastore: MemoryDatastore - - beforeEach(async () => { - peerId = await createEd25519PeerId() - datastore = new MemoryDatastore() - peerStore = new 
PersistentPeerStore({ peerId, datastore }) - kb = peerStore.keyBook - }) - - it('throws invalid parameters error if invalid PeerId is provided in set', async () => { - try { - // @ts-expect-error invalid input - await kb.set('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if invalid PeerId is provided in get', async () => { - try { - // @ts-expect-error invalid input - await kb.get('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('stores the peerId in the book and returns the public key', async () => { - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peerId, peerId.publicKey) - - // Get public key - const pubKey = await kb.get(peerId) - expect(peerId.publicKey).to.equalBytes(pubKey) - }) - - it('should not store if already stored', async () => { - const spy = sinon.spy(datastore, 'put') - const peer = await createEd25519PeerId() - - if (peer.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peer, peer.publicKey) - await kb.set(peer, peer.publicKey) - - expect(spy).to.have.property('callCount', 1) - }) - - it('should emit an event when setting a key', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:pubkey', (evt) => { - const { peerId: id, publicKey } = evt.detail - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - expect(id.toString()).to.equal(peerId.toString()) - expect(publicKey).to.equalBytes(peerId.publicKey) - defer.resolve() - }) - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peerId, peerId.publicKey) - await defer.promise - }) - - it('should not set when key does not match', async () => { - const edKey = await createEd25519PeerId() - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await expect(kb.set(edKey, peerId.publicKey)).to.eventually.be.rejectedWith(/bytes do not match/) - }) - - it('should emit an event when deleting a key', async () => { - const defer = pDefer() - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - await kb.set(peerId, peerId.publicKey) - - peerStore.addEventListener('change:pubkey', (evt) => { - const { peerId: id, publicKey } = evt.detail - expect(id.toString()).to.equal(peerId.toString()) - expect(publicKey).to.be.undefined() - defer.resolve() - }) - - await kb.delete(peerId) - await defer.promise - }) -}) diff --git a/test/merge.spec.ts b/test/merge.spec.ts new file mode 100644 index 0000000..e14b8a9 --- /dev/null +++ b/test/merge.spec.ts @@ -0,0 +1,145 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerData } from '@libp2p/interface-peer-store' +import { pEvent } from 'p-event' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = 
multiaddr('/ip4/20.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') + +describe('merge', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + }) + + it('emits peer:update event on merge', async () => { + const eventPromise = pEvent(peerStore, 'peer:update') + + await peerStore.merge(otherPeerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('emits self:peer:update event on merge for self peer', async () => { + const eventPromise = pEvent(peerStore, 'self:peer:update') + + await peerStore.merge(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('merges multiaddrs', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.save(otherPeerId, peer) + + const updated = await peerStore.merge(otherPeerId, { + multiaddrs: [ + addr3 + ] + }) + + expect(updated).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr3, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + }]) + }) + + it('merges metadata', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.save(otherPeerId, peer) + + const peerUpdate: PeerData = { + metadata: { + bar: Uint8Array.from([3, 4, 5]) + } + } + + const updated = await peerStore.merge(otherPeerId, peerUpdate) + + expect(updated).to.have.property('metadata').that.deep.equals( + new Map([ + ['foo', Uint8Array.from([0, 1, 2])], + ['bar', Uint8Array.from([3, 4, 5])] + ]) + ) + }) + + it('merges tags', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.patch(otherPeerId, peer) + + const peerUpdate: PeerData = { + tags: { + tag2: { value: 20 } + } + } + + const updated = await peerStore.merge(otherPeerId, peerUpdate) + + expect(updated).to.have.property('tags').that.deep.equals( + new Map([ + ['tag1', { value: 10 }], + ['tag2', { value: 20 }] + ]) + ) + }) +}) diff --git a/test/metadata-book.spec.ts b/test/metadata-book.spec.ts deleted file mode 100644 index bf79c0b..0000000 --- a/test/metadata-book.spec.ts +++ /dev/null @@ -1,358 +0,0 @@ - -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { MemoryDatastore } from 'datastore-core/memory' -import pDefer from 'p-defer' -import { PersistentPeerStore } from '../src/index.js' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { MetadataBook } from '@libp2p/interface-peer-store' - -describe('metadataBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('metadataBook.set', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = 
peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.set('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no metadata provided', async () => { - try { - // @ts-expect-error invalid input - await mb.set(peerId) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no key provided should throw error') - }) - - it('throws invalid parameters error if no value provided', async () => { - try { - // @ts-expect-error invalid input - await mb.setValue(peerId, 'location') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no value provided should throw error') - }) - - it('throws invalid parameters error if value is not a buffer', async () => { - try { - // @ts-expect-error invalid input - await mb.setValue(peerId, 'location', 'mars') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid value provided should throw error') - }) - - it('stores the content and emit change event', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - peerStore.addEventListener('change:metadata', (evt) => { - const { peerId, metadata } = evt.detail - expect(peerId).to.exist() - expect(metadata.get(metadataKey)).to.equalBytes(metadataValue) - defer.resolve() - }, { - once: true - }) - - await mb.setValue(peerId, metadataKey, metadataValue) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.equalBytes(metadataValue) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue) - - return await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue1 = uint8ArrayFromString('mars') - const metadataValue2 = uint8ArrayFromString('saturn') - - let changeCounter = 0 - peerStore.addEventListener('change:metadata', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await mb.setValue(peerId, metadataKey, metadataValue1) - - // set 2 (same content) - await mb.setValue(peerId, metadataKey, metadataValue2) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.equalBytes(metadataValue2) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue2) - - return await defer.promise - }) - - it('does not emit on set if it is storing the exact same content', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - let changeCounter = 0 - peerStore.addEventListener('change:metadata', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await mb.setValue(peerId, metadataKey, metadataValue) - - // set 2 (same content) - await mb.setValue(peerId, metadataKey, metadataValue) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - 
describe('metadataBook.get', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.get('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns empty if no metadata is known for the provided peer', async () => { - const metadata = await mb.get(peerId) - - expect(metadata).to.be.empty() - }) - - it('returns the metadata stored', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - const metadata = new Map() - metadata.set(metadataKey, metadataValue) - - await mb.set(peerId, metadata) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue) - }) - }) - - describe('metadataBook.getValue', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.getValue('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns undefined if no metadata is known for the provided peer', async () => { - const metadataKey = 'location' - const metadata = await mb.getValue(peerId, metadataKey) - - expect(metadata).to.not.exist() - }) - - it('returns the metadata value stored for the given key', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.exist() - expect(value).to.equalBytes(metadataValue) - }) - - it('returns undefined if no metadata is known for the provided peer and key', async () => { - const metadataKey = 'location' - const metadataBadKey = 'nickname' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - const metadata = await mb.getValue(peerId, metadataBadKey) - expect(metadata).to.not.exist() - }) - }) - - describe('metadataBook.delete', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.delete('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:metadata', () => { - defer.reject() - }) - - await mb.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - 
}) - - it('should emit an event if the record exists for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - // Listen after set - peerStore.addEventListener('change:metadata', () => { - defer.resolve() - }) - - await mb.delete(peerId) - - return await defer.promise - }) - }) - - describe('metadataBook.deleteValue', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.deleteValue('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - - peerStore.addEventListener('change:metadata', () => { - defer.reject() - }) - - await mb.deleteValue(peerId, metadataKey) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - - it('should emit event if a record exists for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - // Listen after set - peerStore.addEventListener('change:metadata', () => { - defer.resolve() - }) - - await mb.deleteValue(peerId, metadataKey) - - return await defer.promise - }) - }) -}) diff --git a/test/patch.spec.ts b/test/patch.spec.ts new file mode 100644 index 0000000..146444f --- /dev/null +++ b/test/patch.spec.ts @@ -0,0 +1,135 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerData } from '@libp2p/interface-peer-store' +import { pEvent } from 'p-event' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') + +describe('patch', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + }) + + it('emits peer:update event on patch', async () => { + const eventPromise = pEvent(peerStore, 'peer:update') + + await peerStore.patch(otherPeerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('emits self:peer:update event on patch for self peer', async () => { + const eventPromise = pEvent(peerStore, 'self:peer:update') + + await peerStore.patch(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('replaces multiaddrs', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) 
+ }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.patch(otherPeerId, peer) + + const peerUpdate: PeerData = { + multiaddrs: [ + addr3 + ] + } + + const updated = await peerStore.patch(otherPeerId, peerUpdate) + + expect(updated).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr3, + isCertified: false + }]) + }) + + it('replaces metadata', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.patch(peerId, peer) + + const peerUpdate: PeerData = { + metadata: { + bar: Uint8Array.from([3, 4, 5]) + } + } + + const updated = await peerStore.patch(otherPeerId, peerUpdate) + + expect(updated).to.have.property('metadata').that.deep.equals( + new Map([['bar', Uint8Array.from([3, 4, 5])]]) + ) + }) + + it('replaces tags', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + } + } + + await peerStore.patch(peerId, peer) + + const peerUpdate: PeerData = { + tags: { + tag2: { value: 20 } + } + } + + const updated = await peerStore.patch(otherPeerId, peerUpdate) + + expect(updated).to.have.property('tags').that.deep.equals( + new Map([['tag2', { value: 20 }]]) + ) + }) +}) diff --git a/test/peer-store.spec.ts b/test/peer-store.spec.ts deleted file mode 100644 index 6a07953..0000000 --- a/test/peer-store.spec.ts +++ /dev/null @@ -1,324 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { PersistentPeerStore } from '../src/index.js' -import { multiaddr } from '@multiformats/multiaddr' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { MemoryDatastore } from 'datastore-core/memory' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import delay from 'delay' - -const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') -const addr2 = multiaddr('/ip4/127.0.0.1/tcp/8001') -const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') -const addr4 = multiaddr('/ip4/127.0.0.1/tcp/8003') - -const proto1 = '/protocol1' -const proto2 = '/protocol2' -const proto3 = '/protocol3' - -describe('peer-store', () => { - let peerIds: PeerId[] - before(async () => { - peerIds = await Promise.all([ - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId() - ]) - }) - - describe('empty books', () => { - let peerStore: PersistentPeerStore - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('has an empty map of peers', async () => { - const peers = await peerStore.all() - expect(peers.length).to.equal(0) - }) - - it('deletes a peerId', async () => { - await peerStore.addressBook.set(peerIds[0], [multiaddr('/ip4/127.0.0.1/tcp/4001')]) - await expect(peerStore.has(peerIds[0])).to.eventually.be.true() - await peerStore.delete(peerIds[0]) - await expect(peerStore.has(peerIds[0])).to.eventually.be.false() - }) - - it('sets the peer\'s public key to the KeyBook', async () => { - if (peerIds[0].publicKey == null) { - throw new Error('Public key was missing') - } - - await peerStore.keyBook.set(peerIds[0], peerIds[0].publicKey) - await expect(peerStore.keyBook.get(peerIds[0])).to.eventually.deep.equal(peerIds[0].publicKey) - }) - }) - - describe('previously populated books', () => { - let peerStore: 
PersistentPeerStore - - beforeEach(async () => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - - // Add peer0 with { addr1, addr2 } and { proto1 } - await peerStore.addressBook.set(peerIds[0], [addr1, addr2]) - await peerStore.protoBook.set(peerIds[0], [proto1]) - - // Add peer1 with { addr3 } and { proto2, proto3 } - await peerStore.addressBook.set(peerIds[1], [addr3]) - await peerStore.protoBook.set(peerIds[1], [proto2, proto3]) - - // Add peer2 with { addr4 } - await peerStore.addressBook.set(peerIds[2], [addr4]) - - // Add peer3 with { addr4 } and { proto2 } - await peerStore.addressBook.set(peerIds[3], [addr4]) - await peerStore.protoBook.set(peerIds[3], [proto2]) - }) - - it('has peers', async () => { - const peers = await peerStore.all() - - expect(peers.length).to.equal(4) - expect(peers.map(peer => peer.id.toString())).to.have.members([ - peerIds[0].toString(), - peerIds[1].toString(), - peerIds[2].toString(), - peerIds[3].toString() - ]) - }) - - it('deletes a stored peer', async () => { - await peerStore.delete(peerIds[0]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(3) - expect(Array.from(peers.keys())).to.not.have.members([peerIds[0].toString()]) - }) - - it('deletes a stored peer which is only on one book', async () => { - await peerStore.delete(peerIds[2]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(3) - }) - - it('gets the stored information of a peer in all its books', async () => { - const peer = await peerStore.get(peerIds[0]) - expect(peer).to.exist() - expect(peer.protocols).to.have.members([proto1]) - - const peerMultiaddrs = peer.addresses.map((mi) => mi.multiaddr) - expect(peerMultiaddrs).to.have.deep.members([addr1, addr2]) - - expect(peer.id.toString()).to.equal(peerIds[0].toString()) - }) - - it('gets the stored information of a peer that is not present in all its books', async () => { - const peers = await peerStore.get(peerIds[2]) - expect(peers).to.exist() - expect(peers.protocols.length).to.eql(0) - - const peerMultiaddrs = peers.addresses.map((mi) => mi.multiaddr) - expect(peerMultiaddrs).to.have.deep.members([addr4]) - }) - - it('can find all the peers supporting a protocol', async () => { - const peerSupporting2 = [] - - for await (const peer of await peerStore.all()) { - if (peer.protocols.includes(proto2)) { - peerSupporting2.push(peer) - } - } - - expect(peerSupporting2.length).to.eql(2) - expect(peerSupporting2[0].id.toString()).to.eql(peerIds[1].toString()) - expect(peerSupporting2[1].id.toString()).to.eql(peerIds[3].toString()) - }) - - it('can find all the peers listening on a given address', async () => { - const peerListening4 = [] - - for await (const peer of await peerStore.all()) { - const multiaddrs = peer.addresses.map((mi) => mi.multiaddr.toString()) - - if (multiaddrs.includes(addr4.toString())) { - peerListening4.push(peer) - } - } - - expect(peerListening4.length).to.eql(2) - expect(peerListening4[0].id.toString()).to.eql(peerIds[2].toString()) - expect(peerListening4[1].id.toString()).to.eql(peerIds[3].toString()) - }) - }) - - describe('peerStore.getPeers', () => { - let peerStore: PersistentPeerStore - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('returns peers if only addresses are known', async () => { - await peerStore.addressBook.set(peerIds[0], [addr1]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const 
peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(1) - expect(peerData.protocols).to.have.lengthOf(0) - expect(peerData.metadata).to.be.empty() - }) - - it('returns peers if only protocols are known', async () => { - await peerStore.protoBook.set(peerIds[0], [proto1]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(0) - expect(peerData.protocols).to.have.lengthOf(1) - expect(peerData.metadata).to.be.empty() - }) - - it('returns peers if only metadata is known', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('earth') - await peerStore.metadataBook.setValue(peerIds[0], metadataKey, metadataValue) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(0) - expect(peerData.protocols).to.have.lengthOf(0) - expect(peerData.metadata).to.exist() - expect(peerData.metadata.get(metadataKey)).to.equalBytes(metadataValue) - }) - }) - - describe('tags', () => { - let peerStore: PersistentPeerStore - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('tags a peer', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag for peer') - .to.eventually.deep.include.members([{ - name, - value: 0 - }]) - }) - - it('tags a peer with a value', async () => { - const name = 'a-tag' - const value = 50 - await peerStore.tagPeer(peerIds[0], name, { - value - }) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag for peer with a value') - .to.eventually.deep.include.members([{ - name, - value - }]) - }) - - it('tags a peer with a valid value', async () => { - const name = 'a-tag' - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: -1 - }), 'PeerStore contain tag for peer where value was too small') - .to.eventually.be.rejected().with.property('code', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: 101 - }), 'PeerStore contain tag for peer where value was too large') - .to.eventually.be.rejected().with.property('code', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: 5.5 - }), 'PeerStore contain tag for peer where value was not an integer') - .to.eventually.be.rejected().with.property('code', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - }) - - it('tags a peer with an expiring value', async () => { - const name = 'a-tag' - const value = 50 - await peerStore.tagPeer(peerIds[0], name, { - value, - ttl: 50 - }) - - await expect(peerStore.getTags(peerIds[0])) - .to.eventually.deep.include.members([{ - name, - value - }], 'PeerStore did not contain expiring value') - - await delay(100) - - await expect(peerStore.getTags(peerIds[0])) - .to.eventually.not.deep.include.members([{ - name, - value - }], 'PeerStore contained expired value') - }) - - it('does not tag a peer twice', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name, { - value: 1 - }) - await peerStore.tagPeer(peerIds[0], name, { - value: 10 - }) - - const allTags = await peerStore.getTags(peerIds[0]) 
- const tags = allTags.filter(t => t.name === name) - - expect(tags).to.have.lengthOf(1) - expect(tags).to.have.nested.property('[0].value', 10) - }) - - it('untags a peer', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag') - .to.eventually.deep.include.members([{ - name, - value: 0 - }]) - - await peerStore.unTagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore contained untagged tag') - .to.eventually.not.deep.include.members([{ - name, - value: 0 - }]) - }) - }) -}) diff --git a/test/proto-book.spec.ts b/test/proto-book.spec.ts deleted file mode 100644 index 84dbeb8..0000000 --- a/test/proto-book.spec.ts +++ /dev/null @@ -1,388 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { MemoryDatastore } from 'datastore-core/memory' -import pDefer from 'p-defer' -import pWaitFor from 'p-wait-for' -import { PersistentPeerStore } from '../src/index.js' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { ProtoBook } from '@libp2p/interface-peer-store' - -const arraysAreEqual = (a: string[], b: string[]): boolean => { - if (a.length !== b.length) { - return false - } - - return a.sort().every((item, index) => b[index] === item) -} - -describe('protoBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('protoBook.set', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.set('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.set(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('replaces the stored content by default and emit change event', async () => { - const defer = pDefer() - const supportedProtocols = ['protocol1', 'protocol2'] - - peerStore.addEventListener('change:protocols', (evt) => { - const { peerId, protocols } = evt.detail - expect(peerId).to.exist() - expect(protocols).to.have.deep.members(supportedProtocols) - defer.resolve() - }, { - once: true - }) - - await pb.set(peerId, supportedProtocols) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - - await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocolsA) - - // set 2 (same content) - await pb.set(peerId, supportedProtocolsB) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocolsB) - - await defer.promise - }) - - it('does not emit on set if it is storing the 
exact same content', async () => { - const defer = pDefer() - - const supportedProtocols = ['protocol1', 'protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocols) - - // set 2 (same content) - await pb.set(peerId, supportedProtocols) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - describe('protoBook.add', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.add('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.add(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('adds the new content and emits change event', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol3'] - const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) - - let changeTrigger = 2 - peerStore.addEventListener('change:protocols', (evt) => { - const { protocols } = evt.detail - changeTrigger-- - if (changeTrigger === 0 && arraysAreEqual(protocols, finalProtocols)) { - defer.resolve() - } - }) - - // Replace - await pb.set(peerId, supportedProtocolsA) - let protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocolsA) - - // Add - await pb.add(peerId, supportedProtocolsB) - protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - return await defer.promise - }) - - it('emits on add if the content to add not exists', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1'] - const supportedProtocolsB = ['protocol2'] - const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocolsA) - - // set 2 (content already existing) - await pb.add(peerId, supportedProtocolsB) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - return await defer.promise - }) - - it('does not emit on add if the content to add already exists', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocolsA) - - // set 2 (content already existing) - await pb.add(peerId, supportedProtocolsB) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - describe('protoBook.remove', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - 
peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.remove('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.remove(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('removes the given protocol and emits change event', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol1'] - const finalProtocols = supportedProtocols.filter(p => !removedProtocols.includes(p)) - - peerStore.addEventListener('change:protocols', spy) - - // Replace - await pb.set(peerId, supportedProtocols) - let protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - - // Remove - await pb.remove(peerId, removedProtocols) - protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - await pWaitFor(() => spy.callCount === 2) - - const [firstCallArgs] = spy.firstCall.args - const [secondCallArgs] = spy.secondCall.args - expect(arraysAreEqual(firstCallArgs.detail.protocols, supportedProtocols)) - expect(arraysAreEqual(secondCallArgs.detail.protocols, finalProtocols)) - }) - - it('emits on remove if the content changes', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol2'] - const finalProtocols = supportedProtocols.filter(p => !removedProtocols.includes(p)) - - peerStore.addEventListener('change:protocols', spy) - - // set - await pb.set(peerId, supportedProtocols) - - // remove (content already existing) - await pb.remove(peerId, removedProtocols) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - await pWaitFor(() => spy.callCount === 2) - }) - - it('does not emit on remove if the content does not change', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol3'] - - peerStore.addEventListener('change:protocols', spy) - - // set - await pb.set(peerId, supportedProtocols) - - // remove - await pb.remove(peerId, removedProtocols) - - // Only one event - expect(spy.callCount).to.eql(1) - }) - }) - - describe('protoBook.get', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.get('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('returns empty if no protocols are known for the provided peer', async () => { - const protocols = await pb.get(peerId) - - expect(protocols).to.be.empty() - }) - - it('returns the protocols stored', async () => { - const supportedProtocols = ['protocol1', 'protocol2'] - - await pb.set(peerId, supportedProtocols) - - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - }) - }) - - describe('protoBook.delete', 
() => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.delete('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:protocols', () => { - defer.reject() - }) - - await pb.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('should emit event if a record exists for the peer', async () => { - const defer = pDefer() - - const supportedProtocols = ['protocol1', 'protocol2'] - await pb.set(peerId, supportedProtocols) - - // Listen after set - peerStore.addEventListener('change:protocols', (evt) => { - const { protocols } = evt.detail - expect(protocols.length).to.eql(0) - defer.resolve() - }) - - await pb.delete(peerId) - - await defer.promise - }) - }) -}) diff --git a/test/save.spec.ts b/test/save.spec.ts new file mode 100644 index 0000000..c160288 --- /dev/null +++ b/test/save.spec.ts @@ -0,0 +1,173 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import pDefer from 'p-defer' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { codes } from '../src/errors.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import { pEvent } from 'p-event' +import sinon from 'sinon' +import type { PeerUpdate } from '@libp2p/interface-libp2p' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') + +describe('save', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + }) + + it('throws invalid parameters error if invalid PeerId is provided', async () => { + // @ts-expect-error invalid input + await expect(peerStore.save('invalid peerId')) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('throws invalid parameters error if no peer data provided', async () => { + // @ts-expect-error invalid input + await expect(peerStore.save(peerId)) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('throws invalid parameters error if invalid multiaddrs are provided', async () => { + await expect(peerStore.save(peerId, { + // @ts-expect-error invalid input + addresses: ['invalid multiaddr'] + })) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('replaces the stored content by default and emit change event', async () => { + const supportedMultiaddrs = [addr1, addr2] + const eventPromise = pEvent(peerStore, 'peer:update') + + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + const event = await eventPromise as CustomEvent + + const { peer, previous } = event.detail + + 
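+    // the event detail carries the peer as stored; `previous` is undefined
+    // because this peer was not in the store before the save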
expect(peer.addresses).to.deep.equal( + supportedMultiaddrs.map((multiaddr) => ({ + isCertified: false, + multiaddr + })) + ) + expect(previous).to.be.undefined() + }) + + it('emits on set if not storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1, addr2] + const supportedMultiaddrsB = [addr2] + + let changeCounter = 0 + peerStore.addEventListener('peer:update', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrsA + }) + + // set 2 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrsB + }) + + const peer = await peerStore.get(otherPeerId) + const multiaddrs = peer.addresses.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) + + await defer.promise + }) + + it('emits self event on save for self peer', async () => { + const eventPromise = pEvent(peerStore, 'self:peer:update') + + await peerStore.save(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('does not emit on set if it is storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrs = [addr1, addr2] + + let changeCounter = 0 + peerStore.addEventListener('peer:update', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject(new Error('Saved identical data twice')) + } + }) + + // set 1 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + // set 2 (same content) + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + await defer.promise + }) + + it('should not set public key when key does not match', async () => { + const edKey = await createEd25519PeerId() + + if (peerId.publicKey == null) { + throw new Error('Public key was missing') + } + + await expect(peerStore.save(edKey, { + publicKey: peerId.publicKey + })).to.eventually.be.rejectedWith(/bytes do not match/) + }) + + it('should not store a public key if already stored', async () => { + // @ts-expect-error private fields + const spy = sinon.spy(peerStore.store.components.datastore, 'put') + + if (otherPeerId.publicKey == null) { + throw new Error('Public key was missing') + } + + // Set PeerId + await peerStore.save(otherPeerId, { + publicKey: otherPeerId.publicKey + }) + await peerStore.save(otherPeerId, { + publicKey: otherPeerId.publicKey + }) + + expect(spy).to.have.property('callCount', 1) + }) +}) diff --git a/test/utils/dedupe-addresses.spec.ts b/test/utils/dedupe-addresses.spec.ts new file mode 100644 index 0000000..e547a3a --- /dev/null +++ b/test/utils/dedupe-addresses.spec.ts @@ -0,0 +1,75 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import { dedupeAddresses } from '../../src/utils/dedupe-addresses.js' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') + +describe('dedupe-addresses', () => { + it('should dedupe addresses', () => { + expect(dedupeAddresses({ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + })).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: false + }, { + multiaddr: addr2.bytes, + isCertified: false + }]) + }) + + it('should sort addresses', () => { + expect(dedupeAddresses({ + 
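+      // addr2 is passed first here, so the expected output below also proves
+      // the deduped list comes back sorted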
multiaddr: addr2, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + })).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: false + }, { + multiaddr: addr2.bytes, + isCertified: false + }]) + }) + + it('should retain isCertified when deduping addresses', () => { + expect(dedupeAddresses({ + multiaddr: addr1, + isCertified: true + }, { + multiaddr: addr1, + isCertified: false + })).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: true + }]) + }) + + it('should survive deduping garbage addresses', () => { + expect(dedupeAddresses({ + multiaddr: addr1, + isCertified: false + // @ts-expect-error invalid params + }, {}, 'hello', 5, undefined, { + multiaddr: addr1, + isCertified: false + })).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: false + }]) + }) +}) diff --git a/test/utils/dedupe-metadata.spec.ts b/test/utils/dedupe-metadata.spec.ts new file mode 100644 index 0000000..6ed9f81 --- /dev/null +++ b/test/utils/dedupe-metadata.spec.ts @@ -0,0 +1,35 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import { dedupeMetadata } from '../../src/utils/dedupe-metadata.js' + +describe('dedupe-metadata', () => { + it('should dedupe tags', () => { + expect(dedupeMetadata( + new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]), + new Map([['a-key', Uint8Array.from([4, 5, 6, 7])]]) + )).to.deep.equal( + new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) + ) + }) + + it('should only require one argument', () => { + expect(dedupeMetadata( + new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) + )).to.deep.equal( + new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) + ) + }) + + it('should sort tags', () => { + expect(dedupeMetadata( + new Map([['b-key', Uint8Array.from([0, 1, 2, 3])]]), + new Map([['a-key', Uint8Array.from([4, 5, 6, 7])]]) + )).to.deep.equal( + new Map([ + ['a-key', Uint8Array.from([4, 5, 6, 7])], + ['b-key', Uint8Array.from([0, 1, 2, 3])] + ]) + ) + }) +}) diff --git a/test/utils/dedupe-tags.spec.ts b/test/utils/dedupe-tags.spec.ts new file mode 100644 index 0000000..35c95d3 --- /dev/null +++ b/test/utils/dedupe-tags.spec.ts @@ -0,0 +1,35 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import { dedupeTags } from '../../src/utils/dedupe-tags.js' + +describe('dedupe-tags', () => { + it('should dedupe tags', () => { + expect(dedupeTags( + new Map([['tag', { value: 20 }]]), + new Map([['tag', { value: 10 }]]) + )).to.deep.equal( + new Map([['tag', { value: 20, expiry: undefined }]]) + ) + }) + + it('should only require one argument', () => { + expect(dedupeTags( + new Map([['tag', { value: 10 }]]) + )).to.deep.equal( + new Map([['tag', { value: 10 }]]) + ) + }) + + it('should sort tags', () => { + expect(dedupeTags( + new Map([['btag', { value: 20 }]]), + new Map([['atag', { value: 10 }]]) + )).to.deep.equal( + new Map([ + ['atag', { value: 10, expiry: undefined }], + ['btag', { value: 20, expiry: undefined }] + ]) + ) + }) +}) From b88e2a6c14f542f69f9ee7c10e5b5bf791ca8723 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 21 Apr 2023 14:46:10 +0100 Subject: [PATCH 2/8] chore: update deps --- package.json | 4 +-- src/index.ts | 67 +++++++++------------------------------------- src/store.ts | 38 +++++++++++++++++--------- test/index.spec.ts | 6 ++++- test/merge.spec.ts | 10 ++++--- test/patch.spec.ts | 10 ++++--- test/save.spec.ts | 17 +++++++----- 7 files changed, 69 insertions(+), 83 deletions(-) diff --git a/package.json b/package.json index 8d6b2c1..d674639 
100644 --- a/package.json +++ b/package.json @@ -145,9 +145,9 @@ }, "dependencies": { "@libp2p/crypto": "^1.0.15", - "@libp2p/interface-libp2p": "^1.3.1", + "@libp2p/interface-libp2p": "^2.0.0", "@libp2p/interface-peer-id": "^2.0.0", - "@libp2p/interface-peer-store": "^1.2.2", + "@libp2p/interface-peer-store": "^2.0.0", "@libp2p/interfaces": "^3.2.0", "@libp2p/peer-id": "^2.0.0", "@multiformats/multiaddr": "^12.0.0", diff --git a/src/index.ts b/src/index.ts index 103c58d..05f9e7e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,15 +1,15 @@ -import { EventEmitter } from '@libp2p/interfaces/events' -import { PeerUpdate, PersistentStore } from './store.js' +import type { EventEmitter } from '@libp2p/interfaces/events' +import { PersistentStore, PeerUpdate } from './store.js' import type { PeerStore, Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' import type { Datastore } from 'interface-datastore' import type { Multiaddr } from '@multiformats/multiaddr' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' export interface PersistentPeerStoreComponents { peerId: PeerId datastore: Datastore + events: EventEmitter } export interface AddressFilter { @@ -20,61 +20,22 @@ export interface PersistentPeerStoreInit { addressFilter?: AddressFilter } -interface PeerStoreEvents { - /** - * This event is emitted when the stored data for a peer changes. - * - * If the peer store already contained data about the peer it will be set - * as the `previous` key on the event detail. - * - * @example - * - * ```js - * peerStore.addEventListener('peer:update', (event) => { - * const { peer, previous } = event.detail - * // ... - * }) - * ``` - */ - 'peer:update': CustomEvent - - /** - * Similar to the 'peer:update' event, this event is dispatched when the - * updated peer is the current node. - * - * @example - * - * ```js - * peerStore.addEventListener('self:peer:update', (event) => { - * const { peer, previous } = event.detail - * // ... 
- * }) - * ``` - */ - 'self:peer:update': CustomEvent -} - /** * An implementation of PeerStore that stores data in a Datastore */ -export class PersistentPeerStore extends EventEmitter implements PeerStore { - private readonly components: PersistentPeerStoreComponents +export class PersistentPeerStore implements PeerStore { private readonly store: PersistentStore + private readonly events: EventEmitter + private readonly peerId: PeerId constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { - super() - - this.components = components + this.events = components.events + this.peerId = components.peerId this.store = new PersistentStore(components) } async forEach (fn: (peer: Peer) => void): Promise { for await (const peer of this.store.all()) { - if (peer.id.equals(this.components.peerId)) { - // Skip self peer if present - continue - } - fn(peer) } } @@ -90,10 +51,6 @@ export class PersistentPeerStore extends EventEmitter implement } async delete (peerId: PeerId): Promise { - if (this.components.peerId.equals(peerId)) { - throw new CodeError('Cannot delete self peer', codes.ERR_INVALID_PARAMETERS) - } - await this.store.delete(peerId) } @@ -134,10 +91,10 @@ export class PersistentPeerStore extends EventEmitter implement return } - if (this.components.peerId.equals(id)) { - this.safeDispatchEvent('self:peer:update', { detail: result }) + if (this.peerId.equals(id)) { + this.events.safeDispatchEvent('self:peer:update', { detail: result }) } else { - this.safeDispatchEvent('peer:update', { detail: result }) + this.events.safeDispatchEvent('peer:update', { detail: result }) } } } diff --git a/src/store.ts b/src/store.ts index e0e70b9..b474b8b 100644 --- a/src/store.ts +++ b/src/store.ts @@ -12,33 +12,41 @@ import { dedupeTags } from './utils/dedupe-tags.js' import { dedupeMetadata } from './utils/dedupe-metadata.js' import { bytesToPeer } from './utils/bytes-to-peer.js' import { multiaddr } from '@multiformats/multiaddr' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from './errors.js' +import type { Datastore } from 'interface-datastore' +import type { PeerUpdate as PeerUpdateExternal } from '@libp2p/interface-libp2p' /** * Event detail emitted when peer data changes */ -export interface PeerUpdate { - peer: Peer - previous?: Peer +export interface PeerUpdate extends PeerUpdateExternal { updated: boolean } export class PersistentStore { - private readonly components: PersistentPeerStoreComponents + private readonly peerId: PeerId + private readonly datastore: Datastore constructor (components: PersistentPeerStoreComponents) { - this.components = components + this.peerId = components.peerId + this.datastore = components.datastore } async has (peerId: PeerId): Promise { - return await this.components.datastore.has(peerIdToDatastoreKey(peerId)) + return await this.datastore.has(peerIdToDatastoreKey(peerId)) } async delete (peerId: PeerId): Promise { - await this.components.datastore.delete(peerIdToDatastoreKey(peerId)) + if (this.peerId.equals(peerId)) { + throw new CodeError('Cannot delete self peer', codes.ERR_INVALID_PARAMETERS) + } + + await this.datastore.delete(peerIdToDatastoreKey(peerId)) } async load (peerId: PeerId): Promise { - const buf = await this.components.datastore.get(peerIdToDatastoreKey(peerId)) + const buf = await this.datastore.get(peerIdToDatastoreKey(peerId)) return await bytesToPeer(peerId, buf) } @@ -94,20 +102,26 @@ export class PersistentStore { } async * all (): AsyncGenerator { - for await (const { key, 
value } of this.components.datastore.query({ + for await (const { key, value } of this.datastore.query({ prefix: NAMESPACE_COMMON })) { // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} const base32Str = key.toString().split('/')[2] const buf = base32.decode(base32Str) + const peerId = peerIdFromBytes(buf) + + if (peerId.equals(this.peerId)) { + // Skip self peer if present + continue + } - yield bytesToPeer(peerIdFromBytes(buf), value) + yield bytesToPeer(peerId, value) } } async #findExistingPeer (peerId: PeerId): Promise<{ existingBuf?: Uint8Array, existingPeer?: Peer }> { try { - const existingBuf = await this.components.datastore.get(peerIdToDatastoreKey(peerId)) + const existingBuf = await this.datastore.get(peerIdToDatastoreKey(peerId)) const existingPeer = await bytesToPeer(peerId, existingBuf) return { @@ -144,7 +158,7 @@ export class PersistentStore { } } - await this.components.datastore.put(peerIdToDatastoreKey(peerId), buf) + await this.datastore.put(peerIdToDatastoreKey(peerId), buf) return { peer: await bytesToPeer(peerId, buf), diff --git a/test/index.spec.ts b/test/index.spec.ts index 570afb4..526c15f 100644 --- a/test/index.spec.ts +++ b/test/index.spec.ts @@ -8,6 +8,8 @@ import { MemoryDatastore } from 'datastore-core/memory' import { PersistentPeerStore } from '../src/index.js' import { createEd25519PeerId } from '@libp2p/peer-id-factory' import delay from 'delay' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') @@ -15,11 +17,13 @@ describe('PersistentPeerStore', () => { let peerId: PeerId let otherPeerId: PeerId let peerStore: PersistentPeerStore + let events: EventEmitter beforeEach(async () => { peerId = await createEd25519PeerId() otherPeerId = await createEd25519PeerId() - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) }) it('has an empty map of peers', async () => { diff --git a/test/merge.spec.ts b/test/merge.spec.ts index e14b8a9..8e42b67 100644 --- a/test/merge.spec.ts +++ b/test/merge.spec.ts @@ -9,6 +9,8 @@ import { PersistentPeerStore } from '../src/index.js' import { createEd25519PeerId } from '@libp2p/peer-id-factory' import type { PeerData } from '@libp2p/interface-peer-store' import { pEvent } from 'p-event' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') @@ -18,15 +20,17 @@ describe('merge', () => { let peerId: PeerId let otherPeerId: PeerId let peerStore: PersistentPeerStore + let events: EventEmitter beforeEach(async () => { peerId = await createEd25519PeerId() otherPeerId = await createEd25519PeerId() - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) }) it('emits peer:update event on merge', async () => { - const eventPromise = pEvent(peerStore, 'peer:update') + const eventPromise = pEvent(events, 'peer:update') await peerStore.merge(otherPeerId, { multiaddrs: [addr1, addr2] @@ -36,7 +40,7 @@ describe('merge', () => { }) it('emits self:peer:update event on merge for self peer', async () => { - const eventPromise = pEvent(peerStore, 
'self:peer:update') + const eventPromise = pEvent(events, 'self:peer:update') await peerStore.merge(peerId, { multiaddrs: [addr1, addr2] diff --git a/test/patch.spec.ts b/test/patch.spec.ts index 146444f..f997885 100644 --- a/test/patch.spec.ts +++ b/test/patch.spec.ts @@ -9,6 +9,8 @@ import { PersistentPeerStore } from '../src/index.js' import { createEd25519PeerId } from '@libp2p/peer-id-factory' import type { PeerData } from '@libp2p/interface-peer-store' import { pEvent } from 'p-event' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') @@ -18,15 +20,17 @@ describe('patch', () => { let peerId: PeerId let otherPeerId: PeerId let peerStore: PersistentPeerStore + let events: EventEmitter beforeEach(async () => { peerId = await createEd25519PeerId() otherPeerId = await createEd25519PeerId() - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) }) it('emits peer:update event on patch', async () => { - const eventPromise = pEvent(peerStore, 'peer:update') + const eventPromise = pEvent(events, 'peer:update') await peerStore.patch(otherPeerId, { multiaddrs: [addr1, addr2] @@ -36,7 +40,7 @@ describe('patch', () => { }) it('emits self:peer:update event on patch for self peer', async () => { - const eventPromise = pEvent(peerStore, 'self:peer:update') + const eventPromise = pEvent(events, 'self:peer:update') await peerStore.patch(peerId, { multiaddrs: [addr1, addr2] diff --git a/test/save.spec.ts b/test/save.spec.ts index c160288..e2c172d 100644 --- a/test/save.spec.ts +++ b/test/save.spec.ts @@ -11,7 +11,8 @@ import { codes } from '../src/errors.js' import { createEd25519PeerId } from '@libp2p/peer-id-factory' import { pEvent } from 'p-event' import sinon from 'sinon' -import type { PeerUpdate } from '@libp2p/interface-libp2p' +import type { Libp2pEvents, PeerUpdate } from '@libp2p/interface-libp2p' +import { EventEmitter } from '@libp2p/interfaces/events' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') @@ -20,11 +21,13 @@ describe('save', () => { let peerId: PeerId let otherPeerId: PeerId let peerStore: PersistentPeerStore + let events: EventEmitter beforeEach(async () => { peerId = await createEd25519PeerId() otherPeerId = await createEd25519PeerId() - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) }) it('throws invalid parameters error if invalid PeerId is provided', async () => { @@ -49,7 +52,7 @@ describe('save', () => { it('replaces the stored content by default and emit change event', async () => { const supportedMultiaddrs = [addr1, addr2] - const eventPromise = pEvent(peerStore, 'peer:update') + const eventPromise = pEvent(events, 'peer:update') await peerStore.save(otherPeerId, { multiaddrs: supportedMultiaddrs @@ -75,7 +78,7 @@ describe('save', () => { const supportedMultiaddrsB = [addr2] let changeCounter = 0 - peerStore.addEventListener('peer:update', () => { + events.addEventListener('peer:update', () => { changeCounter++ if (changeCounter > 1) { defer.resolve() @@ -100,7 +103,7 @@ describe('save', () => { }) it('emits self event on save for self peer', async 
() => { - const eventPromise = pEvent(peerStore, 'self:peer:update') + const eventPromise = pEvent(events, 'self:peer:update') await peerStore.save(peerId, { multiaddrs: [addr1, addr2] @@ -115,7 +118,7 @@ describe('save', () => { const supportedMultiaddrs = [addr1, addr2] let changeCounter = 0 - peerStore.addEventListener('peer:update', () => { + events.addEventListener('peer:update', () => { changeCounter++ if (changeCounter > 1) { defer.reject(new Error('Saved identical data twice')) @@ -154,7 +157,7 @@ describe('save', () => { it('should not store a public key if already stored', async () => { // @ts-expect-error private fields - const spy = sinon.spy(peerStore.store.components.datastore, 'put') + const spy = sinon.spy(peerStore.store.datastore, 'put') if (otherPeerId.publicKey == null) { throw new Error('Public key was missing') From 8ac4ad321d621d07af0bf5fbe8109e263761b0b7 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 21 Apr 2023 18:06:35 +0100 Subject: [PATCH 3/8] chore: do not store peer id unecessarily --- src/store.ts | 5 +++++ test/save.spec.ts | 36 +++++++++++++++++++++++++++++++++++- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/src/store.ts b/src/store.ts index b474b8b..7ce1a68 100644 --- a/src/store.ts +++ b/src/store.ts @@ -148,6 +148,11 @@ export class PersistentStore { peer.metadata = sortMapByKeys(peer.metadata) peer.tags = sortMapByKeys(peer.tags) + // Ed25519 and secp256k1 have their public key embedded in them so no need to duplicate it + if (peerId.type !== 'RSA') { + delete peer.publicKey + } + const buf = PeerPB.encode(peer) if (existingBuf != null && uint8ArrayEquals(buf, existingBuf)) { diff --git a/test/save.spec.ts b/test/save.spec.ts index e2c172d..bf75df1 100644 --- a/test/save.spec.ts +++ b/test/save.spec.ts @@ -8,11 +8,12 @@ import pDefer from 'p-defer' import { MemoryDatastore } from 'datastore-core/memory' import { PersistentPeerStore } from '../src/index.js' import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import { createEd25519PeerId, createRSAPeerId, createSecp256k1PeerId } from '@libp2p/peer-id-factory' import { pEvent } from 'p-event' import sinon from 'sinon' import type { Libp2pEvents, PeerUpdate } from '@libp2p/interface-libp2p' import { EventEmitter } from '@libp2p/interfaces/events' +import { Peer as PeerPB } from '../src/pb/peer.js' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') @@ -173,4 +174,37 @@ describe('save', () => { expect(spy).to.have.property('callCount', 1) }) + + it('should not store a public key if part of peer id', async () => { + // @ts-expect-error private fields + const spy = sinon.spy(peerStore.store.datastore, 'put') + + if (otherPeerId.publicKey == null) { + throw new Error('Public key was missing') + } + + const edKey = await createEd25519PeerId() + await peerStore.save(edKey, { + publicKey: edKey.publicKey + }) + + const dbPeerEdKey = PeerPB.decode(spy.getCall(0).args[1]) + expect(dbPeerEdKey).to.not.have.property('publicKey') + + const secpKey = await createSecp256k1PeerId() + await peerStore.save(secpKey, { + publicKey: secpKey.publicKey + }) + + const dbPeerSecpKey = PeerPB.decode(spy.getCall(1).args[1]) + expect(dbPeerSecpKey).to.not.have.property('publicKey') + + const rsaKey = await createRSAPeerId() + await peerStore.save(rsaKey, { + publicKey: rsaKey.publicKey + }) + + const dbPeerRsaKey = PeerPB.decode(spy.getCall(2).args[1]) + 
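+    // unlike Ed25519/secp256k1 keys, an RSA public key is not embedded in the
+    // peer id, so it should still be written to the datastore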
expect(dbPeerRsaKey).to.have.property('publicKey').that.equalBytes(rsaKey.publicKey) + }) }) From cd5e6a6bdcd19f10d21f0f200e80dd44a2739f23 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 21 Apr 2023 18:38:25 +0100 Subject: [PATCH 4/8] chore: store the right fields --- src/store.ts | 8 ++-- test/merge.spec.ts | 91 ++++++++++++++++++++++++++++++++++-------- test/patch.spec.ts | 98 ++++++++++++++++++++++++++++++++++++---------- test/save.spec.ts | 42 ++++++++++++++++++++ 4 files changed, 197 insertions(+), 42 deletions(-) diff --git a/src/store.ts b/src/store.ts index 7ce1a68..7698c0d 100644 --- a/src/store.ts +++ b/src/store.ts @@ -71,12 +71,12 @@ export class PersistentStore { const peer = toDatastorePeer(peerId, data) const peerPb: PeerPB = { - addresses: dedupeAddresses(...(peer.addresses ?? existingPeer?.addresses ?? [])), - protocols: [...new Set(peer.protocols ?? existingPeer?.protocols)], + addresses: dedupeAddresses(...((data.addresses != null || data.multiaddrs != null) ? peer.addresses : (existingPeer?.addresses ?? []))), + protocols: (data.protocols != null) ? [...new Set(peer.protocols)] : [...new Set(existingPeer?.protocols)], publicKey: peer.publicKey ?? existingPeer?.id.publicKey, peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, - metadata: peer.metadata ?? existingPeer?.metadata, - tags: peer.tags ?? existingPeer?.tags + metadata: data.metadata != null ? peer.metadata : existingPeer?.metadata ?? new Map(), + tags: data.tags != null ? peer.tags : existingPeer?.tags ?? new Map() } return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) diff --git a/test/merge.spec.ts b/test/merge.spec.ts index 8e42b67..90db3eb 100644 --- a/test/merge.spec.ts +++ b/test/merge.spec.ts @@ -60,11 +60,14 @@ describe('merge', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.save(otherPeerId, peer) - + const original = await peerStore.save(otherPeerId, peer) const updated = await peerStore.merge(otherPeerId, { multiaddrs: [ addr3 @@ -81,6 +84,12 @@ describe('merge', () => { multiaddr: addr2, isCertified: false }]) + + // other fields should be untouched + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) }) it('merges metadata', async () => { @@ -94,18 +103,19 @@ describe('merge', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.save(otherPeerId, peer) - - const peerUpdate: PeerData = { + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { metadata: { bar: Uint8Array.from([3, 4, 5]) } - } - - const updated = await peerStore.merge(otherPeerId, peerUpdate) + }) expect(updated).to.have.property('metadata').that.deep.equals( new Map([ @@ -113,6 +123,12 @@ describe('merge', () => { ['bar', Uint8Array.from([3, 4, 5])] ]) ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) 
+ expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) }) it('merges tags', async () => { @@ -126,18 +142,19 @@ describe('merge', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.patch(otherPeerId, peer) - - const peerUpdate: PeerData = { + const original = await peerStore.patch(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { tags: { tag2: { value: 20 } } - } - - const updated = await peerStore.merge(otherPeerId, peerUpdate) + }) expect(updated).to.have.property('tags').that.deep.equals( new Map([ @@ -145,5 +162,45 @@ describe('merge', () => { ['tag2', { value: 20 }] ]) ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('merges peer record envelope', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { + peerRecordEnvelope: Uint8Array.from([6, 7, 8]) + }) + + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals( + Uint8Array.from([6, 7, 8]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) }) }) diff --git a/test/patch.spec.ts b/test/patch.spec.ts index f997885..b219a34 100644 --- a/test/patch.spec.ts +++ b/test/patch.spec.ts @@ -60,23 +60,31 @@ describe('patch', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.patch(otherPeerId, peer) - - const peerUpdate: PeerData = { + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { multiaddrs: [ addr3 ] - } - - const updated = await peerStore.patch(otherPeerId, peerUpdate) + }) + // upated field expect(updated).to.have.property('addresses').that.deep.equals([{ multiaddr: addr3, isCertified: false }]) + + // other fields should be untouched + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) }) it('replaces metadata', async () => { @@ -90,22 +98,29 @@ describe('patch', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.patch(peerId, peer) - - const peerUpdate: PeerData = { + const original = await peerStore.save(otherPeerId, peer) + 
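+    // patch replaces the metadata map outright, dropping keys that are not in
+    // the update (merge, by contrast, retains them)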
const updated = await peerStore.patch(otherPeerId, { metadata: { bar: Uint8Array.from([3, 4, 5]) } - } - - const updated = await peerStore.patch(otherPeerId, peerUpdate) + }) expect(updated).to.have.property('metadata').that.deep.equals( new Map([['bar', Uint8Array.from([3, 4, 5])]]) ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) }) it('replaces tags', async () => { @@ -119,21 +134,62 @@ describe('patch', () => { }, tags: { tag1: { value: 10 } - } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) } - await peerStore.patch(peerId, peer) - - const peerUpdate: PeerData = { + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { tags: { tag2: { value: 20 } } - } - - const updated = await peerStore.patch(otherPeerId, peerUpdate) + }) expect(updated).to.have.property('tags').that.deep.equals( new Map([['tag2', { value: 20 }]]) ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('replaces peer record envelope', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { + peerRecordEnvelope: Uint8Array.from([6, 7, 8]) + }) + + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals( + Uint8Array.from([6, 7, 8]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) }) }) diff --git a/test/save.spec.ts b/test/save.spec.ts index bf75df1..e97d0f5 100644 --- a/test/save.spec.ts +++ b/test/save.spec.ts @@ -14,6 +14,7 @@ import sinon from 'sinon' import type { Libp2pEvents, PeerUpdate } from '@libp2p/interface-libp2p' import { EventEmitter } from '@libp2p/interfaces/events' import { Peer as PeerPB } from '../src/pb/peer.js' +import type { PeerData } from '@libp2p/interface-peer-store' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') @@ -207,4 +208,45 @@ describe('save', () => { const dbPeerRsaKey = PeerPB.decode(spy.getCall(2).args[1]) expect(dbPeerRsaKey).to.have.property('publicKey').that.equalBytes(rsaKey.publicKey) }) + + it('saves all of the fields', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + 
'/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const saved = await peerStore.save(otherPeerId, peer) + + expect(saved).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + }]) + expect(saved).to.have.property('metadata').that.deep.equals( + new Map([ + ['foo', Uint8Array.from([0, 1, 2])] + ]) + ) + expect(saved).to.have.property('tags').that.deep.equals( + new Map([ + ['tag1', { value: 10 }] + ]) + ) + expect(saved).to.have.property('protocols').that.deep.equals(peer.protocols) + expect(saved).to.have.property('peerRecordEnvelope').that.deep.equals(peer.peerRecordEnvelope) + }) }) From a7a54efa2c7430a25e69ab70e88869c6acb616df Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sat, 22 Apr 2023 17:38:03 +0100 Subject: [PATCH 5/8] chore: restore locking --- package.json | 2 + src/index.ts | 113 ++++++++++++++++++++++++++++++++++++++++++--------- src/store.ts | 6 +++ 3 files changed, 102 insertions(+), 19 deletions(-) diff --git a/package.json b/package.json index d674639..95cddba 100644 --- a/package.json +++ b/package.json @@ -149,9 +149,11 @@ "@libp2p/interface-peer-id": "^2.0.0", "@libp2p/interface-peer-store": "^2.0.0", "@libp2p/interfaces": "^3.2.0", + "@libp2p/logger": "^2.0.7", "@libp2p/peer-id": "^2.0.0", "@multiformats/multiaddr": "^12.0.0", "interface-datastore": "^8.0.0", + "mortice": "^3.0.1", "multiformats": "^11.0.0", "protons-runtime": "^5.0.0", "uint8arraylist": "^2.1.1", diff --git a/src/index.ts b/src/index.ts index 05f9e7e..5a208b0 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,6 +5,9 @@ import type { PeerId } from '@libp2p/interface-peer-id' import type { Datastore } from 'interface-datastore' import type { Multiaddr } from '@multiformats/multiaddr' import type { Libp2pEvents } from '@libp2p/interface-libp2p' +import { logger } from '@libp2p/logger' + +const log = logger('libp2p:peer-store') export interface PersistentPeerStoreComponents { peerId: PeerId @@ -35,55 +38,127 @@ export class PersistentPeerStore implements PeerStore { } async forEach (fn: (peer: Peer) => void): Promise { - for await (const peer of this.store.all()) { - fn(peer) + log.trace('forEach await read lock') + const release = await this.store.lock.readLock() + log.trace('forEach got read lock') + + try { + for await (const peer of this.store.all()) { + fn(peer) + } + } finally { + log.trace('forEach release read lock') + release() } } async all (): Promise { - const output: Peer[] = [] + log.trace('all await read lock') + const release = await this.store.lock.readLock() + log.trace('all got read lock') + + try { + const output: Peer[] = [] - await this.forEach(peer => { - output.push(peer) - }) + await this.forEach(peer => { + output.push(peer) + }) - return output + return output + } finally { + log.trace('all release read lock') + release() + } } async delete (peerId: PeerId): Promise { - await this.store.delete(peerId) + log.trace('delete await write lock') + const release = await this.store.lock.writeLock() + log.trace('delete got write lock') + + try { + await this.store.delete(peerId) + } finally { + log.trace('delete release write lock') + release() + } } async has (peerId: PeerId): Promise { - return await this.store.has(peerId) + log.trace('has await read lock') + const release = await this.store.lock.readLock() + log.trace('has got read lock') + + try { + return await this.store.has(peerId) + } finally { + log.trace('has release read lock') + release() + } } async get (peerId: 
PeerId): Promise { - return await this.store.load(peerId) + log.trace('get await read lock') + const release = await this.store.lock.readLock() + log.trace('get got read lock') + + try { + return await this.store.load(peerId) + } finally { + log.trace('get release read lock') + release() + } } async save (id: PeerId, data: PeerData): Promise { - const result = await this.store.save(id, data) + log.trace('save await write lock') + const release = await this.store.lock.writeLock() + log.trace('save got write lock') - this.#emitIfUpdated(id, result) + try { + const result = await this.store.save(id, data) - return result.peer + this.#emitIfUpdated(id, result) + + return result.peer + } finally { + log.trace('save release write lock') + release() + } } async patch (id: PeerId, data: PeerData): Promise { - const result = await this.store.patch(id, data) + log.trace('patch await write lock') + const release = await this.store.lock.writeLock() + log.trace('patch got write lock') + + try { + const result = await this.store.patch(id, data) - this.#emitIfUpdated(id, result) + this.#emitIfUpdated(id, result) - return result.peer + return result.peer + } finally { + log.trace('patch release write lock') + release() + } } async merge (id: PeerId, data: PeerData): Promise { - const result = await this.store.merge(id, data) + log.trace('merge await write lock') + const release = await this.store.lock.writeLock() + log.trace('merge got write lock') + + try { + const result = await this.store.merge(id, data) - this.#emitIfUpdated(id, result) + this.#emitIfUpdated(id, result) - return result.peer + return result.peer + } finally { + log.trace('merge release write lock') + release() + } } #emitIfUpdated (id: PeerId, result: PeerUpdate): void { diff --git a/src/store.ts b/src/store.ts index 7698c0d..fa1937a 100644 --- a/src/store.ts +++ b/src/store.ts @@ -16,6 +16,7 @@ import { CodeError } from '@libp2p/interfaces/errors' import { codes } from './errors.js' import type { Datastore } from 'interface-datastore' import type { PeerUpdate as PeerUpdateExternal } from '@libp2p/interface-libp2p' +import mortice, { Mortice } from 'mortice' /** * Event detail emitted when peer data changes @@ -27,10 +28,15 @@ export interface PeerUpdate extends PeerUpdateExternal { export class PersistentStore { private readonly peerId: PeerId private readonly datastore: Datastore + public readonly lock: Mortice constructor (components: PersistentPeerStoreComponents) { this.peerId = components.peerId this.datastore = components.datastore + this.lock = mortice({ + name: 'peer-store', + singleProcess: true + }) } async has (peerId: PeerId): Promise { From c3b901a36a1af678c3ce2980e30cc66cdcf633f9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sat, 22 Apr 2023 17:43:55 +0100 Subject: [PATCH 6/8] chore: use store.all --- src/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/index.ts b/src/index.ts index 5a208b0..765fa32 100644 --- a/src/index.ts +++ b/src/index.ts @@ -60,9 +60,9 @@ export class PersistentPeerStore implements PeerStore { try { const output: Peer[] = [] - await this.forEach(peer => { + for await (const peer of this.store.all()) { output.push(peer) - }) + } return output } finally { From cd73156339bcf6774c822599abe4e2ba0b2dcd21 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sat, 22 Apr 2023 18:52:22 +0100 Subject: [PATCH 7/8] chore: restore address filter --- src/index.ts | 5 +++- src/store.ts | 22 +++++++++------ src/utils/dedupe-addresses.ts | 33 ++++++++++++++-------- 
test/utils/dedupe-addresses.spec.ts | 44 ++++++++++++++++++++--------- 4 files changed, 70 insertions(+), 34 deletions(-) diff --git a/src/index.ts b/src/index.ts index 765fa32..29355d3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -15,6 +15,9 @@ export interface PersistentPeerStoreComponents { events: EventEmitter } +/** + * Return true to allow storing the passed multiaddr for the passed peer + */ export interface AddressFilter { (peerId: PeerId, multiaddr: Multiaddr): Promise } @@ -34,7 +37,7 @@ export class PersistentPeerStore implements PeerStore { constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { this.events = components.events this.peerId = components.peerId - this.store = new PersistentStore(components) + this.store = new PersistentStore(components, init) } async forEach (fn: (peer: Peer) => void): Promise { diff --git a/src/store.ts b/src/store.ts index fa1937a..da29894 100644 --- a/src/store.ts +++ b/src/store.ts @@ -3,15 +3,14 @@ import { base32 } from 'multiformats/bases/base32' import { Peer as PeerPB } from './pb/peer.js' import type { Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' -import type { PersistentPeerStoreComponents } from './index.js' +import type { AddressFilter, PersistentPeerStoreComponents, PersistentPeerStoreInit } from './index.js' import { equals as uint8ArrayEquals } from 'uint8arrays/equals' import { NAMESPACE_COMMON, peerIdToDatastoreKey } from './utils/peer-id-to-datastore-key.js' import { toDatastorePeer } from './utils/peer-data-to-datastore-peer.js' -import { dedupeAddresses } from './utils/dedupe-addresses.js' +import { dedupeFilterAndSortAddresses } from './utils/dedupe-addresses.js' import { dedupeTags } from './utils/dedupe-tags.js' import { dedupeMetadata } from './utils/dedupe-metadata.js' import { bytesToPeer } from './utils/bytes-to-peer.js' -import { multiaddr } from '@multiformats/multiaddr' import { CodeError } from '@libp2p/interfaces/errors' import { codes } from './errors.js' import type { Datastore } from 'interface-datastore' @@ -29,10 +28,12 @@ export class PersistentStore { private readonly peerId: PeerId private readonly datastore: Datastore public readonly lock: Mortice + private readonly addressFilter?: AddressFilter - constructor (components: PersistentPeerStoreComponents) { + constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { this.peerId = components.peerId this.datastore = components.datastore + this.addressFilter = init.addressFilter this.lock = mortice({ name: 'peer-store', singleProcess: true @@ -64,6 +65,7 @@ export class PersistentStore { } = await this.#findExistingPeer(peerId) const peerPb: PeerPB = toDatastorePeer(peerId, data) + peerPb.addresses = await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), peerPb.addresses) return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } @@ -77,7 +79,9 @@ export class PersistentStore { const peer = toDatastorePeer(peerId, data) const peerPb: PeerPB = { - addresses: dedupeAddresses(...((data.addresses != null || data.multiaddrs != null) ? peer.addresses : (existingPeer?.addresses ?? []))), + addresses: await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), [ + ...((data.addresses != null || data.multiaddrs != null) ? peer.addresses : (existingPeer?.addresses ?? [])) + ]), protocols: (data.protocols != null) ? 
[...new Set(peer.protocols)] : [...new Set(existingPeer?.protocols)], publicKey: peer.publicKey ?? existingPeer?.id.publicKey, peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, @@ -96,7 +100,10 @@ export class PersistentStore { const peer = toDatastorePeer(peerId, data) const peerPb: PeerPB = { - addresses: dedupeAddresses(...(existingPeer?.addresses ?? []), ...peer.addresses), + addresses: await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), [ + ...(existingPeer?.addresses ?? []), + ...(peer.addresses ?? []) + ]), protocols: [...new Set([...(existingPeer?.protocols ?? []), ...peer.protocols])], publicKey: peer.publicKey ?? existingPeer?.id.publicKey, peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, @@ -145,9 +152,6 @@ export class PersistentStore { async #saveIfDifferent (peerId: PeerId, peer: PeerPB, existingBuf?: Uint8Array, existingPeer?: Peer): Promise { // sort fields before write so bytes are consistent - peer.addresses = peer.addresses.sort((a, b) => { - return multiaddr(a.multiaddr).toString().localeCompare(multiaddr(b.multiaddr).toString()) - }) peer.protocols = peer.protocols.sort((a, b) => { return a.localeCompare(b) }) diff --git a/src/utils/dedupe-addresses.ts b/src/utils/dedupe-addresses.ts index be8f857..af210ea 100644 --- a/src/utils/dedupe-addresses.ts +++ b/src/utils/dedupe-addresses.ts @@ -1,13 +1,15 @@ import { isMultiaddr, multiaddr } from '@multiformats/multiaddr' import type { Address as AddressPB } from '../pb/peer.js' import type { Address } from '@libp2p/interface-peer-store' +import type { AddressFilter } from '../index.js' +import type { PeerId } from '@libp2p/interface-peer-id' -export function dedupeAddresses (...addresses: Array
): AddressPB[] {
-  const addressMap = new Map()
+export async function dedupeFilterAndSortAddresses (peerId: PeerId, filter: AddressFilter, addresses: Array<Address | undefined>): Promise<AddressPB[]> {
+  const addressMap = new Map()
 
-  addresses.forEach(addr => {
+  for (const addr of addresses) {
     if (addr == null) {
-      return
+      continue
     }
 
     if (addr.multiaddr instanceof Uint8Array) {
@@ -15,7 +17,11 @@ export function dedupeAddresses (...addresses: Array
{ - return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) - }) + return [...addressMap.values()] + .sort((a, b) => { + return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) + }) + .map(({ isCertified, multiaddr }) => ({ + isCertified, + multiaddr: multiaddr.bytes + })) } diff --git a/test/utils/dedupe-addresses.spec.ts b/test/utils/dedupe-addresses.spec.ts index e547a3a..a335f52 100644 --- a/test/utils/dedupe-addresses.spec.ts +++ b/test/utils/dedupe-addresses.spec.ts @@ -2,14 +2,22 @@ import { expect } from 'aegir/chai' import { multiaddr } from '@multiformats/multiaddr' -import { dedupeAddresses } from '../../src/utils/dedupe-addresses.js' +import { dedupeFilterAndSortAddresses } from '../../src/utils/dedupe-addresses.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerId } from '@libp2p/interface-peer-id' const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') describe('dedupe-addresses', () => { - it('should dedupe addresses', () => { - expect(dedupeAddresses({ + let peerId: PeerId + + beforeEach(async () => { + peerId = await createEd25519PeerId() + }) + + it('should dedupe addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ multiaddr: addr1, isCertified: false }, { @@ -18,7 +26,7 @@ describe('dedupe-addresses', () => { }, { multiaddr: addr2, isCertified: false - })).to.deep.equal([{ + }])).to.deep.equal([{ multiaddr: addr1.bytes, isCertified: false }, { @@ -27,8 +35,8 @@ describe('dedupe-addresses', () => { }]) }) - it('should sort addresses', () => { - expect(dedupeAddresses({ + it('should sort addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ multiaddr: addr2, isCertified: false }, { @@ -37,7 +45,7 @@ describe('dedupe-addresses', () => { }, { multiaddr: addr1, isCertified: false - })).to.deep.equal([{ + }])).to.deep.equal([{ multiaddr: addr1.bytes, isCertified: false }, { @@ -46,30 +54,40 @@ describe('dedupe-addresses', () => { }]) }) - it('should retain isCertified when deduping addresses', () => { - expect(dedupeAddresses({ + it('should retain isCertified when deduping addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ multiaddr: addr1, isCertified: true }, { multiaddr: addr1, isCertified: false - })).to.deep.equal([{ + }])).to.deep.equal([{ multiaddr: addr1.bytes, isCertified: true }]) }) - it('should survive deduping garbage addresses', () => { - expect(dedupeAddresses({ + it('should survive deduping garbage addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ multiaddr: addr1, isCertified: false // @ts-expect-error invalid params }, {}, 'hello', 5, undefined, { multiaddr: addr1, isCertified: false - })).to.deep.equal([{ + }])).to.deep.equal([{ multiaddr: addr1.bytes, isCertified: false }]) }) + + it('should filter addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => false, [{ + multiaddr: addr1, + isCertified: true + }, { + multiaddr: addr1, + isCertified: false + }])).to.deep.equal([]) + }) }) From ec02ec316d4fd0c332fb85071d7ce33d7654d13d Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 24 Apr 2023 14:15:48 +0100 Subject: [PATCH 8/8] chore: allow unsetting tags/metadata with undefined --- package.json | 2 +- src/store.ts | 70 ++----- src/utils/dedupe-addresses.ts | 4 +- src/utils/dedupe-metadata.ts | 25 --- src/utils/dedupe-tags.ts | 21 -- 
src/utils/peer-data-to-datastore-peer.ts | 40 ++-- src/utils/to-peer-pb.ts | 237 +++++++++++++++++++++++ test/merge.spec.ts | 12 +- test/utils/dedupe-addresses.spec.ts | 14 -- test/utils/dedupe-metadata.spec.ts | 35 ---- test/utils/dedupe-tags.spec.ts | 35 ---- 11 files changed, 280 insertions(+), 215 deletions(-) delete mode 100644 src/utils/dedupe-metadata.ts delete mode 100644 src/utils/dedupe-tags.ts create mode 100644 src/utils/to-peer-pb.ts delete mode 100644 test/utils/dedupe-metadata.spec.ts delete mode 100644 test/utils/dedupe-tags.spec.ts diff --git a/package.json b/package.json index 95cddba..4bb0cc2 100644 --- a/package.json +++ b/package.json @@ -147,7 +147,7 @@ "@libp2p/crypto": "^1.0.15", "@libp2p/interface-libp2p": "^2.0.0", "@libp2p/interface-peer-id": "^2.0.0", - "@libp2p/interface-peer-store": "^2.0.0", + "@libp2p/interface-peer-store": "^2.0.1", "@libp2p/interfaces": "^3.2.0", "@libp2p/logger": "^2.0.7", "@libp2p/peer-id": "^2.0.0", diff --git a/src/store.ts b/src/store.ts index da29894..0f658e0 100644 --- a/src/store.ts +++ b/src/store.ts @@ -6,16 +6,13 @@ import type { PeerId } from '@libp2p/interface-peer-id' import type { AddressFilter, PersistentPeerStoreComponents, PersistentPeerStoreInit } from './index.js' import { equals as uint8ArrayEquals } from 'uint8arrays/equals' import { NAMESPACE_COMMON, peerIdToDatastoreKey } from './utils/peer-id-to-datastore-key.js' -import { toDatastorePeer } from './utils/peer-data-to-datastore-peer.js' -import { dedupeFilterAndSortAddresses } from './utils/dedupe-addresses.js' -import { dedupeTags } from './utils/dedupe-tags.js' -import { dedupeMetadata } from './utils/dedupe-metadata.js' import { bytesToPeer } from './utils/bytes-to-peer.js' import { CodeError } from '@libp2p/interfaces/errors' import { codes } from './errors.js' import type { Datastore } from 'interface-datastore' import type { PeerUpdate as PeerUpdateExternal } from '@libp2p/interface-libp2p' import mortice, { Mortice } from 'mortice' +import { toPeerPB } from './utils/to-peer-pb.js' /** * Event detail emitted when peer data changes @@ -64,8 +61,9 @@ export class PersistentStore { existingPeer } = await this.#findExistingPeer(peerId) - const peerPb: PeerPB = toDatastorePeer(peerId, data) - peerPb.addresses = await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), peerPb.addresses) + const peerPb: PeerPB = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter + }) return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } @@ -76,18 +74,10 @@ export class PersistentStore { existingPeer } = await this.#findExistingPeer(peerId) - const peer = toDatastorePeer(peerId, data) - - const peerPb: PeerPB = { - addresses: await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), [ - ...((data.addresses != null || data.multiaddrs != null) ? peer.addresses : (existingPeer?.addresses ?? [])) - ]), - protocols: (data.protocols != null) ? [...new Set(peer.protocols)] : [...new Set(existingPeer?.protocols)], - publicKey: peer.publicKey ?? existingPeer?.id.publicKey, - peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, - metadata: data.metadata != null ? peer.metadata : existingPeer?.metadata ?? new Map(), - tags: data.tags != null ? peer.tags : existingPeer?.tags ?? 
new Map() - } + const peerPb: PeerPB = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter, + existingPeer + }) return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } @@ -98,18 +88,10 @@ export class PersistentStore { existingPeer } = await this.#findExistingPeer(peerId) - const peer = toDatastorePeer(peerId, data) - const peerPb: PeerPB = { - addresses: await dedupeFilterAndSortAddresses(peerId, this.addressFilter ?? (async () => true), [ - ...(existingPeer?.addresses ?? []), - ...(peer.addresses ?? []) - ]), - protocols: [...new Set([...(existingPeer?.protocols ?? []), ...peer.protocols])], - publicKey: peer.publicKey ?? existingPeer?.id.publicKey, - peerRecordEnvelope: peer.peerRecordEnvelope ?? existingPeer?.peerRecordEnvelope, - metadata: dedupeMetadata(peer.metadata, existingPeer?.metadata), - tags: dedupeTags(peer.tags, existingPeer?.tags) - } + const peerPb: PeerPB = await toPeerPB(peerId, data, 'merge', { + addressFilter: this.addressFilter, + existingPeer + }) return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } @@ -151,18 +133,6 @@ export class PersistentStore { } async #saveIfDifferent (peerId: PeerId, peer: PeerPB, existingBuf?: Uint8Array, existingPeer?: Peer): Promise { - // sort fields before write so bytes are consistent - peer.protocols = peer.protocols.sort((a, b) => { - return a.localeCompare(b) - }) - peer.metadata = sortMapByKeys(peer.metadata) - peer.tags = sortMapByKeys(peer.tags) - - // Ed25519 and secp256k1 have their public key embedded in them so no need to duplicate it - if (peerId.type !== 'RSA') { - delete peer.publicKey - } - const buf = PeerPB.encode(peer) if (existingBuf != null && uint8ArrayEquals(buf, existingBuf)) { @@ -182,19 +152,3 @@ export class PersistentStore { } } } - -/** - * In JS maps are ordered by insertion order so create a new map with the - * keys inserted in alphabetical order. - */ -function sortMapByKeys (map: Map): Map { - const output = new Map() - - for (const key of [...map.keys()].sort((a, b) => { - return a.localeCompare(b) - })) { - output.set(key, map.get(key)) - } - - return output -} diff --git a/src/utils/dedupe-addresses.ts b/src/utils/dedupe-addresses.ts index af210ea..f39cd8c 100644 --- a/src/utils/dedupe-addresses.ts +++ b/src/utils/dedupe-addresses.ts @@ -3,6 +3,8 @@ import type { Address as AddressPB } from '../pb/peer.js' import type { Address } from '@libp2p/interface-peer-store' import type { AddressFilter } from '../index.js' import type { PeerId } from '@libp2p/interface-peer-id' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' export async function dedupeFilterAndSortAddresses (peerId: PeerId, filter: AddressFilter, addresses: Array
): Promise { const addressMap = new Map() @@ -17,7 +19,7 @@ export async function dedupeFilterAndSortAddresses (peerId: PeerId, filter: Addr } if (!isMultiaddr(addr.multiaddr)) { - continue + throw new CodeError('Multiaddr was invalid', codes.ERR_INVALID_PARAMETERS) } if (!(await filter(peerId, addr.multiaddr))) { diff --git a/src/utils/dedupe-metadata.ts b/src/utils/dedupe-metadata.ts deleted file mode 100644 index 9999581..0000000 --- a/src/utils/dedupe-metadata.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from '../errors.js' - -/** - * a takes priority - */ -export function dedupeMetadata (a: Map, b?: Map): Map { - if (b == null) { - return a - } - - const output = new Map([...b.entries(), ...a.entries()]) - - for (const key of output.keys()) { - if (typeof key !== 'string') { - throw new CodeError('Peer metadata keys must be strings', codes.ERR_INVALID_PARAMETERS) - } - - if (!(output.get(key) instanceof Uint8Array)) { - throw new CodeError('Peer metadata values must be Uint8Arrays', codes.ERR_INVALID_PARAMETERS) - } - } - - return output -} diff --git a/src/utils/dedupe-tags.ts b/src/utils/dedupe-tags.ts deleted file mode 100644 index 89e38ae..0000000 --- a/src/utils/dedupe-tags.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { Tag as TagPB } from '../pb/peer.js' - -/** - * a takes priority - */ -export function dedupeTags (a: Map, b?: Map): Map { - if (b == null) { - return a - } - - const output = new Map([...b.entries(), ...a.entries()]) - - for (const [key, tag] of output.entries()) { - output.set(key, { - value: tag.value ?? 0, - expiry: tag.expiry - }) - } - - return output -} diff --git a/src/utils/peer-data-to-datastore-peer.ts b/src/utils/peer-data-to-datastore-peer.ts index 95212ba..5603518 100644 --- a/src/utils/peer-data-to-datastore-peer.ts +++ b/src/utils/peer-data-to-datastore-peer.ts @@ -4,7 +4,7 @@ import { codes } from '../errors.js' import { isMultiaddr } from '@multiformats/multiaddr' import type { Peer as PeerPB } from '../pb/peer.js' import { equals as uint8arrayEquals } from 'uint8arrays/equals' -import type { PeerData, TagOptions } from '@libp2p/interface-peer-store' +import type { PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' export function toDatastorePeer (peerId: PeerId, data: PeerData): PeerPB { @@ -50,43 +50,41 @@ export function toDatastorePeer (peerId: PeerId, data: PeerData): PeerPB { // remove invalid metadata if (data.metadata != null) { - if (data.metadata instanceof Map) { - output.metadata = data.metadata - } else { - for (const [key, value] of Object.entries(data.metadata)) { - output.metadata.set(key, value) - } - } + const metadataEntries = data.metadata instanceof Map ? data.metadata.entries() : Object.entries(data.metadata) - for (const key of output.metadata.keys()) { + for (const [key, value] of metadataEntries) { if (typeof key !== 'string') { throw new CodeError('Peer metadata keys must be strings', codes.ERR_INVALID_PARAMETERS) } - if (!(output.metadata.get(key) instanceof Uint8Array)) { + if (value == null) { + continue + } + + if (!(value instanceof Uint8Array)) { throw new CodeError('Peer metadata values must be Uint8Arrays', codes.ERR_INVALID_PARAMETERS) } + + output.metadata.set(key, value) } } if (data.tags != null) { - let tagOptions: Map + const tagsEntries = data.tags instanceof Map ? 
data.tags.entries() : Object.entries(data.tags) - if (data.tags instanceof Map) { - tagOptions = data.tags - } else { - tagOptions = new Map() + for (const [key, value] of tagsEntries) { + if (typeof key !== 'string') { + throw new CodeError('Peer tag keys must be strings', codes.ERR_INVALID_PARAMETERS) + } - for (const [key, value] of Object.entries(data.tags)) { - tagOptions.set(key, value) + if (value == null) { + continue } - } - for (const [key, options] of tagOptions.entries()) { const tag = { name: key, - ttl: options.ttl, - value: options.value ?? 0 + ttl: value.ttl, + value: value.value ?? 0 } if (tag.value < 0 || tag.value > 100) { diff --git a/src/utils/to-peer-pb.ts b/src/utils/to-peer-pb.ts new file mode 100644 index 0000000..b8abd9c --- /dev/null +++ b/src/utils/to-peer-pb.ts @@ -0,0 +1,237 @@ +import type { PeerId } from '@libp2p/interface-peer-id' +import type { Address, Peer, PeerData, TagOptions } from '@libp2p/interface-peer-store' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' +import { equals as uint8arrayEquals } from 'uint8arrays/equals' +import type { AddressFilter } from '../index.js' +import type { Tag, Peer as PeerPB } from '../pb/peer.js' +import { dedupeFilterAndSortAddresses } from './dedupe-addresses.js' + +export interface ToPBPeerOptions { + addressFilter?: AddressFilter + existingPeer?: Peer +} + +export async function toPeerPB (peerId: PeerId, data: Partial, strategy: 'merge' | 'patch', options: ToPBPeerOptions): Promise { + if (data == null) { + throw new CodeError('Invalid PeerData', codes.ERR_INVALID_PARAMETERS) + } + + if (data.publicKey != null && peerId.publicKey != null && !uint8arrayEquals(data.publicKey, peerId.publicKey)) { + throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS) + } + + const existingPeer = options.existingPeer + + if (existingPeer != null && !peerId.equals(existingPeer.id)) { + throw new CodeError('peer id did not match existing peer id', codes.ERR_INVALID_PARAMETERS) + } + + let addresses: Address[] = existingPeer?.addresses ?? [] + let protocols: Set = new Set(existingPeer?.protocols ?? []) + let metadata: Map = existingPeer?.metadata ?? new Map() + let tags: Map = existingPeer?.tags ?? new Map() + let peerRecordEnvelope: Uint8Array | undefined = existingPeer?.peerRecordEnvelope + + // when patching, we replace the original fields with passed values + if (strategy === 'patch') { + if (data.multiaddrs != null || data.addresses != null) { + addresses = [] + + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))) + } + + if (data.addresses != null) { + addresses.push(...data.addresses) + } + } + + if (data.protocols != null) { + protocols = new Set(data.protocols) + } + + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? [...data.metadata.entries()] : Object.entries(data.metadata) + + metadata = createSortedMap(metadataEntries, { + validate: validateMetadata + }) + } + + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? 
[...data.tags.entries()] : Object.entries(data.tags) + + tags = createSortedMap(tagsEntries, { + validate: validateTag, + map: mapTag + }) + } + + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope + } + } + + // when merging, we join the original fields with passed values + if (strategy === 'merge') { + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))) + } + + if (data.addresses != null) { + addresses.push(...data.addresses) + } + + if (data.protocols != null) { + protocols = new Set([...protocols, ...data.protocols]) + } + + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? [...data.metadata.entries()] : Object.entries(data.metadata) + + for (const [key, value] of metadataEntries) { + if (value == null) { + metadata.delete(key) + } else { + metadata.set(key, value) + } + } + + metadata = createSortedMap([...metadata.entries()], { + validate: validateMetadata + }) + } + + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? [...data.tags.entries()] : Object.entries(data.tags) + const mergedTags: Map = new Map(tags) + + for (const [key, value] of tagsEntries) { + if (value == null) { + mergedTags.delete(key) + } else { + mergedTags.set(key, value) + } + } + + tags = createSortedMap([...mergedTags.entries()], { + validate: validateTag, + map: mapTag + }) + } + + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope + } + } + + const output: PeerPB = { + addresses: await dedupeFilterAndSortAddresses(peerId, options.addressFilter ?? (async () => true), addresses), + protocols: [...protocols.values()].sort((a, b) => { + return a.localeCompare(b) + }), + metadata, + tags, + + publicKey: existingPeer?.id.publicKey ?? data.publicKey ?? peerId.publicKey, + peerRecordEnvelope + } + + // Ed25519 and secp256k1 have their public key embedded in them so no need to duplicate it + if (peerId.type !== 'RSA') { + delete output.publicKey + } + + return output +} + +interface CreateSortedMapOptions { + validate: (key: string, value: V) => void + map?: (key: string, value: V) => R +} + +/** + * In JS maps are ordered by insertion order so create a new map with the + * keys inserted in alphabetical order. + */ +function createSortedMap (entries: Array<[string, V | undefined]>, options: CreateSortedMapOptions): Map { + const output = new Map() + + for (const [key, value] of entries) { + if (value == null) { + continue + } + + options.validate(key, value) + } + + for (const [key, value] of entries.sort(([a], [b]) => { + return a.localeCompare(b) + })) { + if (value != null) { + output.set(key, options.map?.(key, value) ?? 
value) + } + } + + return output +} + +function validateMetadata (key: string, value: Uint8Array): void { + if (typeof key !== 'string') { + throw new CodeError('Metadata key must be a string', codes.ERR_INVALID_PARAMETERS) + } + + if (!(value instanceof Uint8Array)) { + throw new CodeError('Metadata value must be a Uint8Array', codes.ERR_INVALID_PARAMETERS) + } +} + +function validateTag (key: string, tag: TagOptions): void { + if (typeof key !== 'string') { + throw new CodeError('Tag name must be a string', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.value != null) { + if (parseInt(`${tag.value}`, 10) !== tag.value) { + throw new CodeError('Tag value must be an integer', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.value < 0 || tag.value > 100) { + throw new CodeError('Tag value must be between 0-100', codes.ERR_INVALID_PARAMETERS) + } + } + + if (tag.ttl != null) { + if (parseInt(`${tag.ttl}`, 10) !== tag.ttl) { + throw new CodeError('Tag ttl must be an integer', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.ttl < 0) { + throw new CodeError('Tag ttl must be between greater than 0', codes.ERR_INVALID_PARAMETERS) + } + } +} + +function mapTag (key: string, tag: any): Tag { + let expiry: bigint | undefined + + if (tag.expiry != null) { + expiry = tag.expiry + } + + if (tag.ttl != null) { + expiry = BigInt(Date.now() + Number(tag.ttl)) + } + + return { + value: tag.value ?? 0, + expiry + } +} diff --git a/test/merge.spec.ts b/test/merge.spec.ts index 90db3eb..bd5587b 100644 --- a/test/merge.spec.ts +++ b/test/merge.spec.ts @@ -99,7 +99,8 @@ describe('merge', () => { addr2 ], metadata: { - foo: Uint8Array.from([0, 1, 2]) + foo: Uint8Array.from([0, 1, 2]), + baz: Uint8Array.from([6, 7, 8]) }, tags: { tag1: { value: 10 } @@ -113,7 +114,8 @@ describe('merge', () => { const original = await peerStore.save(otherPeerId, peer) const updated = await peerStore.merge(otherPeerId, { metadata: { - bar: Uint8Array.from([3, 4, 5]) + bar: Uint8Array.from([3, 4, 5]), + baz: undefined } }) @@ -141,7 +143,8 @@ describe('merge', () => { foo: Uint8Array.from([0, 1, 2]) }, tags: { - tag1: { value: 10 } + tag1: { value: 10 }, + tag3: { value: 50 } }, protocols: [ '/foo/bar' @@ -152,7 +155,8 @@ describe('merge', () => { const original = await peerStore.patch(otherPeerId, peer) const updated = await peerStore.merge(otherPeerId, { tags: { - tag2: { value: 20 } + tag2: { value: 20 }, + tag3: undefined } }) diff --git a/test/utils/dedupe-addresses.spec.ts b/test/utils/dedupe-addresses.spec.ts index a335f52..9e076ef 100644 --- a/test/utils/dedupe-addresses.spec.ts +++ b/test/utils/dedupe-addresses.spec.ts @@ -67,20 +67,6 @@ describe('dedupe-addresses', () => { }]) }) - it('should survive deduping garbage addresses', async () => { - expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ - multiaddr: addr1, - isCertified: false - // @ts-expect-error invalid params - }, {}, 'hello', 5, undefined, { - multiaddr: addr1, - isCertified: false - }])).to.deep.equal([{ - multiaddr: addr1.bytes, - isCertified: false - }]) - }) - it('should filter addresses', async () => { expect(await dedupeFilterAndSortAddresses(peerId, async () => false, [{ multiaddr: addr1, diff --git a/test/utils/dedupe-metadata.spec.ts b/test/utils/dedupe-metadata.spec.ts deleted file mode 100644 index 6ed9f81..0000000 --- a/test/utils/dedupe-metadata.spec.ts +++ /dev/null @@ -1,35 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { dedupeMetadata } from '../../src/utils/dedupe-metadata.js' - 
-describe('dedupe-metadata', () => { - it('should dedupe tags', () => { - expect(dedupeMetadata( - new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]), - new Map([['a-key', Uint8Array.from([4, 5, 6, 7])]]) - )).to.deep.equal( - new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) - ) - }) - - it('should only require one argument', () => { - expect(dedupeMetadata( - new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) - )).to.deep.equal( - new Map([['a-key', Uint8Array.from([0, 1, 2, 3])]]) - ) - }) - - it('should sort tags', () => { - expect(dedupeMetadata( - new Map([['b-key', Uint8Array.from([0, 1, 2, 3])]]), - new Map([['a-key', Uint8Array.from([4, 5, 6, 7])]]) - )).to.deep.equal( - new Map([ - ['a-key', Uint8Array.from([4, 5, 6, 7])], - ['b-key', Uint8Array.from([0, 1, 2, 3])] - ]) - ) - }) -}) diff --git a/test/utils/dedupe-tags.spec.ts b/test/utils/dedupe-tags.spec.ts deleted file mode 100644 index 35c95d3..0000000 --- a/test/utils/dedupe-tags.spec.ts +++ /dev/null @@ -1,35 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { dedupeTags } from '../../src/utils/dedupe-tags.js' - -describe('dedupe-tags', () => { - it('should dedupe tags', () => { - expect(dedupeTags( - new Map([['tag', { value: 20 }]]), - new Map([['tag', { value: 10 }]]) - )).to.deep.equal( - new Map([['tag', { value: 20, expiry: undefined }]]) - ) - }) - - it('should only require one argument', () => { - expect(dedupeTags( - new Map([['tag', { value: 10 }]]) - )).to.deep.equal( - new Map([['tag', { value: 10 }]]) - ) - }) - - it('should sort tags', () => { - expect(dedupeTags( - new Map([['btag', { value: 20 }]]), - new Map([['atag', { value: 10 }]]) - )).to.deep.equal( - new Map([ - ['atag', { value: 10, expiry: undefined }], - ['btag', { value: 20, expiry: undefined }] - ]) - ) - }) -})
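
The changes above wire the atomic `save`/`patch`/`merge` calls through a read/write lock, restore the configurable address filter and allow tag/metadata entries to be removed by passing `undefined`. The sketch below is illustrative only and is not part of the patch series: it assumes the module is consumed as `@libp2p/peer-store`, uses an in-memory datastore, and the filter predicate and the `keep-alive`/`nickname` keys are invented for the example.

```ts
import { PersistentPeerStore } from '@libp2p/peer-store' // package name assumed
import { MemoryDatastore } from 'datastore-core'
import { EventEmitter } from '@libp2p/interfaces/events'
import { createEd25519PeerId } from '@libp2p/peer-id-factory'
import { multiaddr } from '@multiformats/multiaddr'
import type { Libp2pEvents } from '@libp2p/interface-libp2p'

const peerId = await createEd25519PeerId()
const remotePeer = await createEd25519PeerId()

const peerStore = new PersistentPeerStore({
  peerId,
  datastore: new MemoryDatastore(),
  events: new EventEmitter<Libp2pEvents>()
}, {
  // AddressFilter: return true to allow storing the multiaddr for the peer
  addressFilter: async (_remotePeer, ma) => !ma.toString().includes('/p2p-circuit')
})

// save replaces every field of the stored peer in one atomic call
await peerStore.save(remotePeer, {
  multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/4001')],
  protocols: ['/my-protocol/1.0.0'],
  tags: { 'keep-alive': { value: 50 } },
  metadata: { nickname: new TextEncoder().encode('alice') }
})

// patch replaces only the supplied fields and leaves the rest untouched
await peerStore.patch(remotePeer, {
  multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/4002')]
})

// merge deep-merges the supplied fields; an undefined value removes that entry
const peer = await peerStore.merge(remotePeer, {
  tags: {
    'low-priority': { value: 10, ttl: 60_000 },
    'keep-alive': undefined
  }
})

console.info(peer.addresses.map(a => a.multiaddr.toString()), [...peer.tags.keys()])
```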