2019-11-06 15:11:13 +01:00
|
|
|
'use strict'
|
|
|
|
/* eslint-env mocha */
|
|
|
|
|
2020-10-15 15:31:33 +01:00
|
|
|
const { expect } = require('aegir/utils/chai')
|
2022-01-20 12:03:35 +00:00
|
|
|
const all = require('it-all')
|
2019-11-06 15:11:13 +01:00
|
|
|
const PeerStore = require('../../src/peer-store')
|
2021-04-15 09:40:02 +02:00
|
|
|
const { Multiaddr } = require('multiaddr')
|
2021-08-16 21:29:06 +02:00
|
|
|
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
|
2022-01-20 12:03:35 +00:00
|
|
|
const { MemoryDatastore } = require('datastore-core/memory')
|
2019-11-06 15:11:13 +01:00
|
|
|
const peerUtils = require('../utils/creators/peer')
|
2022-01-25 16:27:01 +00:00
|
|
|
const { mockConnectionGater } = require('../utils/mock-connection-gater')
|
2019-11-06 15:11:13 +01:00
|
|
|
|
2021-04-15 09:40:02 +02:00
|
|
|
// Fixture multiaddrs used to populate the address book in the tests below
const addr1 = new Multiaddr('/ip4/127.0.0.1/tcp/8000')
const addr2 = new Multiaddr('/ip4/127.0.0.1/tcp/8001')
const addr3 = new Multiaddr('/ip4/127.0.0.1/tcp/8002')
const addr4 = new Multiaddr('/ip4/127.0.0.1/tcp/8003')

// Fixture protocol identifiers used to populate the proto book
const proto1 = '/protocol1'
const proto2 = '/protocol2'
const proto3 = '/protocol3'
|
2019-11-06 15:11:13 +01:00
|
|
|
|
2022-01-20 12:03:35 +00:00
|
|
|
/**
|
|
|
|
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
|
|
|
|
*/
|
|
|
|
|
2020-04-09 16:07:18 +02:00
|
|
|
// Tests for the datastore-backed PeerStore: peer enumeration, deletion and
// the interaction of the address/proto/key/metadata books.
describe('peer-store', () => {
  const connectionGater = mockConnectionGater()
  let peerIds

  // Create the fixture peer ids once; each suite below instantiates its own
  // PeerStore (with a fresh MemoryDatastore) so state does not leak between tests
  before(async () => {
    peerIds = await peerUtils.createPeerId({
      number: 5
    })
  })

  describe('empty books', () => {
    /** @type {PeerStore} */
    let peerStore

    beforeEach(() => {
      peerStore = new PeerStore({
        peerId: peerIds[4],
        datastore: new MemoryDatastore(),
        addressFilter: connectionGater.filterMultiaddrForPeer
      })
    })

    it('has an empty map of peers', async () => {
      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(0)
    })

    it('deletes a peerId', async () => {
      await peerStore.addressBook.set(peerIds[0], [new Multiaddr('/ip4/127.0.0.1/tcp/4001')])
      await expect(peerStore.has(peerIds[0])).to.eventually.be.true()
      await peerStore.delete(peerIds[0])
      await expect(peerStore.has(peerIds[0])).to.eventually.be.false()
    })

    it('sets the peer\'s public key to the KeyBook', async () => {
      await peerStore.keyBook.set(peerIds[0], peerIds[0].pubKey)
      await expect(peerStore.keyBook.get(peerIds[0])).to.eventually.deep.equal(peerIds[0].pubKey)
    })
  })

  describe('previously populated books', () => {
    /** @type {PeerStore} */
    let peerStore

    beforeEach(async () => {
      peerStore = new PeerStore({
        peerId: peerIds[4],
        datastore: new MemoryDatastore(),
        addressFilter: connectionGater.filterMultiaddrForPeer
      })

      // Add peer0 with { addr1, addr2 } and { proto1 }
      await peerStore.addressBook.set(peerIds[0], [addr1, addr2])
      await peerStore.protoBook.set(peerIds[0], [proto1])

      // Add peer1 with { addr3 } and { proto2, proto3 }
      await peerStore.addressBook.set(peerIds[1], [addr3])
      await peerStore.protoBook.set(peerIds[1], [proto2, proto3])

      // Add peer2 with { addr4 }
      await peerStore.addressBook.set(peerIds[2], [addr4])

      // Add peer3 with { addr4 } and { proto2 }
      await peerStore.addressBook.set(peerIds[3], [addr4])
      await peerStore.protoBook.set(peerIds[3], [proto2])
    })

    it('has peers', async () => {
      const peers = await all(peerStore.getPeers())

      expect(peers.length).to.equal(4)
      expect(peers.map(peer => peer.id.toB58String())).to.have.members([
        peerIds[0].toB58String(),
        peerIds[1].toB58String(),
        peerIds[2].toB58String(),
        peerIds[3].toB58String()
      ])
    })

    it('deletes a stored peer', async () => {
      await peerStore.delete(peerIds[0])

      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(3)
      // `peers` is an array, so `peers.keys()` would only yield numeric
      // indices - compare the remaining peers' base58 ids instead to actually
      // assert the deleted peer is gone
      expect(peers.map(peer => peer.id.toB58String())).to.not.include(peerIds[0].toB58String())
    })

    it('deletes a stored peer which is only on one book', async () => {
      // peer2 was only added to the address book
      await peerStore.delete(peerIds[2])

      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(3)
    })

    it('gets the stored information of a peer in all its books', async () => {
      const peer = await peerStore.get(peerIds[0])
      expect(peer).to.exist()
      expect(peer.protocols).to.have.members([proto1])

      const peerMultiaddrs = peer.addresses.map((mi) => mi.multiaddr)
      expect(peerMultiaddrs).to.have.deep.members([addr1, addr2])

      expect(peer.id.toB58String()).to.equal(peerIds[0].toB58String())
    })

    it('gets the stored information of a peer that is not present in all its books', async () => {
      // peer2 has addresses but no protocols
      const peers = await peerStore.get(peerIds[2])
      expect(peers).to.exist()
      expect(peers.protocols.length).to.eql(0)

      const peerMultiaddrs = peers.addresses.map((mi) => mi.multiaddr)
      expect(peerMultiaddrs).to.have.deep.members([addr4])
    })

    it('can find all the peers supporting a protocol', async () => {
      const peerSupporting2 = []

      for await (const peer of peerStore.getPeers()) {
        if (peer.protocols.includes(proto2)) {
          peerSupporting2.push(peer)
        }
      }

      expect(peerSupporting2.length).to.eql(2)
      expect(peerSupporting2[0].id.toB58String()).to.eql(peerIds[1].toB58String())
      expect(peerSupporting2[1].id.toB58String()).to.eql(peerIds[3].toB58String())
    })

    it('can find all the peers listening on a given address', async () => {
      const peerListening4 = []

      for await (const peer of peerStore.getPeers()) {
        const multiaddrs = peer.addresses.map((mi) => mi.multiaddr.toString())

        if (multiaddrs.includes(addr4.toString())) {
          peerListening4.push(peer)
        }
      }

      expect(peerListening4.length).to.eql(2)
      expect(peerListening4[0].id.toB58String()).to.eql(peerIds[2].toB58String())
      expect(peerListening4[1].id.toB58String()).to.eql(peerIds[3].toB58String())
    })
  })

  describe('peerStore.getPeers', () => {
    /** @type {PeerStore} */
    let peerStore

    beforeEach(() => {
      peerStore = new PeerStore({
        peerId: peerIds[4],
        datastore: new MemoryDatastore(),
        addressFilter: connectionGater.filterMultiaddrForPeer
      })
    })

    it('returns peers if only addresses are known', async () => {
      await peerStore.addressBook.set(peerIds[0], [addr1])

      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(1)

      const peerData = peers[0]
      expect(peerData).to.exist()
      expect(peerData.id).to.exist()
      expect(peerData.addresses).to.have.lengthOf(1)
      expect(peerData.protocols).to.have.lengthOf(0)
      expect(peerData.metadata).to.be.empty()
    })

    it('returns peers if only protocols are known', async () => {
      await peerStore.protoBook.set(peerIds[0], [proto1])

      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(1)

      const peerData = peers[0]
      expect(peerData).to.exist()
      expect(peerData.id).to.exist()
      expect(peerData.addresses).to.have.lengthOf(0)
      expect(peerData.protocols).to.have.lengthOf(1)
      expect(peerData.metadata).to.be.empty()
    })

    it('returns peers if only metadata is known', async () => {
      const metadataKey = 'location'
      const metadataValue = uint8ArrayFromString('earth')
      await peerStore.metadataBook.setValue(peerIds[0], metadataKey, metadataValue)

      const peers = await all(peerStore.getPeers())
      expect(peers.length).to.equal(1)

      const peerData = peers[0]
      expect(peerData).to.exist()
      expect(peerData.id).to.exist()
      expect(peerData.addresses).to.have.lengthOf(0)
      expect(peerData.protocols).to.have.lengthOf(0)
      expect(peerData.metadata).to.exist()
      expect(peerData.metadata.get(metadataKey)).to.equalBytes(metadataValue)
    })
  })
})
|