Mirror of https://github.com/fluencelabs/fluence-js.git, synced 2025-03-15 07:20:49 +00:00
fix: Fire and forget [fixes DXJ-446] (#336)
* Add test for particle and try to fix it
* Fix build
* Fix tests
* Fix stop order
* FluencePeer refactoring
* mplex to yamux
* Small fixes
* Refactor connections
* Update packages/core/js-client/src/jsPeer/FluencePeer.ts (co-authored by shamsartem <shamsartem@gmail.com>)
* Remove redundant checks
* Update packages/core/js-client/src/jsPeer/FluencePeer.ts (co-authored by shamsartem <shamsartem@gmail.com>)
* Suppress very long output of raw data
* Test for parallel execution
* Fix test
* Misc optimization
* Add minRepr
* Fix reset error
* Latest default nox image

Co-authored-by: shamsartem <shamsartem@gmail.com>
This commit is contained in: parent b79039dff3, commit e0a970d86a
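The behavioural core of the fix (visible in the FluencePeer.ts hunks further down) is that stop() now completes the incoming-particle stream and awaits a promise over the whole processing pipeline before Marine and the connection are torn down, so "fire and forget" particles are still finalized. A minimal RxJS sketch of that shutdown pattern, with simplified names and a placeholder processing step rather than the real FluencePeer internals:

    import { Subject, lastValueFrom, concatMap } from 'rxjs';

    // Simplified stand-in for the peer's particle pipeline.
    const incoming = new Subject<string>();

    // lastValueFrom resolves once the source completes and all queued work is flushed.
    const drained = lastValueFrom(
        incoming.pipe(
            concatMap(async (particle) => {
                // ...interpret the particle, call services, send it on...
                return particle;
            }),
        ),
        { defaultValue: undefined },
    );

    async function stop(): Promise<void> {
        incoming.complete(); // no new particles are accepted
        await drained;       // wait for everything already queued to finish
        // ...only now stop the Marine host and close the connection...
    }

Calling stop() therefore resolves only after the queue drains, which is what the test in finalize_particle.aqua exercises.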
.github/workflows/tests.yml (vendored): 2 lines changed
@@ -6,7 +6,7 @@ on:
      nox-image:
        description: "nox image tag"
        type: string
-       default: "fluencelabs/nox:unstable_minimal"
+       default: "fluencelabs/nox:minimal_0.2.5"
      avm-version:
        description: "@fluencelabs/avm version"
        type: string
packages/@tests/aqua/_aqua/finalize_particle.aqua (new file): 6 lines
@@ -0,0 +1,6 @@
import "@fluencelabs/aqua-lib/builtin.aqua"
export test

func test():
    on HOST_PEER_ID:
        Op.noop()
packages/@tests/aqua/src/_aqua/finalize_particle.ts (new file): 78 lines
@@ -0,0 +1,78 @@
/* eslint-disable */
// @ts-nocheck
/**
 *
 * This file is auto-generated. Do not edit manually: changes may be erased.
 * Generated by Aqua compiler: https://github.com/fluencelabs/aqua/.
 * If you find any bugs, please write an issue on GitHub: https://github.com/fluencelabs/aqua/issues
 * Aqua version: 0.12.0
 *
 */
import type { IFluenceClient as IFluenceClient$$, CallParams as CallParams$$ } from '@fluencelabs/js-client';
import {
    v5_callFunction as callFunction$$,
    v5_registerService as registerService$$,
} from '@fluencelabs/js-client';


// Services

// Functions
export const test_script = `
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(xor
(xor
(call -relay- ("op" "noop") [])
(fail %last_error%)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 0])
)
)
`

export function test(
    config?: {ttl?: number}
): Promise<void>;

export function test(
    peer: IFluenceClient$$,
    config?: {ttl?: number}
): Promise<void>;

export function test(...args: any) {

    return callFunction$$(
        args,
        {
            "functionName" : "test",
            "arrow" : {
                "tag" : "arrow",
                "domain" : {
                    "tag" : "labeledProduct",
                    "fields" : {

                    }
                },
                "codomain" : {
                    "tag" : "nil"
                }
            },
            "names" : {
                "relay" : "-relay-",
                "getDataSrv" : "getDataSrv",
                "callbackSrv" : "callbackSrv",
                "responseSrv" : "callbackSrv",
                "responseFnName" : "response",
                "errorHandlingSrv" : "errorHandlingSrv",
                "errorFnName" : "error"
            }
        },
        test_script
    )
}

/* eslint-enable */
@@ -5,7 +5,7 @@
 * This file is auto-generated. Do not edit manually: changes may be erased.
 * Generated by Aqua compiler: https://github.com/fluencelabs/aqua/.
 * If you find any bugs, please write an issue on GitHub: https://github.com/fluencelabs/aqua/issues
- * Aqua version: 0.10.2
+ * Aqua version: 0.12.0
 *
 */
import type { IFluenceClient as IFluenceClient$$, CallParams as CallParams$$ } from '@fluencelabs/js-client';
@@ -328,6 +328,225 @@ export function registerHelloWorld(...args: any) {
}

// Functions
export const resourceTest_script = `
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "label") [] label)
)
(xor
(new $resource_id
(seq
(seq
(seq
(call %init_peer_id% ("peer" "timestamp_sec") [] t)
(xor
(seq
(seq
(call -relay- ("registry" "get_key_bytes") [label [] t [] ""] bytes)
(xor
(call %init_peer_id% ("sig" "sign") [bytes] result)
(fail %last_error%)
)
)
(xor
(match result.$.success false
(ap result.$.error.[0] $error)
)
(new $successful
(seq
(seq
(seq
(seq
(seq
(seq
(ap result.$.signature result_flat)
(call -relay- ("registry" "get_key_id") [label %init_peer_id%] id)
)
(call -relay- ("op" "string_to_b58") [id] k)
)
(call -relay- ("kad" "neighborhood") [k [] []] nodes)
)
(par
(fold nodes n-0
(par
(xor
(xor
(seq
(seq
(seq
(call n-0 ("peer" "timestamp_sec") [] t-0)
(call n-0 ("trust-graph" "get_weight") [%init_peer_id% t-0] weight)
)
(call n-0 ("registry" "register_key") [label [] t [] "" result_flat.$.[0] weight t-0] result-0)
)
(xor
(seq
(match result-0.$.success true
(ap true $successful)
)
(new $-ephemeral-stream-
(new #-ephemeral-canon-
(canon -relay- $-ephemeral-stream- #-ephemeral-canon-)
)
)
)
(seq
(ap result-0.$.error $error)
(new $-ephemeral-stream-
(new #-ephemeral-canon-
(canon -relay- $-ephemeral-stream- #-ephemeral-canon-)
)
)
)
)
)
(null)
)
(fail %last_error%)
)
(next n-0)
)
(never)
)
(null)
)
)
(new $status
(new $result-1
(seq
(seq
(seq
(par
(seq
(seq
(seq
(call -relay- ("math" "sub") [1 1] sub)
(new $successful_test
(seq
(seq
(seq
(call -relay- ("math" "add") [sub 1] successful_incr)
(fold $successful successful_fold_var
(seq
(seq
(ap successful_fold_var $successful_test)
(canon -relay- $successful_test #successful_iter_canon)
)
(xor
(match #successful_iter_canon.length successful_incr
(null)
)
(next successful_fold_var)
)
)
(never)
)
)
(canon -relay- $successful_test #successful_result_canon)
)
(ap #successful_result_canon successful_gate)
)
)
)
(call -relay- ("math" "sub") [1 1] sub-0)
)
(ap "ok" $status)
)
(call -relay- ("peer" "timeout") [6000 "timeout"] $status)
)
(new $status_test
(seq
(seq
(seq
(call -relay- ("math" "add") [0 1] status_incr)
(fold $status status_fold_var
(seq
(seq
(ap status_fold_var $status_test)
(canon -relay- $status_test #status_iter_canon)
)
(xor
(match #status_iter_canon.length status_incr
(null)
)
(next status_fold_var)
)
)
(never)
)
)
(canon -relay- $status_test #status_result_canon)
)
(ap #status_result_canon status_gate)
)
)
)
(xor
(match status_gate.$.[0] "ok"
(ap true $result-1)
)
(ap false $result-1)
)
)
(new $result-1_test
(seq
(seq
(seq
(call -relay- ("math" "add") [0 1] result-1_incr)
(fold $result-1 result-1_fold_var
(seq
(seq
(ap result-1_fold_var $result-1_test)
(canon -relay- $result-1_test #result-1_iter_canon)
)
(xor
(match #result-1_iter_canon.length result-1_incr
(null)
)
(next result-1_fold_var)
)
)
(never)
)
)
(canon -relay- $result-1_test #result-1_result_canon)
)
(ap #result-1_result_canon result-1_gate)
)
)
)
)
)
)
(xor
(match result-1_gate.$.[0] false
(ap "resource wasn't created: timeout exceeded" $error)
)
(ap id $resource_id)
)
)
)
)
)
(fail %last_error%)
)
)
(canon %init_peer_id% $resource_id #-resource_id-fix-0)
)
(ap #-resource_id-fix-0 -resource_id-flat-0)
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 0])
)
)
(canon %init_peer_id% $error #error_canon)
)
(call %init_peer_id% ("callbackSrv" "response") [-resource_id-flat-0 #error_canon])
)
`

export type ResourceTestResult = [string | null, string[]]
export function resourceTest(
@@ -343,220 +562,7 @@ export function resourceTest(

export function resourceTest(...args: any) {

let script = `
(xor
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "label") [] label)
)
(new $resource_id
(new $successful
(seq
(seq
(seq
(call %init_peer_id% ("peer" "timestamp_sec") [] t)
(xor
(seq
(seq
(call -relay- ("registry" "get_key_bytes") [label [] t [] ""] bytes)
(xor
(call %init_peer_id% ("sig" "sign") [bytes] result)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
(xor
(match result.$.success! false
(ap result.$.error.[0]! $error)
)
(seq
(seq
(seq
(seq
(seq
(seq
(ap result.$.signature! result_flat)
(call -relay- ("registry" "get_key_id") [label %init_peer_id%] id)
)
(call -relay- ("op" "string_to_b58") [id] k)
)
(call -relay- ("kad" "neighborhood") [k [] []] nodes)
)
(par
(fold nodes n-0
(par
(seq
(xor
(xor
(seq
(seq
(seq
(call n-0 ("peer" "timestamp_sec") [] t-0)
(call n-0 ("trust-graph" "get_weight") [%init_peer_id% t-0] weight)
)
(call n-0 ("registry" "register_key") [label [] t [] "" result_flat.$.[0]! weight t-0] result-0)
)
(xor
(match result-0.$.success! true
(ap true $successful)
)
(ap result-0.$.error! $error)
)
)
(call n-0 ("op" "noop") [])
)
(seq
(call -relay- ("op" "noop") [])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
)
(call -relay- ("op" "noop") [])
)
(next n-0)
)
(never)
)
(null)
)
)
(new $status
(new $result-1
(seq
(seq
(seq
(par
(seq
(seq
(seq
(call -relay- ("math" "sub") [1 1] sub)
(new $successful_test
(seq
(seq
(seq
(call -relay- ("math" "add") [sub 1] successful_incr)
(fold $successful successful_fold_var
(seq
(seq
(ap successful_fold_var $successful_test)
(canon -relay- $successful_test #successful_iter_canon)
)
(xor
(match #successful_iter_canon.length successful_incr
(null)
)
(next successful_fold_var)
)
)
(never)
)
)
(canon -relay- $successful_test #successful_result_canon)
)
(ap #successful_result_canon successful_gate)
)
)
)
(call -relay- ("math" "sub") [1 1] sub-0)
)
(ap "ok" $status)
)
(call -relay- ("peer" "timeout") [6000 "timeout"] $status)
)
(new $status_test
(seq
(seq
(seq
(call -relay- ("math" "add") [0 1] status_incr)
(fold $status status_fold_var
(seq
(seq
(ap status_fold_var $status_test)
(canon -relay- $status_test #status_iter_canon)
)
(xor
(match #status_iter_canon.length status_incr
(null)
)
(next status_fold_var)
)
)
(never)
)
)
(canon -relay- $status_test #status_result_canon)
)
(ap #status_result_canon status_gate)
)
)
)
(xor
(match status_gate.$.[0]! "ok"
(ap true $result-1)
)
(ap false $result-1)
)
)
(new $result-1_test
(seq
(seq
(seq
(call -relay- ("math" "add") [0 1] result-1_incr)
(fold $result-1 result-1_fold_var
(seq
(seq
(ap result-1_fold_var $result-1_test)
(canon -relay- $result-1_test #result-1_iter_canon)
)
(xor
(match #result-1_iter_canon.length result-1_incr
(null)
)
(next result-1_fold_var)
)
)
(never)
)
)
(canon -relay- $result-1_test #result-1_result_canon)
)
(ap #result-1_result_canon result-1_gate)
)
)
)
)
)
)
(xor
(match result-1_gate.$.[0]! false
(ap "resource wasn't created: timeout exceeded" $error)
)
(ap id $resource_id)
)
)
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 3])
)
)
(canon %init_peer_id% $resource_id #-resource_id-fix-0)
)
(ap #-resource_id-fix-0 -resource_id-flat-0)
)
)
)
)
(xor
(seq
(canon %init_peer_id% $error #error_canon)
(call %init_peer_id% ("callbackSrv" "response") [-resource_id-flat-0 #error_canon])
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 4])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 5])
)
`

return callFunction$$(
args,
{
@@ -602,10 +608,22 @@ export function resourceTest(...args: any) {
            "errorFnName" : "error"
            }
        },
-        script
+        resourceTest_script
    )
}

export const helloTest_script = `
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(xor
(call %init_peer_id% ("hello-world" "hello") ["Fluence user"] hello)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 0])
)
)
(call %init_peer_id% ("callbackSrv" "response") [hello])
)
`

export function helloTest(
@@ -619,21 +637,7 @@ export function helloTest(

export function helloTest(...args: any) {

let script = `
(xor
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("hello-world" "hello") ["Fluence user"] hello)
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [hello])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
`

return callFunction$$(
args,
{
@@ -666,10 +670,40 @@ export function helloTest(...args: any) {
            "errorFnName" : "error"
            }
        },
-        script
+        helloTest_script
    )
}

export const demo_calculation_script = `
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "service_id") [] service_id)
)
(xor
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% (service_id "test_logs") [])
(call %init_peer_id% (service_id "add") [10])
)
(call %init_peer_id% (service_id "multiply") [5])
)
(call %init_peer_id% (service_id "subtract") [8])
)
(call %init_peer_id% (service_id "divide") [6])
)
(call %init_peer_id% (service_id "state") [] res)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 0])
)
)
(call %init_peer_id% ("callbackSrv" "response") [res])
)
`

export function demo_calculation(
@@ -685,39 +719,7 @@ export function demo_calculation(

export function demo_calculation(...args: any) {

let script = `
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "service_id") [] service_id)
)
(call %init_peer_id% (service_id "test_logs") [])
)
(call %init_peer_id% (service_id "add") [10])
)
(call %init_peer_id% (service_id "multiply") [5])
)
(call %init_peer_id% (service_id "subtract") [8])
)
(call %init_peer_id% (service_id "divide") [6])
)
(call %init_peer_id% (service_id "state") [] res)
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [res])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
`

return callFunction$$(
args,
{
@@ -753,10 +755,43 @@ export function demo_calculation(...args: any) {
            "errorFnName" : "error"
            }
        },
-        script
+        demo_calculation_script
    )
}

export const marineTest_script = `
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "wasm64") [] wasm64)
)
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("single_module_srv" "create") [wasm64] serviceResult)
(call %init_peer_id% (serviceResult.$.service_id.[0] "test_logs") [])
)
(call %init_peer_id% (serviceResult.$.service_id.[0] "add") [10])
)
(call %init_peer_id% (serviceResult.$.service_id.[0] "multiply") [5])
)
(call %init_peer_id% (serviceResult.$.service_id.[0] "subtract") [8])
)
(call %init_peer_id% (serviceResult.$.service_id.[0] "divide") [6])
)
(call %init_peer_id% (serviceResult.$.service_id.[0] "state") [] res)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 0])
)
)
(call %init_peer_id% ("callbackSrv" "response") [res])
)
`

export function marineTest(
@@ -772,42 +807,7 @@ export function marineTest(

export function marineTest(...args: any) {

let script = `
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "wasm64") [] wasm64)
)
(call %init_peer_id% ("single_module_srv" "create") [wasm64] serviceResult)
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "test_logs") [])
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "add") [10])
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "multiply") [5])
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "subtract") [8])
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "divide") [6])
)
(call %init_peer_id% (serviceResult.$.service_id.[0]! "state") [] res)
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [res])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
`

return callFunction$$(
args,
{
@@ -843,7 +843,7 @@ export function marineTest(...args: any) {
            "errorFnName" : "error"
            }
        },
-        script
+        marineTest_script
    )
}

@@ -2,6 +2,7 @@ import { fromByteArray } from 'base64-js';
 import { Fluence } from '@fluencelabs/js-client';
 import type { ClientConfig } from '@fluencelabs/js-client';
 import { registerHelloWorld, helloTest, marineTest, resourceTest } from './_aqua/smoke_test.js';
+import { test as particleTest } from './_aqua/finalize_particle.js';
 import { wasm } from './wasmb64.js';

 const relay = {
@@ -67,6 +68,10 @@ export const runTest = async (): Promise<TestResult> => {

        console.log('running marine test...');
        const marine = await marineTest(wasm);

+       console.log('running particle test...');
+       await particleTest();

        console.log('marine test finished, result: ', marine);

        const returnVal = {
@@ -1,7 +1,8 @@
 {
    "extends": "../../../tsconfig.json",
    "compilerOptions": {
-       "outDir": "./dist"
+       "outDir": "./dist",
+       "module": "NodeNext"
    },
    "exclude": ["node_modules", "dist"]
 }
@@ -27,10 +27,10 @@
    "license": "Apache-2.0",
    "dependencies": {
        "@chainsafe/libp2p-noise": "13.0.0",
+       "@chainsafe/libp2p-yamux": "5.0.0",
        "@fluencelabs/interfaces": "workspace:*",
        "@libp2p/crypto": "2.0.3",
        "@libp2p/interface": "0.1.2",
-       "@libp2p/mplex": "9.0.4",
        "@libp2p/peer-id": "3.0.2",
        "@libp2p/peer-id-factory": "3.0.3",
        "@libp2p/websockets": "7.0.4",
@@ -55,20 +55,13 @@ export const makeClientPeerConfig = async (
 };

 export class ClientPeer extends FluencePeer implements IFluenceClient {
-    private relayPeerId: PeerIdB58;
-    private relayConnection: RelayConnection;

     constructor(
         peerConfig: PeerConfig,
         relayConfig: RelayConnectionConfig,
         keyPair: KeyPair,
         marine: IMarineHost,
     ) {
-        const relayConnection = new RelayConnection(relayConfig);
-
-        super(peerConfig, keyPair, marine, new JsServiceHost(), relayConnection);
-        this.relayPeerId = relayConnection.getRelayPeerId();
-        this.relayConnection = relayConnection;
+        super(peerConfig, keyPair, marine, new JsServiceHost(), new RelayConnection(relayConfig));
     }

     getPeerId(): string {
@@ -83,7 +76,7 @@ export class ClientPeer extends FluencePeer implements IFluenceClient {
     connectionStateChangeHandler: (state: ConnectionState) => void = () => {};

     getRelayPeerId(): string {
-        return this.relayPeerId;
+        return this.internals.getRelayPeerId();
     }

     onConnectionStateChange(handler: (state: ConnectionState) => void): ConnectionState {
@@ -115,7 +108,6 @@ export class ClientPeer extends FluencePeer implements IFluenceClient {
         log.trace('connecting to Fluence network');
         this.changeConnectionState('connecting');
         await super.start();
-        await this.relayConnection.start();
         // TODO: check connection (`checkConnection` function) here
         this.changeConnectionState('connected');
         log.trace('connected');
@@ -124,7 +116,6 @@ export class ClientPeer extends FluencePeer implements IFluenceClient {
     async stop(): Promise<void> {
         log.trace('disconnecting from Fluence network');
         this.changeConnectionState('disconnecting');
-        await this.relayConnection.stop();
         await super.stop();
         this.changeConnectionState('disconnected');
         log.trace('disconnected');
@@ -20,7 +20,7 @@ import type { PeerId } from '@libp2p/interface/peer-id';
 import { createLibp2p, Libp2p } from 'libp2p';

 import { noise } from '@chainsafe/libp2p-noise';
-import { mplex } from '@libp2p/mplex';
+import { yamux } from '@chainsafe/libp2p-yamux';
 import { webSockets } from '@libp2p/websockets';
 import { all } from '@libp2p/websockets/filters';
 import { multiaddr } from '@multiformats/multiaddr';
@@ -36,7 +36,8 @@ import { throwIfHasNoPeerId } from '../util/libp2pUtils.js';
 import { IConnection } from './interfaces.js';
 import { IParticle } from '../particle/interfaces.js';
 import { Particle, serializeToString } from '../particle/Particle.js';
-import { IStartable } from '../util/commonTypes.js';
+import { identifyService } from 'libp2p/identify';
+import { pingService } from 'libp2p/ping';

 const log = logger('connection');

@@ -77,7 +78,7 @@ export interface RelayConnectionConfig {
 /**
  * Implementation for JS peers which connects to Fluence through relay node
  */
-export class RelayConnection implements IStartable, IConnection {
+export class RelayConnection implements IConnection {
     private relayAddress: Multiaddr;
     private lib2p2Peer: Libp2p | null = null;

@@ -110,7 +111,7 @@ export class RelayConnection implements IConnection {
                filter: all,
            }),
        ],
-       streamMuxers: [mplex()],
+       streamMuxers: [yamux()],
        connectionEncryption: [noise()],
        connectionManager: {
            dialTimeout: this.config.dialTimeoutMs,
@@ -118,6 +119,12 @@ export class RelayConnection implements IConnection {
        connectionGater: {
            // By default, this function forbids connections to private peers. For example multiaddr with ip 127.0.0.1 isn't allowed
            denyDialMultiaddr: () => Promise.resolve(false)
        },
        services: {
            identify: identifyService({
                runOnConnectionOpen: false,
            }),
            ping: pingService()
        }
    });

@@ -158,15 +165,17 @@ export class RelayConnection implements IConnection {
        const sink = this._connection.streams[0].sink;
        */

        log.trace('sending particle...');
        const stream = await this.lib2p2Peer.dialProtocol(this.relayAddress, PROTOCOL_NAME);
        log.trace('created stream with id ', stream.id);
        const sink = stream.sink;

-       pipe(
+       await pipe(
            [fromString(serializeToString(particle))],
            // @ts-ignore
            encode(),
            sink,
        );
        log.trace('data written to sink');
    }

    private async connect() {
@@ -174,7 +183,7 @@ export class RelayConnection implements IConnection {
            throw new Error('Relay connection is not started');
        }

-       this.lib2p2Peer.handle(
+       await this.lib2p2Peer.handle(
            [PROTOCOL_NAME],
            async ({ connection, stream }) => {
                pipe(
@@ -188,6 +197,7 @@ export class RelayConnection implements IConnection {
                    for await (const msg of source) {
                        try {
                            const particle = Particle.fromString(msg);
                            log.trace('got particle from stream with id %s and particle id %s', stream.id, particle.id);
                            this.particleSource.next(particle);
                        } catch (e) {
                            log.error('error on handling a new incoming message: %j', e);
@@ -16,11 +16,12 @@
 import type { PeerIdB58 } from '@fluencelabs/interfaces';
 import type { Subscribable } from 'rxjs';
 import { IParticle } from '../particle/interfaces.js';
+import { IStartable } from '../util/commonTypes.js';

 /**
  * Interface for connection used in Fluence Peer.
  */
-export interface IConnection {
+export interface IConnection extends IStartable {
     /**
      * Observable that emits particles received from the connection.
      */
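Because IConnection now extends IStartable, the peer can own the lifecycle of any connection (relay, ephemeral, or a test stub) uniformly. A minimal sketch of that contract and a no-op implementation, mirroring the NoopConnection used in the test utilities further down; the names here are simplified, not the library's exact interfaces:

    // Sketch of the lifecycle contract, under the assumption that a connection
    // only needs start/stop plus a way to report its relay peer id.
    interface IStartableSketch {
        start(): Promise<void>;
        stop(): Promise<void>;
    }

    interface IConnectionSketch extends IStartableSketch {
        getRelayPeerId(): string;
    }

    class NoopConnectionSketch implements IConnectionSketch {
        async start(): Promise<void> {}
        async stop(): Promise<void> {}
        getRelayPeerId(): string {
            return 'nothing_here';
        }
    }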
@@ -129,13 +129,21 @@ export interface IEphemeralConnection extends IConnection {
     receiveParticle(particle: Particle): void;
 }

-export class EphemeralConnection implements IConnection, IEphemeralConnection {
+export class EphemeralConnection implements IEphemeralConnection {
     readonly selfPeerId: PeerIdB58;
     readonly connections: Map<PeerIdB58, IEphemeralConnection> = new Map();

     constructor(selfPeerId: PeerIdB58) {
         this.selfPeerId = selfPeerId;
     }

     start(): Promise<void> {
         return Promise.resolve();
     }

     stop(): Promise<void> {
         return Promise.resolve();
     }

     connectToOther(other: IEphemeralConnection) {
         if (other.selfPeerId === this.selfPeerId) {
@@ -25,9 +25,23 @@ import {
     ParticleExecutionStage,
     ParticleQueueItem,
 } from '../particle/Particle.js';
-import { defaultCallParameters } from "@fluencelabs/marine-js/dist/types"
+import { defaultCallParameters } from '@fluencelabs/marine-js/dist/types'
 import { jsonify, isString } from '../util/utils.js';
-import { concatMap, filter, pipe, Subject, tap, Unsubscribable } from 'rxjs';
+import {
+    concatAll,
+    concatMap,
+    filter,
+    from,
+    groupBy,
+    lastValueFrom,
+    mergeAll,
+    mergeMap,
+    Observable,
+    pipe,
+    Subject,
+    tap,
+    Unsubscribable
+} from 'rxjs';
 import { defaultSigGuard, Sig } from '../services/Sig.js';
 import { registerSig } from '../services/_aqua/services.js';
 import { registerSrv } from '../services/_aqua/single-module-srv.js';
@@ -105,6 +119,7 @@ export abstract class FluencePeer {

        this._startParticleProcessing();
        this.isInitialized = true;
+       await this.connection.start();
        log_peer.trace('started Fluence peer');
    }

@@ -114,10 +129,17 @@ export abstract class FluencePeer {
     */
    async stop() {
        log_peer.trace('stopping Fluence peer');

        this._particleSourceSubscription?.unsubscribe();

+       log_peer.trace('Waiting for all particles to finish execution');
+       this._incomingParticles.complete();
+       await this._incomingParticlePromise;
+       log_peer.trace('All particles finished execution');

        this._stopParticleProcessing();
        await this.marineHost.stop();
+       await this.connection.stop();
        this.isInitialized = false;
        log_peer.trace('stopped Fluence peer');
    }
@@ -202,6 +224,7 @@ export abstract class FluencePeer {
    /**
     * Initiates a new particle execution starting from local peer
     * @param particle - particle to start execution of
     * @param onStageChange - callback for reacting on particle state changes
     */
    initiateParticle: (particle: IParticle, onStageChange: (stage: ParticleExecutionStage) => void): void => {
        if (!this.isInitialized) {
@@ -238,10 +261,9 @@ export abstract class FluencePeer {
    // Queues for incoming and outgoing particles

    private _incomingParticles = new Subject<ParticleQueueItem>();
-   private _outgoingParticles = new Subject<ParticleQueueItem & { nextPeerIds: PeerIdB58[] }>();
    private _timeouts: Array<NodeJS.Timeout> = [];
    private _particleSourceSubscription?: Unsubscribable;
-   private _particleQueues = new Map<string, Subject<ParticleQueueItem>>();
+   private _incomingParticlePromise?: Promise<void>;

    // Internal peer state

@@ -280,7 +302,7 @@ export abstract class FluencePeer {
            },
        });

-       this._incomingParticles
+       this._incomingParticlePromise = lastValueFrom(this._incomingParticles
            .pipe(
                tap((item) => {
                    log_particle.debug('id %s. received:', item.particle.id);
@@ -295,47 +317,188 @@ export abstract class FluencePeer {
                    log_particle.trace('id %s. call results: %j', item.particle.id, item.callResults);
                }),
                filterExpiredParticles(this._expireParticle.bind(this)),
            )
            .subscribe((item) => {
                const p = item.particle;
                let particlesQueue = this._particleQueues.get(p.id);
                groupBy(item => item.particle.id),
                mergeMap(group$ => {
                    let prevData: Uint8Array = Buffer.from([]);
                    let firstRun = true;

                    return group$.pipe(
                        concatMap(async (item) => {
                            if (firstRun) {
                                const timeout = setTimeout(() => {
                                    this._expireParticle(item);
                                }, getActualTTL(item.particle));

                if (!particlesQueue) {
                    particlesQueue = this._createParticlesProcessingQueue();
                    this._particleQueues.set(p.id, particlesQueue);
                                this._timeouts.push(timeout);
                                firstRun = false;
                            }

                            if (!this.isInitialized || this.marineHost === undefined) {
                                // If `.stop()` was called return null to stop particle processing immediately
                                return null;
                            }

                    const timeout = setTimeout(() => {
                        this._expireParticle(item);
                    }, getActualTTL(p));
                            // IMPORTANT!
                            // AVM runner execution and prevData <-> newData swapping
                            // MUST happen sequentially (in a critical section).
                            // Otherwise the race might occur corrupting the prevData

                    this._timeouts.push(timeout);
                }
                            log_particle.debug('id %s. sending particle to interpreter', item.particle.id);
                            log_particle.trace('id %s. prevData: %s', item.particle.id, this.decodeAvmData(prevData).slice(0, 50));

                particlesQueue.next(item);
            });
                            const args = serializeAvmArgs(
                                {
                                    initPeerId: item.particle.initPeerId,
                                    currentPeerId: this.keyPair.getPeerId(),
                                    timestamp: item.particle.timestamp,
                                    ttl: item.particle.ttl,
                                    keyFormat: KeyPairFormat.Ed25519,
                                    particleId: item.particle.id,
                                    secretKeyBytes: this.keyPair.toEd25519PrivateKey(),
                                },
                                item.particle.script,
                                prevData,
                                item.particle.data,
                                item.callResults,
                            );

            this._outgoingParticles.subscribe((item) => {
                // Do not send particle after the peer has been stopped
                if (!this.isInitialized) {
                    return;
                }
                            let avmCallResult: InterpreterResult | Error;
                            try {
                                const res = await this.marineHost.callService('avm', 'invoke', args, defaultCallParameters);
                                avmCallResult = deserializeAvmResult(res);
                            } catch (e) {
                                avmCallResult = e instanceof Error ? e : new Error(String(e));
                            }

                log_particle.debug(
                    'id %s. sending particle into network. Next peer ids: %s',
                    item.particle.id,
                    item.nextPeerIds.toString(),
                );
                            if (!(avmCallResult instanceof Error) && avmCallResult.retCode === 0) {
                                const newData = Buffer.from(avmCallResult.data);
                                prevData = newData;
                            }

                this.connection
                    ?.sendParticle(item.nextPeerIds, item.particle)
                    .then(() => {
                        item.onStageChange({ stage: 'sent' });
                            return {
                                ...item,
                                result: avmCallResult,
                            };
                        }),
                        filter((item): item is NonNullable<typeof item> => item !== null),
                        filterExpiredParticles<ParticleQueueItem & {result: Error | InterpreterResult }>(this._expireParticle.bind(this)),
                        mergeMap(async (item) => {
                            // If peer was stopped, do not proceed further
                            if (!this.isInitialized) {
                                return;
                            }

                            // Do not continue if there was an error in particle interpretation
                            if (item.result instanceof Error) {
                                log_particle.error('id %s. interpreter failed: %s', item.particle.id, item.result.message);
                                item.onStageChange({ stage: 'interpreterError', errorMessage: item.result.message });
                                return;
                            }

                            if (item.result.retCode !== 0) {
                                log_particle.error(
                                    'id %s. interpreter failed: retCode: %d, message: %s',
                                    item.particle.id,
                                    item.result.retCode,
                                    item.result.errorMessage,
                                );
                                log_particle.trace('id %s. avm data: %s', item.particle.id, this.decodeAvmData(item.result.data));
                                item.onStageChange({ stage: 'interpreterError', errorMessage: item.result.errorMessage });
                                return;
                            }

                            log_particle.trace(
                                'id %s. interpreter result: retCode: %d, avm data: %s',
                                item.particle.id,
                                item.result.retCode,
                                this.decodeAvmData(item.result.data)
                            );

                            setTimeout(() => {
                                item.onStageChange({ stage: 'interpreted' });
                            }, 0);

                            let connectionPromise: Promise<void> = Promise.resolve();

                            // send particle further if requested
                            if (item.result.nextPeerPks.length > 0) {
                                const newParticle = cloneWithNewData(item.particle, Buffer.from(item.result.data));

                                // Do not send particle after the peer has been stopped
                                if (!this.isInitialized) {
                                    return;
                                }

                                log_particle.debug(
                                    'id %s. sending particle into network. Next peer ids: %s',
                                    newParticle.id,
                                    item.result.nextPeerPks.toString(),
                                );

                                connectionPromise = this.connection
                                    ?.sendParticle(item.result.nextPeerPks, newParticle)
                                    .then(() => {
                                        log_particle.trace('id %s. send successful', newParticle.id);
                                        item.onStageChange({ stage: 'sent' });
                                    })
                                    .catch((e: any) => {
                                        log_particle.error('id %s. send failed %j', newParticle.id, e);
                                        item.onStageChange({ stage: 'sendingError', errorMessage: e.toString() });
                                    });
                            }

                            // execute call requests if needed
                            // and put particle with the results back to queue
                            if (item.result.callRequests.length > 0) {
                                for (const [key, cr] of item.result.callRequests) {
                                    const req = {
                                        fnName: cr.functionName,
                                        args: cr.arguments,
                                        serviceId: cr.serviceId,
                                        tetraplets: cr.tetraplets,
                                        particleContext: getParticleContext(item.particle),
                                    };

                                    this._execSingleCallRequest(req)
                                        .catch((err): CallServiceResult => {
                                            if (err instanceof ServiceError) {
                                                return {
                                                    retCode: ResultCodes.error,
                                                    result: err.message,
                                                };
                                            }

                                            return {
                                                retCode: ResultCodes.error,
                                                result: `Service call failed. fnName="${req.fnName}" serviceId="${
                                                    req.serviceId
                                                }" error: ${err.toString()}`,
                                            };
                                        })
                                        .then((res) => {
                                            const serviceResult = {
                                                result: jsonify(res.result),
                                                retCode: res.retCode,
                                            };

                                            const newParticle = cloneWithNewData(item.particle, Buffer.from([]));
                                            this._incomingParticles.next({
                                                ...item,
                                                particle: newParticle,
                                                callResults: [[key, serviceResult]],
                                            });
                                        });
                                }
                            } else {
                                item.onStageChange({ stage: 'localWorkDone' });
                            }

                            return connectionPromise;
                        }),

                    )
                })
                    .catch((e: any) => {
                        log_particle.error('id %s. send failed %j', item.particle.id, e);
                        item.onStageChange({ stage: 'sendingError', errorMessage: e.toString() });
                    });
            });
        ), { defaultValue: undefined });
    }

    private _expireParticle(item: ParticleQueueItem) {
@@ -345,179 +508,16 @@ export abstract class FluencePeer {
            item.particle.id,
            item.particle.ttl,
        );

        this._particleQueues.delete(particleId);

        this.jsServiceHost.removeParticleScopeHandlers(particleId);

        item.onStageChange({ stage: 'expired' });
    }


    private decodeAvmData(data: Uint8Array) {
        return new TextDecoder().decode(data.buffer);
    }

    private _createParticlesProcessingQueue() {
        const particlesQueue = new Subject<ParticleQueueItem>();
        let prevData: Uint8Array = Buffer.from([]);

        particlesQueue
            .pipe(
                filterExpiredParticles(this._expireParticle.bind(this)),

                concatMap(async (item) => {
                    if (!this.isInitialized || this.marineHost === undefined) {
                        // If `.stop()` was called return null to stop particle processing immediately
                        return null;
                    }

                    // IMPORTANT!
                    // AVM runner execution and prevData <-> newData swapping
                    // MUST happen sequentially (in a critical section).
                    // Otherwise the race might occur corrupting the prevData

                    log_particle.debug('id %s. sending particle to interpreter', item.particle.id);
                    log_particle.trace('id %s. prevData: %s', item.particle.id, this.decodeAvmData(prevData));

                    const args = serializeAvmArgs(
                        {
                            initPeerId: item.particle.initPeerId,
                            currentPeerId: this.keyPair.getPeerId(),
                            timestamp: item.particle.timestamp,
                            ttl: item.particle.ttl,
                            keyFormat: KeyPairFormat.Ed25519,
                            particleId: item.particle.id,
                            secretKeyBytes: this.keyPair.toEd25519PrivateKey(),
                        },
                        item.particle.script,
                        prevData,
                        item.particle.data,
                        item.callResults,
                    );

                    let avmCallResult: InterpreterResult | Error;
                    try {
                        const res = await this.marineHost.callService('avm', 'invoke', args, defaultCallParameters);
                        avmCallResult = deserializeAvmResult(res);
                    } catch (e) {
                        avmCallResult = e instanceof Error ? e : new Error(String(e));
                    }

                    if (!(avmCallResult instanceof Error) && avmCallResult.retCode === 0) {
                        const newData = Buffer.from(avmCallResult.data);
                        prevData = newData;
                    }

                    return {
                        ...item,
                        result: avmCallResult,
                    };
                }),
            )
            .subscribe((item) => {
                // If peer was stopped, do not proceed further
                if (item === null || !this.isInitialized) {
                    return;
                }

                // Do not proceed further if the particle is expired
                if (hasExpired(item.particle)) {
                    return;
                }

                // Do not continue if there was an error in particle interpretation
                if (item.result instanceof Error) {
                    log_particle.error('id %s. interpreter failed: %s', item.particle.id, item.result.message);
                    item.onStageChange({ stage: 'interpreterError', errorMessage: item.result.message });
                    return;
                }

                if (item.result.retCode !== 0) {
                    log_particle.error(
                        'id %s. interpreter failed: retCode: %d, message: %s',
                        item.particle.id,
                        item.result.retCode,
                        item.result.errorMessage,
                    );
                    log_particle.trace('id %s. avm data: %s', item.particle.id, this.decodeAvmData(item.result.data));
                    item.onStageChange({ stage: 'interpreterError', errorMessage: item.result.errorMessage });
                    return;
                }

                log_particle.trace(
                    'id %s. interpreter result: retCode: %d, avm data: %s',
                    item.particle.id,
                    item.result.retCode,
                    this.decodeAvmData(item.result.data)
                );

                setTimeout(() => {
                    item.onStageChange({ stage: 'interpreted' });
                }, 0);

                // send particle further if requested
                if (item.result.nextPeerPks.length > 0) {
                    const newParticle = cloneWithNewData(item.particle, Buffer.from(item.result.data));
                    this._outgoingParticles.next({
                        ...item,
                        particle: newParticle,
                        nextPeerIds: item.result.nextPeerPks,
                    });
                }

                // execute call requests if needed
                // and put particle with the results back to queue
                if (item.result.callRequests.length > 0) {
                    for (const [key, cr] of item.result.callRequests) {
                        const req = {
                            fnName: cr.functionName,
                            args: cr.arguments,
                            serviceId: cr.serviceId,
                            tetraplets: cr.tetraplets,
                            particleContext: getParticleContext(item.particle),
                        };

                        if (hasExpired(item.particle)) {
                            // just in case do not call any services if the particle is already expired
                            return;
                        }
                        this._execSingleCallRequest(req)
                            .catch((err): CallServiceResult => {
                                if (err instanceof ServiceError) {
                                    return {
                                        retCode: ResultCodes.error,
                                        result: err.message,
                                    };
                                }

                                return {
                                    retCode: ResultCodes.error,
                                    result: `Service call failed. fnName="${req.fnName}" serviceId="${
                                        req.serviceId
                                    }" error: ${err.toString()}`,
                                };
                            })
                            .then((res) => {
                                const serviceResult = {
                                    result: jsonify(res.result),
                                    retCode: res.retCode,
                                };

                                const newParticle = cloneWithNewData(item.particle, Buffer.from([]));
                                particlesQueue.next({
                                    ...item,
                                    particle: newParticle,
                                    callResults: [[key, serviceResult]],
                                });
                            });
                    }
                } else {
                    item.onStageChange({ stage: 'localWorkDone' });
                }
            });

        return particlesQueue;
    }

    private async _execSingleCallRequest(req: CallServiceData): Promise<CallServiceResult> {
        const particleId = req.particleContext.particleId;
        log_particle.trace('id %s. executing call service handler %j', particleId, req);
@@ -552,17 +552,16 @@ export abstract class FluencePeer {
        this._timeouts.forEach((timeout) => {
            clearTimeout(timeout);
        });
-       this._particleQueues.clear();
    }
 }

-function filterExpiredParticles(onParticleExpiration: (item: ParticleQueueItem) => void) {
+function filterExpiredParticles<T extends ParticleQueueItem>(onParticleExpiration: (item: T) => void) {
    return pipe(
-       tap((item: ParticleQueueItem) => {
+       tap((item: T) => {
            if (hasExpired(item.particle)) {
                onParticleExpiration(item);
            }
        }),
-       filter((x: ParticleQueueItem) => !hasExpired(x.particle)),
+       filter((x) => !hasExpired(x.particle)),
    );
 }
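The refactored pipeline above keeps AVM execution and the prevData swap in a critical section per particle by combining groupBy on the particle id with concatMap inside each group, while mergeMap lets different particles progress in parallel. A standalone sketch of that RxJS pattern with made-up item objects, for illustration only:

    import { Subject, groupBy, mergeMap, concatMap } from 'rxjs';

    interface Item { particleId: string; payload: number; }

    const queue = new Subject<Item>();

    queue.pipe(
        // One inner observable per particle id.
        groupBy((item) => item.particleId),
        // Groups run in parallel relative to each other...
        mergeMap((group$) =>
            group$.pipe(
                // ...but items of the same particle run strictly one after another,
                // which is where the real code performs the AVM call and prevData swap.
                concatMap(async (item) => `${item.particleId}:${item.payload}`),
            ),
        ),
    ).subscribe((res) => console.log(res));

    queue.next({ particleId: 'a', payload: 1 });
    queue.next({ particleId: 'b', payload: 1 });
    queue.next({ particleId: 'a', payload: 2 }); // waits for a:1, but not for b:1

This is also what the par.spec.ts test below exercises: two branches of a par instruction must complete in roughly one timeout, not two.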
packages/core/js-client/src/jsPeer/__test__/par.spec.ts (new file): 85 lines
@@ -0,0 +1,85 @@
/*
 * Copyright 2023 Fluence Labs Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { describe, expect, it } from 'vitest';
import { registerHandlersHelper, withPeer } from '../../util/testUtils.js';
import { handleTimeout } from '../../particle/Particle.js';
import { CallServiceData, ResultCodes } from '../../jsServiceHost/interfaces.js';

describe('FluencePeer flow tests', () => {
    it('should execute par instruction in parallel', async function () {
        await withPeer(async (peer) => {
            const res = await new Promise<any>((resolve, reject) => {
                const script = `
                (par
                    (seq
                        (call %init_peer_id% ("flow" "timeout") [1000 "test1"] res1)
                        (call %init_peer_id% ("callback" "callback1") [res1])
                    )
                    (seq
                        (call %init_peer_id% ("flow" "timeout") [1000 "test2"] res2)
                        (call %init_peer_id% ("callback" "callback2") [res2])
                    )
                )
                `;

                const particle = peer.internals.createNewParticle(script);

                peer.internals.regHandler.forParticle(particle.id, 'flow', 'timeout', (req: CallServiceData) => {
                    const [timeout, message] = req.args;

                    return new Promise((resolve) => {
                        setTimeout(() => {
                            const res = {
                                result: message,
                                retCode: ResultCodes.success,
                            };
                            resolve(res);
                        }, timeout);
                    });
                });

                if (particle instanceof Error) {
                    return reject(particle.message);
                }

                const values: any[] = [];

                registerHandlersHelper(peer, particle, {
                    callback: {
                        callback1: (args: any) => {
                            const [val] = args;
                            values.push(val);
                            if (values.length === 2) {
                                resolve(values);
                            }
                        },
                        callback2: (args: any) => {
                            const [val] = args;
                            values.push(val);
                            if (values.length === 2) {
                                resolve(values);
                            }
                        },
                    },
                });

                peer.internals.initiateParticle(particle, handleTimeout(reject));
            });

            await expect(res).toEqual(expect.arrayContaining(["test1", "test1"]));
        });
    }, 1500);
});
@@ -80,6 +80,14 @@ export const compileAqua = async (aquaFile: string): Promise<CompiledFile> => {
 };

 class NoopConnection implements IConnection {
     start(): Promise<void> {
         return Promise.resolve();
     }

     stop(): Promise<void> {
         return Promise.resolve();
     }

     getRelayPeerId(): string {
         return 'nothing_here';
     }
pnpm-lock.yaml (generated): 70 lines changed
@@ -143,6 +143,9 @@ importers:
      '@chainsafe/libp2p-noise':
        specifier: 13.0.0
        version: 13.0.0
+     '@chainsafe/libp2p-yamux':
+       specifier: 5.0.0
+       version: 5.0.0
      '@fluencelabs/interfaces':
        specifier: workspace:*
        version: link:../interfaces
@@ -152,9 +155,6 @@ importers:
      '@libp2p/interface':
        specifier: 0.1.2
        version: 0.1.2
-     '@libp2p/mplex':
-       specifier: 9.0.4
-       version: 9.0.4
      '@libp2p/peer-id':
        specifier: 3.0.2
        version: 3.0.2
@@ -1114,6 +1114,16 @@ packages:
      '@babel/helper-plugin-utils': 7.22.5
    dev: false

  /@babel/plugin-syntax-flow@7.22.5(@babel/core@7.22.10):
    resolution: {integrity: sha512-9RdCl0i+q0QExayk2nOS7853w08yLucnnPML6EN9S8fgMPVtdLDCdx/cOQ/i44Lb9UeQX9A35yaqBBOMMZxPxQ==}
    engines: {node: '>=6.9.0'}
    peerDependencies:
      '@babel/core': ^7.0.0-0
    dependencies:
      '@babel/core': 7.22.10
      '@babel/helper-plugin-utils': 7.22.5
    dev: false

  /@babel/plugin-syntax-flow@7.22.5(@babel/core@7.22.5):
    resolution: {integrity: sha512-9RdCl0i+q0QExayk2nOS7853w08yLucnnPML6EN9S8fgMPVtdLDCdx/cOQ/i44Lb9UeQX9A35yaqBBOMMZxPxQ==}
    engines: {node: '>=6.9.0'}
@@ -1198,6 +1208,16 @@ packages:
      '@babel/helper-plugin-utils': 7.22.5
    dev: false

  /@babel/plugin-syntax-jsx@7.22.5(@babel/core@7.22.10):
    resolution: {integrity: sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg==}
    engines: {node: '>=6.9.0'}
    peerDependencies:
      '@babel/core': ^7.0.0-0
    dependencies:
      '@babel/core': 7.22.10
      '@babel/helper-plugin-utils': 7.22.5
    dev: false

  /@babel/plugin-syntax-jsx@7.22.5(@babel/core@7.22.5):
    resolution: {integrity: sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg==}
    engines: {node: '>=6.9.0'}
@@ -2313,6 +2333,20 @@ packages:
      '@babel/plugin-transform-react-jsx': 7.22.5(@babel/core@7.22.5)
    dev: false

  /@babel/plugin-transform-react-jsx@7.22.5(@babel/core@7.22.10):
    resolution: {integrity: sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA==}
    engines: {node: '>=6.9.0'}
    peerDependencies:
      '@babel/core': ^7.0.0-0
    dependencies:
      '@babel/core': 7.22.10
      '@babel/helper-annotate-as-pure': 7.22.5
      '@babel/helper-module-imports': 7.22.5
      '@babel/helper-plugin-utils': 7.22.5
      '@babel/plugin-syntax-jsx': 7.22.5(@babel/core@7.22.10)
      '@babel/types': 7.22.5
    dev: false

  /@babel/plugin-transform-react-jsx@7.22.5(@babel/core@7.22.5):
    resolution: {integrity: sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA==}
    engines: {node: '>=6.9.0'}
@@ -2964,6 +2998,21 @@ packages:
      - supports-color
    dev: true

  /@chainsafe/libp2p-yamux@5.0.0:
    resolution: {integrity: sha512-aWTnBPR2hJt0A2y579sMtZVB6IqgSSHlZ6Eg+WDxNZQ0zcexafuruZQDj+z3FUTNPz+E8IeuyCi7tjI4IEehjw==}
    engines: {node: '>=16.0.0', npm: '>=7.0.0'}
    dependencies:
      '@libp2p/interface': 0.1.2
      '@libp2p/logger': 3.0.2
      abortable-iterator: 5.0.1
      it-foreach: 2.0.4
      it-pipe: 3.0.1
      it-pushable: 3.2.1
      uint8arraylist: 2.4.3
    transitivePeerDependencies:
      - supports-color
    dev: false

  /@chainsafe/netmask@2.0.0:
    resolution: {integrity: sha512-I3Z+6SWUoaljh3TBzCnCxjlUyN8tA+NAk5L6m9IxvCf1BENQTePzPMis97CoN/iMW1St3WN+AWCCRp+TTBRiDg==}
    dependencies:
@@ -4581,6 +4630,7 @@ packages:
      uint8arrays: 4.0.6
    transitivePeerDependencies:
      - supports-color
    dev: true

  /@libp2p/multistream-select@4.0.2:
    resolution: {integrity: sha512-Ss3kPD+1Z8RFLUT+oN9I2ynEtp/Yj2+rOngU1XjIxustg1nt5lq0kk9hvWJyBexzmuML0xCknNjUXovpRbFPgQ==}
@@ -7993,6 +8043,7 @@ packages:
    dependencies:
      lodash: 4.17.21
      platform: 1.3.6
    dev: true

  /bfj@7.0.2:
    resolution: {integrity: sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw==}
@@ -10399,8 +10450,8 @@ packages:
      '@babel/plugin-transform-react-jsx': ^7.14.9
      eslint: ^8.1.0
    dependencies:
-     '@babel/plugin-syntax-flow': 7.22.5(@babel/core@7.22.5)
-     '@babel/plugin-transform-react-jsx': 7.22.5(@babel/core@7.22.5)
+     '@babel/plugin-syntax-flow': 7.22.5(@babel/core@7.22.10)
+     '@babel/plugin-transform-react-jsx': 7.22.5(@babel/core@7.22.10)
      eslint: 8.43.0
      lodash: 4.17.21
      string-natural-compare: 3.0.1
@@ -13168,6 +13219,7 @@ packages:
    dependencies:
      p-defer: 4.0.0
      uint8arraylist: 2.4.3
    dev: true

  /it-buffer@0.1.3:
    resolution: {integrity: sha512-9a2/9SYVwG7bcn3tpRDR4bXbtuMLXnDK48KVC+GXiQg97ZOOdWz2nIITBsOQ19b+gj01Rw8RNwtiLDLI8P8oiQ==}
@@ -13222,6 +13274,12 @@ packages:
    resolution: {integrity: sha512-ZLxL651N5w5SL/EIIcrXELgYrrkuEKj/TErG93C4lr6lNZziKsf338ljSG85PjQfu7Frg/1wESl5pLrPSFXI9g==}
    dev: true

  /it-foreach@2.0.4:
    resolution: {integrity: sha512-txxcoc09g+KdLyOapxAuB12H9zUb2FuZC/TqSXRT+YR0T5fHnvcDIhspgvx/e/HiPKlKjOR8onA0qtuiAtcXqg==}
    dependencies:
      it-peekable: 3.0.2
    dev: false

  /it-glob@0.0.14:
    resolution: {integrity: sha512-TKKzs9CglbsihSpcwJPXN5DBUssu4akRzPlp8QJRCoLrKoaOpyY2V1qDlxx+UMivn0i114YyTd4AawWl7eqIdw==}
    dependencies:
@@ -16483,6 +16541,7 @@ packages:

  /platform@1.3.6:
    resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==}
    dev: true

  /portfinder@1.0.32:
    resolution: {integrity: sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==}
@@ -17638,6 +17697,7 @@ packages:

  /rate-limiter-flexible@2.4.2:
    resolution: {integrity: sha512-rMATGGOdO1suFyf/mI5LYhts71g1sbdhmd6YvdiXO2gJnd42Tt6QS4JUKJKSWVVkMtBacm6l40FR7Trjo6Iruw==}
    dev: true

  /raw-body@2.5.1:
    resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==}