diff --git a/Cargo.toml b/Cargo.toml index 195e3e7..9ff0700 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,6 +31,10 @@ path = "src/lib.rs" name = "it_generator" path = "src/it_generator.rs" +[[bin]] +name = "leak_test" +path = "leak_test/main.rs" + [[bin]] name = "test" path = "src/test.rs" @@ -40,5 +44,5 @@ marine-rs-sdk = "0.7.0" bytesize = "1.1.0" [dev-dependencies] -marine-rs-sdk-test = "0.2.0" temporary = "0.6" +marine-rs-sdk-test = "0.8.2" diff --git a/leak_launcher/Cargo.toml b/leak_launcher/Cargo.toml new file mode 100644 index 0000000..77e7d3a --- /dev/null +++ b/leak_launcher/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "leak_launcher" +version = "0.7.0" +license = "Apache-2.0/MIT" +authors = [ + "Daniel Dulaney ", + "Ivan Stankovic ", + "Ivan Ukhov ", + "Jake Kerr ", + "Jayson Reis ", + "Pierre Krieger ", + "Sean Klein ", + "Sophie Tauchert <999eagle@999eagle.moe>", + "Tomoki Aonuma ", + "Yorhel ", +] +description = "The package provides an interface to SQLite." +documentation = "https://docs.rs/sqlite" +homepage = "https://github.com/stainless-steel/sqlite" +repository = "https://github.com/stainless-steel/sqlite" +readme = "README.md" +categories = ["api-bindings", "database"] +keywords = ["database"] +edition = "2018" + +[[bin]] +name = "leak_launcher" +path = "main.rs" + +[dependencies] +marine-rs-sdk = "0.7.0" +fluence-app-service = "0.23.1" +toml = "0.7.2" +serde = "1.0.152" +serde_json = "1.0.93" diff --git a/leak_launcher/Config.toml b/leak_launcher/Config.toml new file mode 100644 index 0000000..ef43f93 --- /dev/null +++ b/leak_launcher/Config.toml @@ -0,0 +1,15 @@ +modules_dir = "./artifacts/" + +[[module]] + name = "sqlite3" + mem_pages_count = 100 + logger_enabled = false + + [module.wasi] + preopened_files = ["./tmp"] + mapped_dirs = { "tmp" = "./tmp" } + +[[module]] + name = "leak_test" + mem_pages_count = 1 + logger_enabled = false diff --git a/leak_launcher/artifacts/leak_test.wasm b/leak_launcher/artifacts/leak_test.wasm new file mode 
100755 index 0000000..4c77cfc Binary files /dev/null and b/leak_launcher/artifacts/leak_test.wasm differ diff --git a/leak_launcher/artifacts/sqlite3.wasm b/leak_launcher/artifacts/sqlite3.wasm new file mode 100755 index 0000000..80c833e Binary files /dev/null and b/leak_launcher/artifacts/sqlite3.wasm differ diff --git a/leak_launcher/main.rs b/leak_launcher/main.rs new file mode 100644 index 0000000..03db0bb --- /dev/null +++ b/leak_launcher/main.rs @@ -0,0 +1,48 @@ +use std::convert::TryInto; +use fluence_app_service::AppService; +use fluence_app_service::TomlMarineConfig; +use fluence_app_service::AppServiceConfig; +use serde_json::json; + +fn main() { + + let config = TomlMarineConfig::load("./Config.toml").unwrap(); + let config = AppServiceConfig { + service_base_dir: std::path::PathBuf::new(), + marine_config: config.try_into().unwrap() + }; + + let service_name = "test_service"; + let mut service = AppService::new(config, service_name, <_>::default()).unwrap(); + + let db_path = "./tmp/db.sqlite"; + service.call("create", json!(db_path), <_>::default()).unwrap(); + + for i in 0..500 { + if i % 50 == 0 { + println!("insert_1 {}:\n{}", i, service.module_memory_stats()); + } + service.call("insert_1", json!(db_path), <_>::default()).unwrap(); + } + + for i in 0..500 { + if i % 50 == 0 { + println!("insert_2: {} - {}", i, service.module_memory_stats()); + } + service.call("insert_2", json!(db_path), <_>::default()).unwrap(); + } + + for i in 0..500 { + if i % 50 == 0 { + println!("select_1 {}:\n{}", i, service.module_memory_stats()); + } + service.call("select_1", json!(db_path), <_>::default()).unwrap(); + } + + for i in 0..500 { + if i % 50 == 0 { + println!("select_2 {}:\n{}", i, service.module_memory_stats()); + } + service.call("select_2", json!(db_path), <_>::default()).unwrap(); + } +} diff --git a/leak_launcher/test_service/tmp/db.sqlite b/leak_launcher/test_service/tmp/db.sqlite new file mode 100644 index 0000000..5826464 Binary files /dev/null and 
b/leak_launcher/test_service/tmp/db.sqlite differ diff --git a/leak_test/main.rs b/leak_test/main.rs new file mode 100644 index 0000000..8a81679 --- /dev/null +++ b/leak_test/main.rs @@ -0,0 +1,144 @@ +use marine_rs_sdk::marine; +use marine_sqlite_connector::State; + +fn main() {} + +#[marine] +pub fn create(path: String) { + let conn_create = marine_sqlite_connector::open(&path).expect("Open database connection"); + + conn_create + .execute( + " + CREATE TABLE IF NOT EXISTS trigger_config ( + -- clock config + start_sec INTEGER, end_sec INTEGER, period_sec INTEGER, + -- connection pool config + connect INTEGER, disconnect INTEGER, + -- blockchain config + start_block INTEGER, end_block INTEGER + ); + CREATE TABLE IF NOT EXISTS relay (relay TEXT); + -- CREATE TABLE IF NOT EXISTS kv (key TEXT, string TEXT, u32 INTEGER, list_index INTEGER); + CREATE TABLE IF NOT EXISTS kv ( + key TEXT NOT NULL, + string TEXT, + u32 INTEGER, + list_index INTEGER DEFAULT -1, + PRIMARY KEY(key, list_index) + ); + -- particles stored in the database, LRU-like + CREATE TABLE IF NOT EXISTS particles (particle_id TEXT PRIMARY KEY, timestamp INTEGER); + -- errors happened in particles + CREATE TABLE IF NOT EXISTS errors ( + particle_id TEXT, + timestamp INTEGER, + error_idx INTEGER, + error_code INTEGER, + instruction TEXT, + message TEXT, + peer_id TEXT + ); + CREATE TABLE IF NOT EXISTS particle_count (parameter TEXT PRIMARY KEY, value INTEGER NOT NULL); + -- maximum number of particles to store information about + INSERT OR REPLACE INTO particle_count VALUES ('max_particles', 50); + -- current count of stored particles + INSERT OR REPLACE INTO particle_count VALUES ('count_particles', 0); + -- if there are more than `max_particles` particles, delete the oldest one + CREATE TRIGGER IF NOT EXISTS errors_limit_trigger AFTER INSERT ON particles + FOR EACH ROW + -- if limit is reached + WHEN (SELECT value FROM particle_count WHERE parameter = 'count_particles') + > (SELECT value FROM 
particle_count WHERE parameter = 'max_particles') + BEGIN + -- delete all errors for the oldest particle + DELETE FROM particles + -- take oldest by 'timestamp' column + WHERE particle_id = (SELECT particle_id FROM particles ORDER BY timestamp LIMIT 1); + END; + -- when a particle is removed, remove its errors + CREATE TRIGGER IF NOT EXISTS clear_errors AFTER DELETE ON particles + FOR EACH ROW + BEGIN + -- remove all errors for that particle + DELETE FROM errors WHERE particle_id = OLD.particle_id; + -- decrement number of particles + UPDATE particle_count SET value = value - 1 WHERE parameter = 'count_particles'; + END; + -- when a particle is inserted, incremenet the counter + CREATE TRIGGER IF NOT EXISTS particles_count_insert_trigger AFTER INSERT ON particles + FOR EACH ROW + BEGIN + UPDATE particle_count SET value = value + 1 WHERE parameter = 'count_particles'; + END; + -- when a particle error is inserted, store particle id if it wasn't there yet + CREATE TRIGGER IF NOT EXISTS store_particle_id AFTER INSERT ON errors + FOR EACH ROW + BEGIN + INSERT OR IGNORE INTO particles (particle_id, timestamp) VALUES (NEW.particle_id, NEW.timestamp); + END; + ", + ) + .expect("running schema queries"); +} + +#[marine] +fn insert_1(path: String) { + let conn = marine_sqlite_connector::open(path).expect("Open database connection"); + + let key = "some"; + let value = "other"; + let mut statement = conn + .prepare("INSERT OR REPLACE INTO kv (key, string) VALUES (?, ?)") + .expect("prep rand 0..3"); + statement.bind(1, key).expect("bind 1"); + statement.bind(2, value).expect("bind 2"); + statement.next().expect("next"); +} + +#[marine] +fn insert_2(path: String) { + let conn = marine_sqlite_connector::open(path).expect("Open database connection"); + + let mut statement = conn + .prepare("INSERT OR REPLACE INTO kv (key, u32) VALUES (?, ?)") + .expect("prep rand 6..7"); + statement.bind(1, 42).expect("6..7 bind"); + statement.bind(2, 42).expect("6..7 bind"); + 
statement.next().expect("4..5 bind"); + if let State::Row = statement.next().expect("6..7 next") { + statement.read::<i64>(0).expect("6..7 read"); + } +} + +#[marine] +fn select_1(path: String) { + let conn = marine_sqlite_connector::open(path).expect("Open database connection"); + + let mut statement = conn + .prepare("SELECT string FROM kv WHERE key = ?") + .expect("prep rand 4..5"); + let key = "some"; + statement.bind(1, key).expect("4..5 bind"); + if let State::Row = statement.next().expect("4..5 next") { + statement.read::<String>(0).expect("4..5 read"); + } +} + +#[marine] +fn select_2(path: String) { + let conn = marine_sqlite_connector::open(path).expect("Open database connection"); + + let mut statement = conn + .prepare("SELECT u32 FROM kv WHERE key = ?") + .expect("prep rand 8..9"); + statement.bind(1, 42).expect("8..9 bind"); + if let State::Row = statement.next().expect("8..9 next") { + statement.read::<i64>(0).expect("8..9 read") as u32; + } +} + +#[marine] +fn set_limit(limit: i64) -> i64 { + marine_sqlite_connector::set_hard_memory_limit(limit) +} diff --git a/src/connection.rs b/src/connection.rs index 4dfd154..d2d8bc0 100644 --- a/src/connection.rs +++ b/src/connection.rs @@ -55,10 +55,6 @@ impl Connection { } } - pub fn set_memory_limit(limit: i64) -> i64 { - unsafe { ffi::sqlite3_hard_heap_limit64(limit) } - } - /// Execute a statement without processing the resulting rows if any. #[inline] pub fn execute<T: AsRef<str>>(&self, statement: T) -> Result<()> { diff --git a/src/lib.rs b/src/lib.rs index 0fe536a..5af2809 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -291,6 +291,14 @@ pub fn version() -> usize { unsafe { ffi::sqlite3_libversion_number() as usize } } +pub fn set_soft_memory_limit(limit: i64) -> i64 { + unsafe { ffi::sqlite3_soft_heap_limit64(limit) } +} + +pub fn set_hard_memory_limit(limit: i64) -> i64 { + unsafe { ffi::sqlite3_hard_heap_limit64(limit) } +} + +fn last_error(raw: ffi::Sqlite3DbHandle) -> Option<Error> { + unsafe { let code = ffi::sqlite3_errcode(raw);