diff --git a/crates/core/src/schema/mod.rs b/crates/core/src/schema/mod.rs index cab6c0b..ec04753 100644 --- a/crates/core/src/schema/mod.rs +++ b/crates/core/src/schema/mod.rs @@ -6,7 +6,8 @@ use serde::Deserialize; use sqlite::ResultCode; use sqlite_nostd as sqlite; pub use table_info::{ - DiffIncludeOld, PendingStatement, PendingStatementValue, RawTable, Table, TableInfoFlags, + Column, DiffIncludeOld, PendingStatement, PendingStatementValue, RawTable, Table, + TableInfoFlags, }; #[derive(Deserialize, Default)] diff --git a/crates/core/src/schema/table_info.rs b/crates/core/src/schema/table_info.rs index 03291c7..b380f13 100644 --- a/crates/core/src/schema/table_info.rs +++ b/crates/core/src/schema/table_info.rs @@ -45,8 +45,21 @@ impl Table { } } - pub fn column_names(&self) -> impl Iterator<Item = &str> { - self.columns.iter().map(|c| c.name.as_str()) + pub fn filtered_columns<'a>( + &'a self, + names: impl Iterator<Item = &'a str> + 'a, + ) -> impl Iterator<Item = &'a Column> + 'a { + // First, sort all columns by name for faster lookups by name. 
+ let mut sorted_by_name: Vec<&Column> = self.columns.iter().collect(); + sorted_by_name.sort_by_key(|c| &*c.name); + + names.filter_map(move |name| { + let index = sorted_by_name + .binary_search_by_key(&name, |c| c.name.as_str()) + .ok()?; + + Some(sorted_by_name[index]) + }) } } diff --git a/crates/core/src/util.rs b/crates/core/src/util.rs index 5e77768..2c9f22b 100644 --- a/crates/core/src/util.rs +++ b/crates/core/src/util.rs @@ -1,7 +1,9 @@ extern crate alloc; +use core::fmt::{Display, Write}; + use alloc::format; -use alloc::string::String; +use alloc::string::{String, ToString}; #[cfg(not(feature = "getrandom"))] use crate::sqlite; @@ -13,7 +15,7 @@ use uuid::Uuid; use uuid::Builder; pub fn quote_string(s: &str) -> String { - format!("'{:}'", s.replace("'", "''")) + return QuotedString(s).to_string(); } pub fn quote_json_path(s: &str) -> String { @@ -32,6 +34,28 @@ pub fn quote_internal_name(name: &str, local_only: bool) -> String { } } +/// A string that [Display]s as a SQLite string literal. 
+pub struct QuotedString<'a>(pub &'a str); + +impl<'a> Display for QuotedString<'a> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + const SINGLE_QUOTE: char = '\''; + const ESCAPE_SEQUENCE: &'static str = "''"; + + f.write_char(SINGLE_QUOTE)?; + + for (i, group) in self.0.split(SINGLE_QUOTE).enumerate() { + if i != 0 { + f.write_str(ESCAPE_SEQUENCE)?; + } + + f.write_str(group)?; + } + + f.write_char(SINGLE_QUOTE) + } +} + pub fn quote_identifier_prefixed(prefix: &str, name: &str) -> String { return format!("\"{:}{:}\"", prefix, name.replace("\"", "\"\"")); } diff --git a/crates/core/src/views.rs b/crates/core/src/views.rs index b79c98b..c6a267e 100644 --- a/crates/core/src/views.rs +++ b/crates/core/src/views.rs @@ -12,7 +12,7 @@ use sqlite_nostd::{self as sqlite}; use crate::create_sqlite_text_fn; use crate::error::PowerSyncError; -use crate::schema::{DiffIncludeOld, Table}; +use crate::schema::{Column, DiffIncludeOld, Table}; use crate::util::*; fn powersync_view_sql_impl( @@ -88,11 +88,12 @@ fn powersync_trigger_delete_sql_impl( match &table_info.diff_include_old { Some(include_old) => { let mut json = match include_old { - DiffIncludeOld::OnlyForColumns { columns } => { - json_object_fragment("OLD", &mut columns.iter().map(|c| c.as_str())) - } + DiffIncludeOld::OnlyForColumns { columns } => json_object_fragment( + "OLD", + &mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())), + ), DiffIncludeOld::ForAllColumns => { - json_object_fragment("OLD", &mut table_info.column_names()) + json_object_fragment("OLD", &mut table_info.columns.iter()) } }?; @@ -174,7 +175,7 @@ fn powersync_trigger_insert_sql_impl( let trigger_name = quote_identifier_prefixed("ps_view_insert_", view_name); let type_string = quote_string(name); - let json_fragment = json_object_fragment("NEW", &mut table_info.column_names())?; + let json_fragment = json_object_fragment("NEW", &mut table_info.columns.iter())?; let (metadata_key, metadata_value) = if 
table_info.flags.include_metadata() { (",metadata", ",NEW._metadata") @@ -247,15 +248,15 @@ fn powersync_trigger_update_sql_impl( let trigger_name = quote_identifier_prefixed("ps_view_update_", view_name); let type_string = quote_string(name); - let json_fragment_new = json_object_fragment("NEW", &mut table_info.column_names())?; - let json_fragment_old = json_object_fragment("OLD", &mut table_info.column_names())?; + let json_fragment_new = json_object_fragment("NEW", &mut table_info.columns.iter())?; + let json_fragment_old = json_object_fragment("OLD", &mut table_info.columns.iter())?; let mut old_values_fragment = match &table_info.diff_include_old { None => None, Some(DiffIncludeOld::ForAllColumns) => Some(json_fragment_old.clone()), Some(DiffIncludeOld::OnlyForColumns { columns }) => Some(json_object_fragment( "OLD", - &mut columns.iter().map(|c| c.as_str()), + &mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())), )?), }; @@ -266,9 +267,12 @@ fn powersync_trigger_update_sql_impl( let filtered_new_fragment = match &table_info.diff_include_old { // When include_old_only_when_changed is combined with a column filter, make sure we // only include the powersync_diff of columns matched by the filter. - Some(DiffIncludeOld::OnlyForColumns { columns }) => Cow::Owned( - json_object_fragment("NEW", &mut columns.iter().map(|c| c.as_str()))?, - ), + Some(DiffIncludeOld::OnlyForColumns { columns }) => { + Cow::Owned(json_object_fragment( + "NEW", + &mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())), + )?) + } _ => Cow::Borrowed(json_fragment_new.as_str()), }; @@ -401,7 +405,7 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { /// Example output with prefix "NEW": "json_object('id', NEW.id, 'name', NEW.name, 'age', NEW.age)". fn json_object_fragment<'a>( prefix: &str, - name_results: &mut dyn Iterator<Item = &'a str>, + columns: &mut dyn Iterator<Item = &'a Column>, ) -> Result<String, ResultCode> { // floor(SQLITE_MAX_FUNCTION_ARG / 2). 
// To keep databases portable, we use the default limit of 100 args for this, @@ -409,13 +413,27 @@ const MAX_ARG_COUNT: usize = 50; let mut column_names_quoted: Vec<String> = alloc::vec![]; - while let Some(name) = name_results.next() { - let quoted: String = format!( - "{:}, {:}.{:}", - quote_string(name), - prefix, - quote_identifier(name) - ); + while let Some(column) = columns.next() { + let name = &*column.name; + let quoted = match &*column.type_name { + // We really want the individual columns here to appear as they show up in the database. + // For text columns however, it's possible that e.g. NEW.column was created by a JSON + // function, meaning that it has a JSON subtype active - causing the json_object() call + // we're about to emit to include it as a subobject instead of a string. + "TEXT" | "text" => format!( + "{:}, concat({:}.{:})", + QuotedString(name), + prefix, + quote_identifier(name) + ), + _ => format!( + "{:}, {:}.{:}", + QuotedString(name), + prefix, + quote_identifier(name) + ), + }; + column_names_quoted.push(quoted); } diff --git a/dart/test/crud_test.dart b/dart/test/crud_test.dart index 558997d..bda2d02 100644 --- a/dart/test/crud_test.dart +++ b/dart/test/crud_test.dart @@ -661,5 +661,25 @@ void main() { // The update which didn't change any rows should not be recorded. 
expect(db.select('SELECT * FROM ps_crud'), hasLength(1)); }); + + test('json values are included as text', () { + db + ..execute('select powersync_replace_schema(?)', [ + json.encode({ + 'tables': [ + { + 'name': 'items', + 'columns': [ + {'name': 'col', 'type': 'text'} + ], + } + ] + }) + ]) + ..execute('INSERT INTO items (id, col) VALUES (uuid(), json_object())'); + + final [update] = db.select('SELECT data FROM ps_crud'); + expect(json.decode(update['data']), containsPair('data', {'col': '{}'})); + }); }); } diff --git a/dart/test/utils/migration_fixtures.dart b/dart/test/utils/migration_fixtures.dart index ce8b2b8..29a338c 100644 --- a/dart/test/utils/migration_fixtures.dart +++ b/dart/test/utils/migration_fixtures.dart @@ -536,8 +536,8 @@ END THEN RAISE (FAIL, 'id should be text') END; INSERT INTO "ps_data__lists" - SELECT NEW.id, json_object('description', NEW."description"); - INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff('{}', json_object('description', NEW."description"))))); + SELECT NEW.id, json_object('description', concat(NEW."description")); + INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff('{}', json_object('description', concat(NEW."description")))))); INSERT INTO ps_oplog(bucket, op_id, op, row_type, row_id, hash, superseded) SELECT '$local', 1, @@ -557,9 +557,9 @@ BEGIN THEN RAISE (FAIL, 'Cannot update id') END; UPDATE "ps_data__lists" - SET data = json_object('description', NEW."description") + SET data = json_object('description', concat(NEW."description")) WHERE id = NEW.id; - INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description"))))); + INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', 'lists', 'id', 
NEW.id, 'data', json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description")))))); INSERT INTO ps_oplog(bucket, op_id, op, row_type, row_id, hash, superseded) SELECT '$local', 1, @@ -598,8 +598,8 @@ END WHEN (typeof(NEW.id) != 'text') THEN RAISE (FAIL, 'id should be text') END; - INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', NEW."description"); - INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', NEW."description")))); + INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', concat(NEW."description")); + INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', concat(NEW."description"))))); END ;CREATE TRIGGER "ps_view_update_lists" INSTEAD OF UPDATE ON "lists" @@ -610,9 +610,9 @@ BEGIN THEN RAISE (FAIL, 'Cannot update id') END; UPDATE "ps_data__lists" - SET data = json_object('description', NEW."description") + SET data = json_object('description', concat(NEW."description")) WHERE id = NEW.id; - INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description"))),0); + INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description")))),0); END '''; @@ -636,8 +636,8 @@ END WHEN (typeof(NEW.id) != 'text') THEN RAISE (FAIL, 'id should be text') END; - INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', NEW."description"); - INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', NEW."description")))); + INSERT INTO "ps_data__lists" SELECT NEW.id, 
json_object('description', concat(NEW."description")); + INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', concat(NEW."description"))))); END ;CREATE TRIGGER "ps_view_update_lists" INSTEAD OF UPDATE ON "lists" @@ -648,8 +648,8 @@ BEGIN THEN RAISE (FAIL, 'Cannot update id') END; UPDATE "ps_data__lists" - SET data = json_object('description', NEW."description") + SET data = json_object('description', concat(NEW."description")) WHERE id = NEW.id; - INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description"))),0); + INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description")))),0); END ''';