diff --git a/Cargo.lock b/Cargo.lock index a7812a9..8415016 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "Inflector" @@ -564,7 +564,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "sqlite3-editor" -version = "1.0.197" +version = "1.0.199" dependencies = [ "base64", "clap", diff --git a/Cargo.toml b/Cargo.toml index ec2f523..2c22511 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sqlite3-editor" -version = "1.0.197" +version = "1.0.199" edition = "2021" [features] diff --git a/src/export.rs b/src/export.rs index 3b44fad..616441f 100644 --- a/src/export.rs +++ b/src/export.rs @@ -61,8 +61,8 @@ pub fn export_csv( .prepare(query) .or_else(|err| Error::new_query_error(err, query, &[]))?; - if options.delimiter.as_bytes().len() != 1 { - Error::new_other_error("The delimiter needs to be a single character.", None, None)?; + if options.delimiter.len() != 1 { + Error::new_other_error("The delimiter needs to be a single-byte character.", None, None)?; } // TODO: `stmt.column_count()` and `stmt.column_names()` should be called after `rows.next()` (see https://github.com/rusqlite/rusqlite/blob/b7309f2dca70716fee44c85082c585b330edb073/src/column.rs#L51-L53). 
diff --git a/src/export_test.rs b/src/export_test.rs index 8501ee9..2581b77 100644 --- a/src/export_test.rs +++ b/src/export_test.rs @@ -109,7 +109,7 @@ fn test_invalid_delimiter() { ) .unwrap_err() .to_string() - .contains("The delimiter needs to be a single character.")); + .contains("The delimiter needs to be a single-byte character.")); } #[test] diff --git a/src/import.rs b/src/import.rs index 568d335..34303ce 100644 --- a/src/import.rs +++ b/src/import.rs @@ -106,34 +106,16 @@ pub fn import_json( table_name: &str, input_file: Option<String>, ) -> std::result::Result<(), Error> { - use serde_json::Value; - - let parsed: Vec<HashMap<String, String>> = - serde_json::from_reader::<_, Vec<HashMap<String, Value>>>(open_reader(input_file)?)? - .into_iter() - .map(|map| { - map.into_iter() - .map(|(k, v)| { - ( - k, - match v { - Value::String(s) => s, - v => v.to_string(), - }, - ) - }) - .collect() - }) - .collect(); + let parsed = serde_json::from_reader::<_, Vec<HashMap<String, Literal>>>(open_reader(input_file)?)?; if parsed.is_empty() { - Error::new_other_error("No data present.", None, None)?; + return Error::new_other_error("No data present.", None, None); } let columns = parsed.first().unwrap().keys().map(|v| v.to_owned()).collect::<Vec<_>>(); if columns.is_empty() { - Error::new_other_error("No column headers present.", None, None)?; + return Error::new_other_error("No column headers present.", None, None); } let mut con = connect(database_filepath, sql_cipher_key)?; @@ -145,7 +127,7 @@ pub fn import_json( escape_sql_identifier(table_name), columns .iter() - .map(|v| format!("{} TEXT", escape_sql_identifier(v))) + .map(|v| escape_sql_identifier(v)) .collect::<Vec<_>>() .join(", ") ); @@ -165,23 +147,25 @@ pub fn import_json( let mut insert = tx .prepare(&stmt) .or_else(|err| Error::new_query_error(err, &stmt, &[]))?; - for record in &parsed { - let values = columns.iter().map(|column| record.get(column)).collect::<Vec<_>>(); + for (record_id, record) in parsed.iter().enumerate() { + let mut values = Vec::<&Literal>::new(); + for column in &columns { + let 
Some(value) = record.get(column) else { + return Error::new_other_error( + format!("The row {} does not have the column '{column}'.", record_id + 1), + None, + None, + ); + }; + values.push(value); + } for (i, value) in values.iter().enumerate() { insert.raw_bind_parameter(i + 1, value).or_else(|err| { - Error::new_query_error( - err, - &stmt, - &values.iter().map(|v| (*v).into()).collect::<Vec<Literal>>(), - ) + Error::new_query_error(err, &stmt, &values.iter().map(|&v| v.clone()).collect::<Vec<_>>()) })?; } insert.raw_execute().or_else(|err| { - Error::new_query_error( - err, - &stmt, - &values.iter().map(|v| (*v).into()).collect::<Vec<Literal>>(), - ) + Error::new_query_error(err, &stmt, &values.iter().map(|&v| v.clone()).collect::<Vec<_>>()) })?; } } diff --git a/src/import_test.rs b/src/import_test.rs index 94c263c..e623114 100644 --- a/src/import_test.rs +++ b/src/import_test.rs @@ -128,7 +128,11 @@ fn test_import_json() { let tmp_json_file_path = tmp_json_file.path().to_str().unwrap().to_owned(); // Write a sample JSON file to import. - fs::write(&tmp_json_file, r#"[{"name":"Alice","age":20},{"name":"Bob","age":25}]"#).unwrap(); + fs::write( + &tmp_json_file, + r#"[{"name":"Alice","age":20,"optional":null},{"name":"Bob","age":25,"optional":0}]"#, + ) + .unwrap(); // Import the JSON file. 
assert!(import::import_json(tmp_db_filepath, &None, "test", Some(tmp_json_file_path)).is_ok()); @@ -137,9 +141,15 @@ fn test_import_json() { let result = serde_json::to_string( &rusqlite::Connection::open(tmp_db_filepath) .unwrap() - .prepare("SELECT * FROM test") + .prepare("SELECT name, age, optional FROM test") .unwrap() - .query_map([], |row| Ok((get_string(row, 0, |_| {})?, get_string(row, 1, |_| {})?))) + .query_map([], |row| { + Ok(( + row.get::<_, String>(0)?, + row.get::<_, i32>(1)?, + row.get::<_, Option<i32>>(2)?, + )) + }) .unwrap() .collect::<Result<Vec<_>, _>>() .unwrap(), @@ -147,7 +157,7 @@ .unwrap(); // key orders are not maintained - assert!(result == r#"[["Alice","20"],["Bob","25"]]"# || result == r#"[["20","Alice"],["25","Bob"]]"#); + assert_eq!(result, r#"[["Alice",20,null],["Bob",25,0]]"#); } #[test]