Commit 74e4b2f

v1.0.193
yy0931 committed Sep 22, 2024
1 parent dbbcf5d commit 74e4b2f
Showing 9 changed files with 215 additions and 57 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "sqlite3-editor"
version = "1.0.191"
version = "1.0.193"
edition = "2021"

[features]
24 changes: 24 additions & 0 deletions src/cache/pager_test.rs
@@ -3,6 +3,7 @@ use std::{rc::Rc, time::Duration};
use tempfile::NamedTempFile;

use crate::cache::{cache_entry::Records, pager::Pager};
use crate::error::{Error, ErrorCode};

#[test]
fn test_repeat_same_query() {
@@ -14,6 +15,7 @@ fn test_repeat_same_query() {
let mut pager = Pager::new();
pager.config.slow_query_threshold = Duration::ZERO;
pager.config.cache_time_limit_relative_to_queried_range = f64::MAX;
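// No query has run yet, so the cache must start out empty.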
assert!(pager.total_cache_size_bytes() == 0);

let query = "SELECT * FROM t LIMIT ? OFFSET ?";
let params = &[3.into(), 0.into()];
@@ -34,6 +36,7 @@
);
assert_eq!(result1.n_rows(), 2);
assert_eq!(result1.columns(), Rc::new(vec!["x".to_owned(), "y".to_owned()]));
assert!(pager.total_cache_size_bytes() > 0);
}

#[test]
@@ -254,3 +257,24 @@ fn test_wrong_parameter_type() {
let params = &["1".into(), "1".into()];
assert_eq!(pager.query(&mut conn, query, params, |_| {}), Ok(None));
}

#[test]
fn test_failed_to_start_a_transaction() {
let mut conn = rusqlite::Connection::open_in_memory().unwrap();
conn.execute("CREATE TABLE t(x)", ()).unwrap();
conn.execute("BEGIN", ()).unwrap();

let mut pager = Pager::new();

let query = "SELECT * FROM t LIMIT ? OFFSET ?";
let params = &["1".into(), "1".into()];
assert_eq!(
pager.query(&mut conn, query, params, |_| {}),
Err(Error::Query {
message: "cannot start a transaction within a transaction".to_owned(),
query: "BEGIN;".to_owned(),
params: vec![],
code: ErrorCode::OtherError
})
);
}
16 changes: 16 additions & 0 deletions src/import_test.rs
@@ -1,3 +1,4 @@
use crate::error::Error;
use crate::import;
use crate::sqlite3::get_string;
use std::fs;
@@ -65,6 +66,21 @@ fn test_import_csv_empty_lines() {
);
}

#[test]
fn test_import_csv_with_inconsistent_columns() {
let tmp_db_file = tempfile::NamedTempFile::new().unwrap();
let tmp_db_filepath = tmp_db_file.path().to_str().unwrap();

let mut tmp_csv_file = tempfile::NamedTempFile::new().unwrap();
let tmp_csv_file_path = tmp_csv_file.path().to_str().unwrap().to_owned();

// Write a sample CSV file whose last record has three fields instead of the two declared by the header.
writeln!(tmp_csv_file, "name,age\nAlice,20\nBob,25,30").unwrap();

// Import the CSV file.
assert_eq!(
import::import_csv(tmp_db_filepath, &None, "test", ",", Some(tmp_csv_file_path.to_string())),
Err(Error::Other {
message: "CSV error: record 2 (line: 3, byte: 18): found record with 3 fields, but the previous record has 2 fields".to_owned(),
query: None,
params: None
})
);
}

#[test]
fn test_import_tsv() {
let tmp_db_file = tempfile::NamedTempFile::new().unwrap();
105 changes: 54 additions & 51 deletions src/list_placeholders.rs
@@ -70,63 +70,66 @@ pub fn list_placeholders(stmt: &SplittedStatement) -> Vec<Placeholder> {
}
}
}
match &token.token {
// https://www.sqlite.org/c3ref/bind_blob.html
// ?
Token::Placeholder(p) if p == "?" => {
result.push(Placeholder {
name: None,
ranges_relative_to_stmt: vec![PlaceholderRange::new(token)],
});
}
// ?NNN
Token::Placeholder(s) if QUESTION_NUMBER.is_match(s) => {
if let Ok(n) = s[1..].parse::<usize>().map(|v| v - 1) {
while result.len() < n + 1 {
result.push(Placeholder {
name: None,
ranges_relative_to_stmt: vec![],
});
}
if result[n].name.is_none() {
result[n].name = Some(s.to_owned());
result[n].ranges_relative_to_stmt.push(PlaceholderRange::new(token));
}
}
}
// :VVV, @VVV, $VVV
Token::Word(Word {
value: s,
quote_style: None,
keyword: Keyword::NoKeyword,
}) => {
if s.starts_with(":") || s.starts_with("@") || s.starts_with("$") {
if !is_previous_placeholder_unfinished {
match &token.token {
// https://www.sqlite.org/c3ref/bind_blob.html
// ?
Token::Placeholder(p) if p == "?" => {
result.push(Placeholder {
name: Some(s.to_owned()),
name: None,
ranges_relative_to_stmt: vec![PlaceholderRange::new(token)],
});
} else if let Some(sign) = previous_colon_or_at_sign {
let mut range = PlaceholderRange::new(token);
range.start.column = range.start.column.saturating_sub(1);
result.push(Placeholder {
name: Some(sign + s.as_str()),
ranges_relative_to_stmt: vec![range],
});
}
is_previous_placeholder_unfinished = true;
}
Token::Number(s, /* "L" suffix */ false) => {
if let Some(sign) = previous_colon_or_at_sign {
let mut range = PlaceholderRange::new(token);
range.start.column = range.start.column.saturating_sub(1);
result.push(Placeholder {
name: Some(sign + s.as_str()),
ranges_relative_to_stmt: vec![range],
});
// ?NNN
Token::Placeholder(s) if QUESTION_NUMBER.is_match(s) => {
if let Ok(n) = s[1..].parse::<usize>().map(|v| v - 1) {
while result.len() < n + 1 {
result.push(Placeholder {
name: None,
ranges_relative_to_stmt: vec![],
});
}
if result[n].name.is_none() {
result[n].name = Some(s.to_owned());
result[n].ranges_relative_to_stmt.push(PlaceholderRange::new(token));
}
}
}
// :VVV, @VVV, $VVV
Token::Word(Word {
value: s,
quote_style: None,
keyword: Keyword::NoKeyword,
}) => {
if s.starts_with(":") || s.starts_with("@") || s.starts_with("$") {
result.push(Placeholder {
name: Some(s.to_owned()),
ranges_relative_to_stmt: vec![PlaceholderRange::new(token)],
});
is_previous_placeholder_unfinished = true;
} else if let Some(sign) = previous_colon_or_at_sign {
let mut range = PlaceholderRange::new(token);
range.start.column = range.start.column.saturating_sub(1);
result.push(Placeholder {
name: Some(sign + s.as_str()),
ranges_relative_to_stmt: vec![range],
});
is_previous_placeholder_unfinished = true;
}
}
Token::Number(s, /* "L" suffix */ false) => {
if let Some(sign) = previous_colon_or_at_sign {
let mut range = PlaceholderRange::new(token);
range.start.column = range.start.column.saturating_sub(1);
result.push(Placeholder {
name: Some(sign + s.as_str()),
ranges_relative_to_stmt: vec![range],
});
is_previous_placeholder_unfinished = true;
}
}
is_previous_placeholder_unfinished = true;
_ => {}
}
_ => {}
}
previous_colon_or_at_sign = match token.token {
Token::Colon => Some(":".to_owned()),
6 changes: 6 additions & 0 deletions src/list_placeholders_test.rs
@@ -58,3 +58,9 @@ pub fn test_placeholder_reuse() {
pub fn test_everything() {
compare("WITH x AS (SELECT @a) SELECT ?, ?, ?10, :10, @10, $10, :aa, @aa, $aa, ?12, ?, :1a1");
}

#[test]
#[cfg(not(feature = "sqlcipher"))] // The bundled SQLCipher does not support delimited numeric literals.
pub fn test_issue_65() {
compare("SELECT 1_2");
}
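For context, issue 65 concerns underscore digit separators in numeric literals, which SQLite added in version 3.46. The following is a minimal sketch of the runtime behavior under that assumption; the `demo` function and the direct rusqlite usage are illustrative only, not part of this commit:

fn demo() -> rusqlite::Result<()> {
    let conn = rusqlite::Connection::open_in_memory()?;
    // SQLite 3.46+ reads 1_2 as the single numeric literal 12, so the
    // highlighter should likewise mark the whole of "1_2" as one number.
    let v: i64 = conn.query_row("SELECT 1_2", [], |row| row.get(0))?;
    assert_eq!(v, 12);
    Ok(())
}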
7 changes: 6 additions & 1 deletion src/main.rs
@@ -337,7 +337,12 @@ where
}

// Open request and response files
let mut r = File::open(&request_body_filepath).unwrap();
let mut r = File::open(&request_body_filepath).unwrap_or_else(|err| {
panic!(
"unable to open request file {}: {err:?}",
request_body_filepath.to_string_lossy()
)
});
let mut w = match std::fs::OpenOptions::new()
.write(true)
.create(true)
56 changes: 53 additions & 3 deletions src/semantic_highlight.rs
@@ -1,3 +1,4 @@
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use sqlparser::{
dialect::SQLiteDialect,
@@ -32,18 +33,70 @@ pub struct SemanticHighlight {
pub end: ZeroIndexedLocation,
}

lazy_static! {
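// Underscore-separated digit groups, e.g. "00" or "12_34".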
static ref HEXADECIMAL_NUMERIC_LITERAL: regex::Regex = regex::Regex::new(r#"^\d+(_\d+)*$"#).unwrap();
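// An underscore-led continuation of a number, optionally ending in an
// exponent marker, e.g. "_12", "_12e", or "_12e3_4".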
static ref NUMERIC_LITERAL_CONTINUATION: regex::Regex =
regex::Regex::new(r#"^(_\d+)+([eE](\d+(_\d+)*)?)?$"#).unwrap();
static ref HEXADECIMAL_LITERAL_CONTINUATION: regex::Regex = regex::Regex::new(r#"^X\d+(_\d+)*$"#).unwrap();
}

/// Tokenizes the given SQL input string and returns the tokens with highlighting information.
pub fn semantic_highlight(sql: &str) -> Vec<SemanticHighlight> {
let mut tokens = vec![];
let Ok(parsed_tokens) = tokenize_with_range_location(&SQLiteDialect {}, sql) else {
return tokens;
};

let mut ends_with_e = false;
let mut is_zero = false;
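// One token of lookbehind: these flags let numeric literals that the
// tokenizer splits into several tokens (e.g. "1_2e+3" or "0X12_34") be
// highlighted as a single run of Number tokens.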

for TokenWithRangeLocation { token, start, end } in parsed_tokens {
let previous_token_ends_with_e = ends_with_e;
ends_with_e = false;
let previous_token_is_zero = is_zero;
is_zero = false;
if start == end {
continue;
}
tokens.push(SemanticHighlight {
kind: match token {
// number
Token::Number(s, is_long) => {
is_zero = s == "0" && !is_long;
SemanticTokenKind::Number
}
Token::HexStringLiteral(s) if HEXADECIMAL_NUMERIC_LITERAL.is_match(&s) => SemanticTokenKind::Number,
Token::Word(Word {
quote_style: None,
value,
keyword: Keyword::NoKeyword,
}) if NUMERIC_LITERAL_CONTINUATION.is_match(&value) // _123e
&& tokens
.last()
.map(|last| last.end == start && last.kind == SemanticTokenKind::Number)
.unwrap_or(false) =>
{
ends_with_e = value.ends_with("e") || value.ends_with("E");
SemanticTokenKind::Number
}
Token::Minus | Token::Plus
if previous_token_ends_with_e && tokens.last().map(|last| last.end == start).unwrap_or(false) =>
{
SemanticTokenKind::Number
}
Token::Word(Word {
quote_style: None,
value,
keyword: Keyword::NoKeyword,
}) if previous_token_is_zero && HEXADECIMAL_LITERAL_CONTINUATION.is_match(&value) // X12_34
&& tokens
.last()
.map(|last| last.end == start)
.unwrap_or(false) =>
{
SemanticTokenKind::Number
}

// word
Token::Word(w) => match w {
Word {
@@ -60,9 +113,6 @@ pub fn semantic_highlight(sql: &str) -> Vec<SemanticHighlight> {
_ => SemanticTokenKind::Keyword,
},

// number
Token::Number(_, _) => SemanticTokenKind::Number,

// string
Token::SingleQuotedString(_)
| Token::DollarQuotedString(_)
54 changes: 54 additions & 0 deletions src/semantic_highlight_test.rs
@@ -100,6 +100,22 @@ fn test_pragma() {
);
}

#[test]
fn test_blob_literal() {
assert_eq!(
semantic_highlight("SELECT 0x'ff'")
.into_iter()
.map(|t| t.kind)
.collect::<Vec<_>>(),
[
SemanticTokenKind::Keyword, // "SELECT"
SemanticTokenKind::Other, // " "
SemanticTokenKind::String, // "0x"
SemanticTokenKind::String, // "'ff'"
]
);
}

#[test]
fn test_tokenizer_error() {
assert_eq!(
@@ -110,3 +126,41 @@
[]
);
}

fn assert_all_tokens_are_number(expr: &str) {
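// Every token produced for `expr` must be highlighted as a number.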
let tokens = semantic_highlight(expr).into_iter().map(|t| t.kind).collect::<Vec<_>>();
assert_eq!(
tokens,
tokens.iter().map(|_| SemanticTokenKind::Number).collect::<Vec<_>>()
);
}

#[test]
fn test_delimited_numeric_literal_integer_and_float() {
for part1 in ["00", "00_00_00"] {
assert_all_tokens_are_number(part1);
assert_all_tokens_are_number(&format!(".{part1}"));
for part2 in ["00", "00_00_00"] {
assert_all_tokens_are_number(&format!("{part1}.{part2}"));
for e in ["e", "E"] {
assert_all_tokens_are_number(&format!(".{part1}{e}{part2}"));
assert_all_tokens_are_number(&format!(".{part1}{e}+{part2}"));
assert_all_tokens_are_number(&format!(".{part1}{e}-{part2}"));

for part3 in ["00", "00_00_00"] {
assert_all_tokens_are_number(&format!("{part1}.{part2}{e}{part3}"));
assert_all_tokens_are_number(&format!("{part1}.{part2}{e}+{part3}"));
assert_all_tokens_are_number(&format!("{part1}.{part2}{e}-{part3}"));
}
}
}
}
}

#[test]
fn test_delimited_numeric_literal_hex() {
assert_all_tokens_are_number("0x00");
assert_all_tokens_are_number("0x00_00_00");
assert_all_tokens_are_number("0X00");
assert_all_tokens_are_number("0X00_00");
}
