v0.2.4 - Fixed prepared statement execution bug
bobozaur committed Apr 4, 2022
1 parent 836f777 commit 9cde5f0
Showing 4 changed files with 29 additions and 19 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "exasol"
-version = "0.2.3"
+version = "0.2.4"
edition = "2021"
authors = ["bobozaur"]
description = "Exasol client library implemented in Rust."
5 changes: 2 additions & 3 deletions src/connection.rs
@@ -207,10 +207,9 @@ impl Connection {
/// prepared_stmt.execute(vec_data).unwrap();
/// ```
///
-/// String literals resembling a parameter can be escaped, if, for any reason, needed:
-/// (Exasol doesn't really seem to like combining parameters with literals in prepared statements)
+/// String literals resembling a parameter can be escaped, if needed:
///
-/// ``` should_panic
+/// ```
/// # use exasol::{connect, QueryResult, PreparedStatement};
/// # use exasol::error::Result;
/// # use serde_json::json;
6 changes: 4 additions & 2 deletions src/query.rs
@@ -389,6 +389,7 @@ impl PreparedStatement {
/// ["a", "b", 1, "x"],
/// ["c", "d", 2, "x"],
/// ["e", "f", 3, "x"],
/// ["g", "h", 4, "x"]
/// ]
/// );
///
@@ -422,13 +423,14 @@ impl PreparedStatement {
.map(|c| c.as_str())
.collect::<Vec<&str>>();

-let col_major_data = to_col_major(&col_names, data).map_err(DriverError::DataError)?;
+let (col_major_data, num_rows) =
+    to_col_major(&col_names, data).map_err(DriverError::DataError)?;

let payload = json!({
"command": "executePreparedStatement",
"statementHandle": &self.statement_handle,
"numColumns": num_columns,
"numRows": col_major_data.len(),
"numRows": num_rows,
"columns": columns,
"data": col_major_data
});
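This hunk carries the fix named in the commit title: `numRows` was previously taken from `col_major_data.len()`, but after transposition `col_major_data` holds one inner `Vec` per column, so its length is the column count rather than the row count. A minimal standalone sketch of the mix-up, using plain `Vec`s instead of the crate's types:

```
fn main() {
    // Three rows of two columns each, in row-major order.
    let row_major = vec![vec!["a", "1"], vec!["b", "2"], vec!["c", "3"]];

    // Transpose to column-major order, as the driver does before sending data.
    let num_cols = row_major[0].len();
    let col_major: Vec<Vec<&str>> = (0..num_cols)
        .map(|c| row_major.iter().map(|row| row[c]).collect())
        .collect();

    // The outer length of the transposed data is the column count, not the row count.
    assert_eq!(col_major.len(), 2);
    assert_eq!(row_major.len(), 3);
}
```

This is why `to_col_major` now reports the row count itself instead of the caller inferring it from the transposed data, as the `src/row.rs` changes below show.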
35 changes: 22 additions & 13 deletions src/row.rs
@@ -337,17 +337,19 @@ fn col_major_seq_data() {
struct SomeRow(String, String);

let row = SomeRow("val1".to_owned(), "val2".to_owned());
-let row_major_data = vec![row.clone(), row.clone()];
+let row_major_data = vec![row.clone(), row.clone(), row.clone()];

let columns = &["col1", "col2"];
-let col_major_data = to_col_major(columns, row_major_data).unwrap();
+let (col_major_data, num_rows) = to_col_major(columns, row_major_data).unwrap();
assert_eq!(
col_major_data,
vec![
vec!["val1".to_owned(), "val1".to_owned()],
vec!["val2".to_owned(), "val2".to_owned()]
vec!["val1".to_owned(), "val1".to_owned(), "val1".to_owned()],
vec!["val2".to_owned(), "val2".to_owned(), "val2".to_owned()]
]
)
);

assert_eq!(num_rows, 3);
}

#[test]
@@ -363,17 +365,19 @@ fn col_major_map_data() {
col2: "val2".to_owned(),
};

-let row_major_data = vec![row.clone(), row.clone()];
+let row_major_data = vec![row.clone(), row.clone(), row.clone()];

let columns = &["col2", "col1"];
-let col_major_data = to_col_major(columns, row_major_data).unwrap();
+let (col_major_data, num_rows) = to_col_major(columns, row_major_data).unwrap();
assert_eq!(
col_major_data,
vec![
vec!["val2".to_owned(), "val2".to_owned()],
vec!["val1".to_owned(), "val1".to_owned()]
vec!["val2".to_owned(), "val2".to_owned(), "val2".to_owned()],
vec!["val1".to_owned(), "val1".to_owned(), "val1".to_owned()]
]
)
);

assert_eq!(num_rows, 3);
}

/// Function used to transpose data from an iterator of serializable types,
@@ -383,18 +387,23 @@ fn col_major_map_data() {
/// Sequence-like data is processed as such, and the columns are merely used to assert length.
/// Map-like data though requires columns so the data is processed in the expected order. Also,
/// duplicate columns are not supported, as column values from the map get consumed.
-pub(crate) fn to_col_major<T, C, S>(columns: &[&C], data: T) -> DataResult<Vec<Vec<Value>>>
+pub(crate) fn to_col_major<T, C, S>(columns: &[&C], data: T) -> DataResult<(Vec<Vec<Value>>, usize)>
where
S: Serialize,
C: ?Sized + Hash + Ord + Display,
String: Borrow<C>,
T: IntoIterator<Item = S>,
{
+let mut num_rows = 0usize;
let iter_data = data
.into_iter()
-.map(|s| to_seq_iter(s, columns).map(IntoIterator::into_iter))
+.map(|s| {
+    num_rows += 1;
+    to_seq_iter(s, columns).map(IntoIterator::into_iter)
+})
.collect::<DataResult<Vec<IntoIter<Value>>>>()?;
-Ok(ColumnMajorIterator(iter_data).collect::<Vec<Vec<Value>>>())
+let col_major_data = ColumnMajorIterator(iter_data).collect::<Vec<Vec<Value>>>();
+Ok((col_major_data, num_rows))
}

#[inline]
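The row count is gathered during the serialization pass itself: `num_rows` is incremented inside the closure that converts each row, so no extra traversal of the data is needed and the count is ready once the fallible `collect` succeeds. A self-contained sketch of the same count-while-collecting pattern (hypothetical names, not the crate's API):

```
// Parse a batch of strings, returning both the parsed values and how many
// items were consumed, counted during the single mapping pass.
fn parse_all(items: Vec<&str>) -> Result<(Vec<i64>, usize), std::num::ParseIntError> {
    let mut count = 0usize;
    let parsed = items
        .into_iter()
        .map(|s| {
            count += 1;
            s.parse::<i64>()
        })
        .collect::<Result<Vec<i64>, _>>()?;
    Ok((parsed, count))
}

fn main() {
    let (values, count) = parse_all(vec!["1", "2", "3"]).unwrap();
    assert_eq!(values, vec![1, 2, 3]);
    assert_eq!(count, 3);
}
```

On an early error the partial count is discarded along with the data, which matches how `to_col_major` propagates its `DataResult` errors.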
