Commit

Merge branch 'influxdata:main' into fix-potential-overflow-in-write-buffer-validator
LorrensP-2158466 authored Jul 5, 2024
2 parents a6eb49f + 1fd355e commit 0302d72
Showing 23 changed files with 824 additions and 380 deletions.
4 changes: 4 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

209 changes: 24 additions & 185 deletions README.md

Large diffs are not rendered by default.

Binary file added assets/influxdb-logo-dark.png
Binary file added assets/influxdb-logo.png
1 change: 1 addition & 0 deletions influxdb3/Cargo.toml
@@ -50,6 +50,7 @@ uuid.workspace = true

# Optional Dependencies
console-subscriber = { version = "0.1.10", optional = true, features = ["parking_lot"] }
hashbrown = { workspace = true, features = ["serde"] }

[features]
default = ["jemalloc_replacing_malloc", "azure", "gcp", "aws"]
124 changes: 100 additions & 24 deletions influxdb3/tests/server/query.rs
@@ -717,9 +717,9 @@ async fn api_v1_query_json_format() {
],
"name": "cpu",
"values": [
["1970-01-01T00:00:01", "a", 0.9],
["1970-01-01T00:00:02", "a", 0.89],
["1970-01-01T00:00:03", "a", 0.85]
["1970-01-01T00:00:01Z", "a", 0.9],
["1970-01-01T00:00:02Z", "a", 0.89],
["1970-01-01T00:00:03Z", "a", 0.85]
]
}
],
@@ -745,9 +745,9 @@
],
"name": "mem",
"values": [
["1970-01-01T00:00:04", "a", 0.5],
["1970-01-01T00:00:05", "a", 0.6],
["1970-01-01T00:00:06", "a", 0.7]
["1970-01-01T00:00:04Z", "a", 0.5],
["1970-01-01T00:00:05Z", "a", 0.6],
["1970-01-01T00:00:06Z", "a", 0.7]
]
},
{
@@ -758,9 +758,9 @@
],
"name": "cpu",
"values": [
["1970-01-01T00:00:01", "a", 0.9],
["1970-01-01T00:00:02", "a", 0.89],
["1970-01-01T00:00:03", "a", 0.85]
["1970-01-01T00:00:01Z", "a", 0.9],
["1970-01-01T00:00:02Z", "a", 0.89],
["1970-01-01T00:00:03Z", "a", 0.85]
]
}
],
@@ -786,9 +786,9 @@
],
"name": "cpu",
"values": [
["1970-01-01T00:00:01", "a", 0.9],
["1970-01-01T00:00:02", "a", 0.89],
["1970-01-01T00:00:03", "a", 0.85]
["1970-01-01T00:00:01Z", "a", 0.9],
["1970-01-01T00:00:02Z", "a", 0.89],
["1970-01-01T00:00:03Z", "a", 0.85]
]
}
],
@@ -879,32 +879,32 @@ async fn api_v1_query_csv_format() {
epoch: None,
query: "SELECT time, host, usage FROM cpu",
expected: "name,tags,time,host,usage\n\
cpu,,1970-01-01T00:00:01,a,0.9\n\
cpu,,1970-01-01T00:00:02,a,0.89\n\
cpu,,1970-01-01T00:00:03,a,0.85\n\r\n",
cpu,,1970-01-01T00:00:01Z,a,0.9\n\
cpu,,1970-01-01T00:00:02Z,a,0.89\n\
cpu,,1970-01-01T00:00:03Z,a,0.85\n\r\n",
},
// Basic Query with multiple measurements:
TestCase {
database: Some("foo"),
epoch: None,
query: "SELECT time, host, usage FROM cpu, mem",
expected: "name,tags,time,host,usage\n\
mem,,1970-01-01T00:00:04,a,0.5\n\
mem,,1970-01-01T00:00:05,a,0.6\n\
mem,,1970-01-01T00:00:06,a,0.7\n\
cpu,,1970-01-01T00:00:01,a,0.9\n\
cpu,,1970-01-01T00:00:02,a,0.89\n\
cpu,,1970-01-01T00:00:03,a,0.85\n\r\n",
mem,,1970-01-01T00:00:04Z,a,0.5\n\
mem,,1970-01-01T00:00:05Z,a,0.6\n\
mem,,1970-01-01T00:00:06Z,a,0.7\n\
cpu,,1970-01-01T00:00:01Z,a,0.9\n\
cpu,,1970-01-01T00:00:02Z,a,0.89\n\
cpu,,1970-01-01T00:00:03Z,a,0.85\n\r\n",
},
// Basic Query with db in query string:
TestCase {
database: None,
epoch: None,
query: "SELECT time, host, usage FROM foo.autogen.cpu",
expected: "name,tags,time,host,usage\n\
cpu,,1970-01-01T00:00:01,a,0.9\n\
cpu,,1970-01-01T00:00:02,a,0.89\n\
cpu,,1970-01-01T00:00:03,a,0.85\n\r\n",
cpu,,1970-01-01T00:00:01Z,a,0.9\n\
cpu,,1970-01-01T00:00:02Z,a,0.89\n\
cpu,,1970-01-01T00:00:03Z,a,0.85\n\r\n",
},
// Basic Query epoch parameter set:
TestCase {
@@ -1168,3 +1168,79 @@ async fn api_v1_query_chunked() {
assert_eq!(t.expected, values, "query failed: {q}", q = t.query);
}
}

#[tokio::test]
async fn api_v1_query_data_conversion() {
let server = TestServer::spawn().await;

server
.write_lp_to_db(
"foo",
"weather,location=us-midwest temperature_integer=82i 1465839830100400200\n\
weather,location=us-midwest temperature_float=82 1465839830100400200\n\
weather,location=us-midwest temperature_str=\"too warm\" 1465839830100400200\n\
weather,location=us-midwest too_hot=true 1465839830100400200",
Precision::Nanosecond,
)
.await
.unwrap();

struct TestCase<'a> {
database: Option<&'a str>,
epoch: Option<&'a str>,
query: &'a str,
expected: Value,
}

let test_cases = [
// Basic Query:
TestCase {
database: Some("foo"),
epoch: None,
query: "SELECT time, location, temperature_integer, temperature_float, temperature_str, too_hot FROM weather",
expected: json!({
"results": [
{
"series": [
{
"columns": [
"time",
"location",
"temperature_integer",
"temperature_float",
"temperature_str",
"too_hot"
],
"name": "weather",
"values": [
["2016-06-13T17:43:50.100400200Z", "us-midwest", 82, 82.0, "too warm", true],
]
}
],
"statement_id": 0
}
]
}),
},

];

for t in test_cases {
let mut params = vec![("q", t.query)];
if let Some(db) = t.database {
params.push(("db", db));
}
if let Some(epoch) = t.epoch {
params.push(("epoch", epoch));
}
let resp = server
.api_v1_query(&params, None)
.await
.json::<Value>()
.await
.unwrap();
println!("\n{q}", q = t.query);
println!("{resp:#}");
assert_eq!(t.expected, resp, "query failed: {q}", q = t.query);
}
}
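
The new api_v1_query_data_conversion test, like the JSON and CSV expectations updated above, expects v1 /query responses to render times as RFC3339 strings with an explicit UTC "Z" suffix. As a minimal illustration of that formatting, assuming a chrono-style RFC3339 renderer (the server's actual formatting code is not part of this diff), the expected string can be reproduced from the nanosecond timestamp written in the test's line protocol:

use chrono::{DateTime, SecondsFormat, Utc};

fn main() {
    // Nanosecond timestamp written in the test's line protocol above.
    let nanos: i64 = 1_465_839_830_100_400_200;
    let ts: DateTime<Utc> =
        DateTime::from_timestamp(nanos / 1_000_000_000, (nanos % 1_000_000_000) as u32)
            .expect("timestamp in range");
    // `use_z = true` renders the UTC offset as a trailing "Z", producing
    // "2016-06-13T17:43:50.100400200Z", the value expected in the JSON above.
    println!("{}", ts.to_rfc3339_opts(SecondsFormat::AutoSi, true));
}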
1 change: 1 addition & 0 deletions influxdb3_server/Cargo.toml
@@ -37,6 +37,7 @@ iox_query_influxql_rewrite = { path = "../iox_query_influxql_rewrite" }
# crates.io Dependencies
anyhow.workspace = true
arrow.workspace = true
arrow-array.workspace = true
arrow-csv.workspace = true
arrow-flight.workspace = true
arrow-json.workspace = true