Commit e597ab3
multi stage and running total pass tests
1 parent a143666 commit e597ab3

27 files changed, +711 -160 lines

packages/cubejs-backend-shared/src/time.ts (-1)

@@ -179,7 +179,6 @@ export const timeSeries = (granularity: string, dateRange: QueryDateRange, optio
   // moment.range works with strings
   const range = moment.range(<any>dateRange[0], <any>dateRange[1]);
 
-  console.log("!!!! timeSeries:", TIME_SERIES[granularity](range, options.timestampPrecision));
   return TIME_SERIES[granularity](range, options.timestampPrecision);
 };

packages/cubejs-schema-compiler/src/adapter/BaseQuery.js (+2 -2)

@@ -734,7 +734,6 @@ export class BaseQuery {
     offset = offset || 'end';
     return this.timeDimensions.map(
       d => [d, (dateFrom, dateTo, dateField, dimensionDateFrom, dimensionDateTo, isFromStartToEnd) => {
-        console.log("!!!!!! IsFromStartToEnd: ", isFromStartToEnd, " --");
         // dateFrom based window
         const conditions = [];
         if (trailingInterval !== 'unbounded') {
@@ -1420,7 +1419,6 @@ export class BaseQuery {
       )
     ).join(' AND ');
 
-    console.log("!!! date join contdition sql: ", dateJoinConditionSql);
 
     return this.overTimeSeriesSelect(
       cumulativeMeasures,
@@ -3292,6 +3290,8 @@ export class BaseQuery {
       cube: 'CUBE({{ exprs_concat }})',
       negative: '-({{ expr }})',
       not: 'NOT ({{ expr }})',
+      add_interval: '{{ date }} + interval \'{{ interval }}\'',
+      sub_interval: '{{ date }} - interval \'{{ interval }}\'',
      true: 'TRUE',
      false: 'FALSE',
      like: '{{ expr }} {% if negated %}NOT {% endif %}LIKE {{ pattern }}',

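Note: the two new add_interval / sub_interval entries above only define the SQL shape for date arithmetic as Jinja-style template strings. A minimal sketch of how they expand, using plain string substitution instead of Cube's real template engine (the helper name and the example column/interval values are illustrative assumptions, not part of this commit):

// Hypothetical illustration only: mimics how the add_interval / sub_interval
// templates expand, without using the actual template engine.
fn render_interval_template(template: &str, date: &str, interval: &str) -> String {
    template
        .replace("{{ date }}", date)
        .replace("{{ interval }}", interval)
}

fn main() {
    let add_interval = "{{ date }} + interval '{{ interval }}'";
    let sub_interval = "{{ date }} - interval '{{ interval }}'";
    // prints: series.date_to + interval '3 day'
    println!("{}", render_interval_template(add_interval, "series.date_to", "3 day"));
    // prints: series.date_from - interval '7 day'
    println!("{}", render_interval_template(sub_interval, "series.date_from", "7 day"));
}
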
packages/cubejs-schema-compiler/test/integration/postgres/sql-generation.test.ts (+6 -6)

@@ -64,7 +64,7 @@ describe('SQL Generation', () => {
           offset: 'start'
         }
       },
-      revenueRolling3day: {
+      revenueRollingThreeDay: {
         type: 'sum',
         sql: 'amount',
         rollingWindow: {
@@ -666,7 +666,7 @@ describe('SQL Generation', () => {
     console.log(query.buildSqlAndParams());
 
     // TODO ordering doesn't work for running total
-    return dbRunner.testQuery(query.buildSqlAndParams()).then(res => {
+    return dbRunner.testQuery(query.buildSqlAndParamsTest()).then(res => {
       console.log(JSON.stringify(res));
       expect(res).toEqual(
         [{
@@ -704,7 +704,7 @@ describe('SQL Generation', () => {
     });
   });
 
-  it('rolling 1', async () => runQueryTest({
+  it('rolling', async () => runQueryTest({
     measures: [
       'visitors.revenueRolling'
     ],
@@ -730,7 +730,7 @@ describe('SQL Generation', () => {
     { visitors__created_at_day: '2017-01-10T00:00:00.000Z', visitors__revenue_rolling: null }
   ]));
 
-  it('rolling multiplied 1', async () => runQueryTest({
+  it('rolling multiplied', async () => runQueryTest({
     measures: [
       'visitors.revenueRolling',
       'visitor_checkins.visitor_checkins_count'
@@ -769,7 +769,7 @@ describe('SQL Generation', () => {
 
   it('rolling month', async () => runQueryTest({
     measures: [
-      'visitors.revenueRolling3day'
+      'visitors.revenueRollingThreeDay'
     ],
     timeDimensions: [{
      dimension: 'visitors.created_at',
@@ -781,7 +781,7 @@ describe('SQL Generation', () => {
     }],
     timezone: 'America/Los_Angeles'
   }, [
-    { visitors__created_at_week: '2017-01-09T00:00:00.000Z', visitors__revenue_rolling3day: '900' }
+    { visitors__created_at_week: '2017-01-09T00:00:00.000Z', visitors__revenue_rolling_three_day: '900' }
   ]));
 
   it('rolling count', async () => runQueryTest({

rust/cubesqlplanner/cubesqlplanner/src/plan/filter.rs (+8 -2)

@@ -6,7 +6,7 @@ use cubenativeutils::CubeError;
 use std::fmt;
 use std::rc::Rc;
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq)]
 pub enum FilterGroupOperator {
     Or,
     And,
@@ -18,13 +18,19 @@ pub struct FilterGroup {
     pub items: Vec<FilterItem>,
 }
 
+impl PartialEq for FilterGroup {
+    fn eq(&self, other: &Self) -> bool {
+        self.operator == other.operator && self.items == other.items
+    }
+}
+
 impl FilterGroup {
     pub fn new(operator: FilterGroupOperator, items: Vec<FilterItem>) -> Self {
         Self { operator, items }
     }
 }
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq)]
 pub enum FilterItem {
     Group(Rc<FilterGroup>),
     Item(Rc<BaseFilter>),

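FilterItem can derive PartialEq here because Rc<T> forwards equality to the pointed-to value when T: PartialEq. A minimal sketch with assumed stand-in types (not the planner's own) showing that derived equality through Rc compares values rather than pointers:

use std::rc::Rc;

// Assumed illustrative types, structurally similar to FilterItem/FilterGroup.
#[derive(Clone, PartialEq)]
enum Item {
    Leaf(Rc<String>),
    Group(Rc<Vec<Item>>),
}

fn main() {
    let a = Item::Leaf(Rc::new("visitors.created_at".to_string()));
    let b = Item::Leaf(Rc::new("visitors.created_at".to_string()));
    let g1 = Item::Group(Rc::new(vec![a.clone(), b.clone()]));
    let g2 = Item::Group(Rc::new(vec![a.clone(), b.clone()]));
    assert!(a == b);   // equal by value, even though the Rc pointers differ
    assert!(g1 == g2); // nested groups compare element by element
}
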
rust/cubesqlplanner/cubesqlplanner/src/plan/join.rs (+32 -61)

@@ -2,6 +2,7 @@ use super::{time_series, Schema, SingleAliasedSource};
 use crate::planner::sql_templates::PlanSqlTemplates;
 use crate::planner::{BaseJoinCondition, BaseMember, VisitorContext};
 use cubenativeutils::CubeError;
+use lazy_static::lazy_static;
 
 use std::rc::Rc;
 
@@ -33,85 +34,55 @@ impl RollingWindowJoinCondition {
     }
 }
 
-/*
- *
-    offset = offset || 'end';
-    return this.timeDimensions.map(
-      d => [d, (dateFrom, dateTo, dateField, dimensionDateFrom, dimensionDateTo, isFromStartToEnd) => {
-        // dateFrom based window
-        const conditions = [];
-        if (trailingInterval !== 'unbounded') {
-          const startDate = isFromStartToEnd || offset === 'start' ? dateFrom : dateTo;
-          const trailingStart = trailingInterval ? this.subtractInterval(startDate, trailingInterval) : startDate;
-          const sign = offset === 'start' ? '>=' : '>';
-          conditions.push(`${dateField} ${sign} ${trailingStart}`);
-        }
-        if (leadingInterval !== 'unbounded') {
-          const endDate = isFromStartToEnd || offset === 'end' ? dateTo : dateFrom;
-          const leadingEnd = leadingInterval ? this.addInterval(endDate, leadingInterval) : endDate;
-          const sign = offset === 'end' ? '<=' : '<';
-          conditions.push(`${dateField} ${sign} ${leadingEnd}`);
-        }
-        return conditions.length ? conditions.join(' AND ') : '1 = 1';
-      }]
-    );
-*/
     pub fn to_sql(
         &self,
         templates: &PlanSqlTemplates,
         context: Rc<VisitorContext>,
         schema: Rc<Schema>,
     ) -> Result<String, CubeError> {
         let mut conditions = vec![];
-        /* let date_column_alias = if let Some(column) = schema.find_column_for_member(&self.time_dimension.full_name(), &None) {
-            templates.column_reference(&source, &column.alias.clone())
-        } else {
-            dimension.to_sql(context.clone(), schema.clone())
-        } */
         let date_column_alias =
             self.resolve_time_column_alias(templates, context.clone(), schema.clone())?;
-        if let Some(trailing_interval) = &self.trailing_interval {
-            if trailing_interval != "unbounded" {
-                let start_date = if self.offset == "start" {
-                    templates
-                        .column_reference(&Some(self.time_series_source.clone()), "date_from")?
-                } else {
-                    templates.column_reference(&Some(self.time_series_source.clone()), "date_to")?
-                };
 
-                let trailing_start = if let Some(trailing_interval) = &self.trailing_interval {
-                    format!("{start_date} - interval '{trailing_interval}'")
-                } else {
-                    start_date
-                };
+        lazy_static! {
+            static ref UNBOUNDED: Option<String> = Some("unbounded".to_string());
+        }
 
-                let sign = if self.offset == "start" { ">=" } else { ">" };
+        if self.trailing_interval != *UNBOUNDED {
+            let start_date = if self.offset == "start" {
+                templates.column_reference(&Some(self.time_series_source.clone()), "date_from")?
+            } else {
+                templates.column_reference(&Some(self.time_series_source.clone()), "date_to")?
+            };
 
-                conditions.push(format!("{date_column_alias} {sign} {trailing_start}"));
-            }
+            let trailing_start = if let Some(trailing_interval) = &self.trailing_interval {
+                format!("{start_date} - interval '{trailing_interval}'")
+            } else {
+                start_date
+            };
+
+            let sign = if self.offset == "start" { ">=" } else { ">" };
+
+            conditions.push(format!("{date_column_alias} {sign} {trailing_start}"));
         }
 
-        if let Some(leading_interval) = &self.trailing_interval {
-            if leading_interval != "unbounded" {
-                let end_date = if self.offset == "end" {
-                    templates.column_reference(&Some(self.time_series_source.clone()), "date_to")?
-                } else {
-                    templates
-                        .column_reference(&Some(self.time_series_source.clone()), "date_from")?
-                };
+        if self.leading_interval != *UNBOUNDED {
+            let end_date = if self.offset == "end" {
+                templates.column_reference(&Some(self.time_series_source.clone()), "date_to")?
+            } else {
+                templates.column_reference(&Some(self.time_series_source.clone()), "date_from")?
+            };
 
-                let leading_end = if let Some(leading_interval) = &self.leading_interval {
-                    format!("{end_date} + interval '{leading_interval}'")
-                } else {
-                    end_date
-                };
+            let leading_end = if let Some(leading_interval) = &self.leading_interval {
+                format!("{end_date} + interval '{leading_interval}'")
+            } else {
+                end_date
+            };
 
-                let sign = if self.offset == "end" { "<=" } else { "<" };
+            let sign = if self.offset == "end" { "<=" } else { "<" };
 
-                conditions.push(format!("{date_column_alias} {sign} {leading_end}"));
-            }
+            conditions.push(format!("{date_column_alias} {sign} {leading_end}"));
         }
-
         let result = if conditions.is_empty() {
             templates.always_true()?
         } else {

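For reference, the rewritten RollingWindowJoinCondition::to_sql builds at most two bounds per time dimension: a trailing bound and a leading bound around the time-series row. Below is a simplified, self-contained sketch of that condition-building logic; the free-standing function, its parameter names, and the example values are assumptions for illustration only, since the real method resolves column references through PlanSqlTemplates and the query schema:

// Simplified standalone sketch of the rolling-window condition shape.
// None means "no explicit interval" (bound sits on the series edge);
// Some("unbounded") disables that side entirely, as in the diff above.
fn rolling_window_condition(
    date_column: &str,               // resolved time-dimension column alias
    date_from: &str,                 // time-series "date_from" reference
    date_to: &str,                   // time-series "date_to" reference
    trailing_interval: Option<&str>,
    leading_interval: Option<&str>,
    offset: &str,                    // "start" or "end"
) -> String {
    let unbounded = Some("unbounded");
    let mut conditions = vec![];

    if trailing_interval != unbounded {
        let start_date = if offset == "start" { date_from } else { date_to };
        let trailing_start = match trailing_interval {
            Some(interval) => format!("{start_date} - interval '{interval}'"),
            None => start_date.to_string(),
        };
        let sign = if offset == "start" { ">=" } else { ">" };
        conditions.push(format!("{date_column} {sign} {trailing_start}"));
    }
    if leading_interval != unbounded {
        let end_date = if offset == "end" { date_to } else { date_from };
        let leading_end = match leading_interval {
            Some(interval) => format!("{end_date} + interval '{interval}'"),
            None => end_date.to_string(),
        };
        let sign = if offset == "end" { "<=" } else { "<" };
        conditions.push(format!("{date_column} {sign} {leading_end}"));
    }

    if conditions.is_empty() {
        "1 = 1".to_string()
    } else {
        conditions.join(" AND ")
    }
}

fn main() {
    // Rolling 3-day window anchored at the series end (offset = "end") yields:
    // visitors__created_at_day > series.date_to - interval '3 day'
    //   AND visitors__created_at_day <= series.date_to
    println!(
        "{}",
        rolling_window_condition(
            "visitors__created_at_day",
            "series.date_from",
            "series.date_to",
            Some("3 day"),
            None,
            "end",
        )
    );
}
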
rust/cubesqlplanner/cubesqlplanner/src/plan/time_series.rs (+3 -4)

@@ -12,13 +12,12 @@ pub struct TimeSeries {
 
 impl TimeSeries {
     pub fn make_schema(&self, self_alias: Option<String>) -> Schema {
-        /* let column = SchemaColumn::new(
+        let column = SchemaColumn::new(
             self_alias,
-            format!("from_date"),
+            format!("date_from"),
             self.time_dimension_name.clone(),
         );
-        Schema::new(vec![column], vec![]) */
-        Schema::empty()
+        Schema::new(vec![column], vec![])
     }
 
     pub fn to_sql(&self, templates: &PlanSqlTemplates) -> Result<String, CubeError> {

rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs (+8)

@@ -205,6 +205,14 @@ impl BaseMeasure {
         self.rolling_window().is_some()
     }
 
+    pub fn is_running_total(&self) -> bool {
+        self.measure_type() == "runningTotal"
+    }
+
+    pub fn is_cumulative(&self) -> bool {
+        self.is_rolling_window() || self.is_running_total()
+    }
+
     //FIXME dublicate with symbol
     pub fn measure_type(&self) -> &String {
         &self.definition.static_data().measure_type

rust/cubesqlplanner/cubesqlplanner/src/planner/base_time_dimension.rs (+18 -6)

@@ -11,6 +11,7 @@ pub struct BaseTimeDimension {
     query_tools: Rc<QueryTools>,
     granularity: Option<String>,
     date_range: Option<Vec<String>>,
+    alias_suffix: String,
 }
 
 impl BaseMember for BaseTimeDimension {
@@ -55,12 +56,7 @@ impl BaseMember for BaseTimeDimension {
     }
 
     fn alias_suffix(&self) -> Option<String> {
-        let granularity = if let Some(granularity) = &self.granularity {
-            granularity
-        } else {
-            "day"
-        };
-        Some(granularity.to_string())
+        Some(self.alias_suffix.clone())
     }
 }
 
@@ -71,14 +67,30 @@ impl BaseTimeDimension {
         granularity: Option<String>,
         date_range: Option<Vec<String>>,
     ) -> Result<Rc<Self>, CubeError> {
+        let alias_suffix = if let Some(granularity) = &granularity {
+            granularity.clone()
+        } else {
+            "day".to_string()
+        };
         Ok(Rc::new(Self {
             dimension: BaseDimension::try_new_required(member_evaluator, query_tools.clone())?,
             query_tools,
             granularity,
             date_range,
+            alias_suffix,
         }))
     }
 
+    pub fn change_granularity(&self, new_granularity: Option<String>) -> Rc<Self> {
+        Rc::new(Self {
+            dimension: self.dimension.clone(),
+            query_tools: self.query_tools.clone(),
+            granularity: new_granularity,
+            date_range: self.date_range.clone(),
+            alias_suffix: self.alias_suffix.clone(),
+        })
+    }
+
     pub fn get_granularity(&self) -> Option<String> {
         self.granularity.clone()
     }

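change_granularity returns a copy with only the granularity replaced; the alias_suffix captured at construction time is carried over unchanged, so the member keeps a stable alias across granularity changes. A minimal sketch with an assumed, simplified struct (not the real BaseTimeDimension) illustrating that behavior:

use std::rc::Rc;

// Illustrative stand-in for the time dimension: only the two fields
// relevant to the aliasing behavior are modeled here.
struct TimeDimensionSketch {
    granularity: Option<String>,
    alias_suffix: String,
}

impl TimeDimensionSketch {
    fn change_granularity(&self, new_granularity: Option<String>) -> Rc<Self> {
        Rc::new(Self {
            granularity: new_granularity,
            alias_suffix: self.alias_suffix.clone(), // suffix stays as constructed
        })
    }
}

fn main() {
    let day = TimeDimensionSketch {
        granularity: Some("day".to_string()),
        alias_suffix: "day".to_string(),
    };
    let week = day.change_granularity(Some("week".to_string()));
    assert_eq!(week.granularity.as_deref(), Some("week"));
    assert_eq!(week.alias_suffix, "day"); // alias suffix unchanged
}
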