-
Notifications
You must be signed in to change notification settings - Fork 2k
Expand file tree
/
Copy pathDatabricksQuery.ts
More file actions
219 lines (186 loc) · 8.86 KB
/
DatabricksQuery.ts
File metadata and controls
219 lines (186 loc) · 8.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
import R from 'ramda';
import { BaseFilter, BaseQuery } from '@cubejs-backend/schema-compiler';
import { parseSqlInterval } from '@cubejs-backend/shared';
// Maps Cube granularity names to the matching Databricks `date_trunc` unit.
// Every supported granularity shares its name with the Databricks unit, so
// the lookup table is generated from a single list of unit names.
const GRANULARITY_TO_INTERVAL: Record<string, string> = Object.fromEntries(
  ['day', 'week', 'hour', 'minute', 'second', 'month', 'quarter', 'year']
    .map((unit) => [unit, unit])
);
/**
 * Databricks-specific filter: builds case-insensitive LIKE expressions by
 * lower-casing both the column and the bound parameter.
 */
class DatabricksFilter extends BaseFilter {
  public likeIgnoreCase(column: any, not: any, param: any, type: string) {
    // A leading '%' is needed unless the match is anchored to the start
    // ('starts'); a trailing '%' unless anchored to the end ('ends').
    // A missing/empty type behaves like 'contains' (wildcards on both sides).
    const needsLeadingWildcard = !type || ['contains', 'ends'].includes(type);
    const needsTrailingWildcard = !type || ['contains', 'starts'].includes(type);
    const leading = needsLeadingWildcard ? '%' : '';
    const trailing = needsTrailingWildcard ? '%' : '';
    const negation = not ? ' NOT' : '';
    return `LOWER(${column})${negation} LIKE CONCAT('${leading}', LOWER(${this.allocateParam(param)}), '${trailing}')`;
  }
}
/**
 * Cube query dialect for Databricks (Spark SQL).
 *
 * Overrides BaseQuery string-building hooks so generated SQL uses Databricks
 * syntax: backtick-quoted identifiers, `from_utc_timestamp` for time zone
 * handling, HLL sketch functions for approximate distinct counts, and ANSI
 * INTERVAL literals for date arithmetic.
 */
export class DatabricksQuery extends BaseQuery {
  /** Returns the Databricks-specific filter (case-insensitive LIKE). */
  public newFilter(filter: any): BaseFilter {
    return new DatabricksFilter(this, filter);
  }

  /** Casts an arbitrary SQL expression to Databricks STRING. */
  public castToString(sql: string): string {
    return `CAST(${sql} as STRING)`;
  }

  /** Aggregates raw values into an HLL sketch (`hll_sketch_agg`). */
  public hllInit(sql: string) {
    return `hll_sketch_agg(${sql})`;
  }

  /** Merges pre-aggregated HLL sketches (`hll_union_agg`). */
  public hllMerge(sql: string) {
    return `hll_union_agg(${sql})`;
  }

  /** Estimates cardinality from a single HLL sketch. */
  public hllCardinality(sql: string): string {
    return `hll_sketch_estimate(${sql})`;
  }

  /** Merges sketches, then estimates cardinality, in one expression. */
  public hllCardinalityMerge(sql: string): string {
    return `hll_sketch_estimate(hll_union_agg(${sql}))`;
  }

  /** Approximate COUNT(DISTINCT ...) via `approx_count_distinct`. */
  public countDistinctApprox(sql: string) {
    return `approx_count_distinct(${sql})`;
  }

  /** Shifts a UTC timestamp expression into the query's target time zone. */
  public convertTz(field: string) {
    return `from_utc_timestamp(${field}, '${this.timezone}')`;
  }

  /**
   * Casts an ISO-8601 string to a timestamp: strips the 'T' separator and
   * trailing 'Z' before parsing the result as UTC.
   */
  public timeStampCast(value: string) {
    return `from_utc_timestamp(replace(replace(${value}, 'T', ' '), 'Z', ''), 'UTC')`;
  }

  /** Casts a date-time value to timestamp, treating it as UTC. */
  public dateTimeCast(value: string) {
    return `from_utc_timestamp(${value}, 'UTC')`; // TODO
  }

  /**
   * Subtracts a SQL interval (e.g. "1 month 2 days") from a date expression
   * by chaining one `- INTERVAL '<value>' <unit>` term per parsed unit.
   */
  public subtractInterval(date: string, interval: string): string {
    const intervalParsed = parseSqlInterval(interval);
    let res = date;
    for (const [key, value] of Object.entries(intervalParsed)) {
      res = `(${res} - INTERVAL '${value}' ${key})`;
    }
    return res;
  }

  /**
   * Adds a SQL interval to a date expression; mirror of subtractInterval.
   */
  public addInterval(date: string, interval: string): string {
    const intervalParsed = parseSqlInterval(interval);
    let res = date;
    for (const [key, value] of Object.entries(intervalParsed)) {
      res = `(${res} + INTERVAL '${value}' ${key})`;
    }
    return res;
  }

  /** Truncates a dimension to the given granularity via `date_trunc`. */
  public timeGroupedColumn(granularity: string, dimension: string): string {
    return `date_trunc('${GRANULARITY_TO_INTERVAL[granularity]}', ${dimension})`;
  }

  /**
   * Returns sql for source expression floored to timestamps aligned with
   * intervals relative to origin timestamp point.
   *
   * Works by counting whole `timeUnit` steps from origin to source, dividing
   * by the number of `timeUnit` steps in one interval (measured against a
   * fixed epoch reference), flooring, and adding that many intervals back
   * onto the origin.
   */
  public dateBin(interval: string, source: string, origin: string): string {
    const [intervalFormatted, timeUnit] = this.formatInterval(interval);
    const beginOfTime = this.dateTimeCast('\'1970-01-01T00:00:00\'');

    return `${this.dateTimeCast(`'${origin}'`)} + INTERVAL ${intervalFormatted} *
      floor(
        date_diff(${timeUnit}, ${this.dateTimeCast(`'${origin}'`)}, ${source}) /
        date_diff(${timeUnit}, ${beginOfTime}, ${beginOfTime} + INTERVAL ${intervalFormatted})
      )`;
  }

  /**
   * The input interval with (possible) plural units, like "2 years", "3 months", "4 weeks", "5 days"...
   * will be converted to Databricks dialect.
   * @see https://docs.databricks.com/en/sql/language-manual/data-types/interval-type.html
   * It returns a tuple of (formatted interval, timeUnit to use in datediff functions)
   *
   * Only the unit combinations Databricks supports as a single INTERVAL
   * literal (year-month or day-time ranges) are accepted; anything else
   * throws. The returned timeUnit is always the finest unit present.
   */
  private formatInterval(interval: string): [string, string] {
    const intervalParsed = parseSqlInterval(interval);
    const intKeys = Object.keys(intervalParsed).length;

    if (intervalParsed.year && intKeys === 1) {
      return [`'${intervalParsed.year}' YEAR`, 'YEAR'];
    } else if (intervalParsed.year && intervalParsed.month && intKeys === 2) {
      return [`'${intervalParsed.year}-${intervalParsed.month}' YEAR TO MONTH`, 'MONTH'];
    } else if (intervalParsed.month && intKeys === 1) {
      return [`'${intervalParsed.month}' MONTH`, 'MONTH'];
    } else if (intervalParsed.day && intKeys === 1) {
      return [`'${intervalParsed.day}' DAY`, 'DAY'];
    } else if (intervalParsed.day && intervalParsed.hour && intKeys === 2) {
      return [`'${intervalParsed.day} ${intervalParsed.hour}' DAY TO HOUR`, 'HOUR'];
    } else if (intervalParsed.day && intervalParsed.hour && intervalParsed.minute && intKeys === 3) {
      return [`'${intervalParsed.day} ${intervalParsed.hour}:${intervalParsed.minute}' DAY TO MINUTE`, 'MINUTE'];
    } else if (intervalParsed.day && intervalParsed.hour && intervalParsed.minute && intervalParsed.second && intKeys === 4) {
      return [`'${intervalParsed.day} ${intervalParsed.hour}:${intervalParsed.minute}:${intervalParsed.second}' DAY TO SECOND`, 'SECOND'];
    } else if (intervalParsed.hour && intervalParsed.minute && intKeys === 2) {
      return [`'${intervalParsed.hour}:${intervalParsed.minute}' HOUR TO MINUTE`, 'MINUTE'];
    } else if (intervalParsed.hour && intervalParsed.minute && intervalParsed.second && intKeys === 3) {
      return [`'${intervalParsed.hour}:${intervalParsed.minute}:${intervalParsed.second}' HOUR TO SECOND`, 'SECOND'];
    } else if (intervalParsed.minute && intervalParsed.second && intKeys === 2) {
      return [`'${intervalParsed.minute}:${intervalParsed.second}' MINUTE TO SECOND`, 'SECOND'];
    }

    // No need to support microseconds.

    throw new Error(`Cannot transform interval expression "${interval}" to Databricks dialect`);
  }

  /** Quotes an identifier with backticks (Databricks identifier quoting). */
  public escapeColumnName(name: string) {
    return `\`${name}\``;
  }

  /**
   * Resolves a field reference to its select-list index; when the base
   * implementation finds no index, falls back to the backtick-escaped
   * alias name for the field.
   */
  public override getFieldIndex(id: string): string | number | null {
    const idx = super.getFieldIndex(id);
    if (idx !== null) {
      return idx;
    }
    return this.escapeColumnName(this.aliasName(id, false));
  }

  /** SQL expression yielding the current Unix timestamp. */
  public unixTimestampSql() {
    return 'unix_timestamp()';
  }

  // NOTE(review): presumably seconds, per the BaseQuery contract — confirm.
  public defaultRefreshKeyRenewalThreshold() {
    return 120;
  }

  /**
   * Databricks can generate time series natively (see the
   * generated_time_series templates below built on sequence()/explode()).
   */
  public supportGeneratedSeriesForCustomTd() {
    return true;
  }

  /**
   * Extends the base SQL templates with Databricks-dialect overrides:
   * function spellings, backtick identifier quoting, interval literals,
   * and time-series generation statements.
   */
  public sqlTemplates() {
    const templates = super.sqlTemplates();
    templates.functions.CURRENTDATE = 'CURRENT_DATE';
    templates.functions.DATETRUNC = 'DATE_TRUNC({{ args_concat }})';
    templates.functions.DATEPART = 'DATE_PART({{ args_concat }})';
    templates.functions.BTRIM = 'TRIM({% if args[1] is defined %}{{ args[1] }} FROM {% endif %}{{ args[0] }})';
    // Databricks LTRIM/RTRIM take (trimStr, str) — opposite argument order —
    // hence the reverse filter.
    templates.functions.LTRIM = 'LTRIM({{ args|reverse|join(", ") }})';
    templates.functions.RTRIM = 'RTRIM({{ args|reverse|join(", ") }})';
    // Truncate both operands to the date_part so DATEDIFF counts whole-unit
    // boundaries crossed rather than elapsed units.
    templates.functions.DATEDIFF = 'DATEDIFF({{ date_part }}, DATE_TRUNC(\'{{ date_part }}\', {{ args[1] }}), DATE_TRUNC(\'{{ date_part }}\', {{ args[2] }}))';
    templates.functions.LEAST = 'LEAST({{ args_concat }})';
    templates.functions.GREATEST = 'GREATEST({{ args_concat }})';
    // Emulate truncation toward zero: FLOOR for non-negative, CEIL for negative.
    templates.functions.TRUNC = 'CASE WHEN ({{ args[0] }}) >= 0 THEN FLOOR({{ args_concat }}) ELSE CEIL({{ args_concat }}) END';
    templates.expressions.timestamp_literal = 'from_utc_timestamp(\'{{ value }}\', \'UTC\')';
    // EXTRACT special cases: 'epoch' maps to unix_timestamp(), 'isodow' to
    // the DAYOFWEEK_ISO field.
    templates.expressions.extract = '{% if date_part|lower == "epoch" %}unix_timestamp({{ expr }}){% elif date_part|lower == "isodow" %}EXTRACT(DAYOFWEEK_ISO FROM {{ expr }}){% else %}EXTRACT({{ date_part }} FROM {{ expr }}){% endif %}';
    templates.expressions.interval_single_date_part = 'INTERVAL \'{{ num }}\' {{ date_part }}';
    templates.quotes.identifiers = '`';
    templates.quotes.escape = '``';
    // Materializes a pre-computed list of (date_from, date_to) pairs as an
    // inline VALUES table.
    templates.statements.time_series_select = 'SELECT date_from::timestamp AS `date_from`,\n' +
      'date_to::timestamp AS `date_to` \n' +
      'FROM(\n' +
      ' VALUES ' +
      '{% for time_item in seria %}' +
      '(\'{{ time_item | join(\'\\\', \\\'\') }}\')' +
      '{% if not loop.last %}, {% endif %}' +
      '{% endfor %}' +
      ') AS dates (date_from, date_to)';
    // Generates the series on the server via sequence()/explode(); each bucket
    // ends 1 millisecond before the next one starts.
    templates.statements.generated_time_series_select = 'SELECT d AS date_from,\n' +
      '(d + INTERVAL {{ granularity }}) - INTERVAL 1 MILLISECOND AS date_to\n' +
      ' FROM (SELECT explode(sequence(\n' +
      ' from_utc_timestamp({{ start }}, \'UTC\'), from_utc_timestamp({{ end }}, \'UTC\'), INTERVAL {{ granularity }}\n' +
      ' )) AS d)';
    // Same series generation, but bounded by a min/max range coming from a CTE.
    templates.statements.generated_time_series_with_cte_range_source =
      'SELECT d AS date_from,\n' +
      '(d + INTERVAL {{ granularity }}) - INTERVAL 1 MILLISECOND AS date_to\n' +
      'FROM {{ range_source }}\n' +
      'LATERAL VIEW explode(\n' +
      ' sequence(\n' +
      ' CAST({{ min_name }} AS TIMESTAMP),\n' +
      ' CAST({{ max_name }} AS TIMESTAMP),\n' +
      ' INTERVAL {{ granularity }}\n' +
      ' )\n' +
      ') dates AS d';
    // TODO: Databricks has `TIMESTAMP_NTZ` with logic similar to Pg's `TIMESTAMP`
    // but that requires Runtime 13.3+. Should this be enabled?
    // templates.types.timestamp = 'TIMESTAMP_NTZ';
    delete templates.types.time;
    // Databricks intervals have a YearMonth or DayTime type variants, but no universal type
    delete templates.types.interval;
    return templates;
  }
}