Hi guys, previously I was using the following code to convert Presto SQL to Spark SQL; how should I do this with the new sqlglot version? Really appreciated!! Regards,

```python
from ast import literal_eval

from sqlglot import transpile
from sqlglot.expressions import Func
from sqlglot.generator import Generator
from sqlglot.parser import Parser
from sqlglot.tokens import Tokenizer


class date_add(Func):  # put the presto func name here, e.g., date_add
    arg_types = {'unit': True, 'duration': True, 'time': True}


class date_diff(Func):
    arg_types = {'unit': True, 'start_time': True, 'end_time': True}


class sqlTranslator:
    '''
    Use this class to interpret and convert Presto SQL to Spark SQL.
    '''
    param_dict = {
        '[[inc_start_date]]': "'{inc_start_date}'",
        '[[inc_end_date]]': "'{inc_end_date}'",
        '[[inc_start_datetime]]': "'{inc_start_date}'",
        '[[inc_end_datetime]]': "'{inc_end_date}'",
    }

    def __init__(self, query):
        # substitute templated parameters before parsing
        for s, t in self.param_dict.items():
            query = query.replace(s, t)
        self.query = query

    def get_structure(self):
        '''
        Return the parsed query structure.
        '''
        tokens = Tokenizer().tokenize(self.query)
        tree = Parser(functions={
            **date_add.default_parser_mappings(),
            **date_diff.default_parser_mappings(),
        }).parse(tokens)[0]
        return tree

    def get_spark_sql(self, pretty=True):
        '''
        Return the Spark query.
        '''
        output = Generator(TRANSFORMS={
            # spark format for date_add: add or subtract an interval
            date_add: lambda self, e: (
                f"({self.sql(e, 'time')} + interval '{self.sql(e, 'duration')}' {literal_eval(self.sql(e, 'unit'))})"
                if int(self.sql(e, 'duration')) > 0
                else f"({self.sql(e, 'time')} - interval '{abs(int(self.sql(e, 'duration')))}' {literal_eval(self.sql(e, 'unit'))})"
            ),
            # spark format for date_diff: pick an equivalent Spark expression per unit
            date_diff: lambda self, e: (
                f"floor((months_between({self.sql(e, 'end_time')}, {self.sql(e, 'start_time')}))/12)" if literal_eval(self.sql(e, 'unit')) == 'year'
                else f"floor((months_between({self.sql(e, 'end_time')}, {self.sql(e, 'start_time')}))/3)" if literal_eval(self.sql(e, 'unit')) == 'quarter'
                else f"floor(months_between({self.sql(e, 'end_time')}, {self.sql(e, 'start_time')}))" if literal_eval(self.sql(e, 'unit')) == 'month'
                else f"floor(datediff({self.sql(e, 'end_time')}, {self.sql(e, 'start_time')})/7)" if literal_eval(self.sql(e, 'unit')) == 'week'
                else f"datediff({self.sql(e, 'end_time')}, {self.sql(e, 'start_time')})" if literal_eval(self.sql(e, 'unit')) == 'day'
                else f"floor((unix_timestamp({self.sql(e, 'end_time')}) - unix_timestamp({self.sql(e, 'start_time')}))/3600)" if literal_eval(self.sql(e, 'unit')) == 'hour'
                else f"floor(unix_timestamp({self.sql(e, 'end_time')}) - unix_timestamp({self.sql(e, 'start_time')}))" if literal_eval(self.sql(e, 'unit')) == 'second'
                else None
            ),
        }).generate(self.get_structure())
        spark_query = transpile(output, read='presto', write='spark', pretty=pretty)[0]
        return spark_query
```
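For context, here is how the class would be invoked; the query text below is just an illustrative example, not from the original post, but it exercises both custom functions and the templated parameters:

```python
translator = sqlTranslator(
    "SELECT date_add('day', -7, event_time) AS week_ago, "
    "date_diff('hour', start_time, end_time) AS duration_hours "
    "FROM events WHERE dt >= [[inc_start_date]]"
)
print(translator.get_spark_sql())
```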
You need to subclass now in sqlglot 4.0. Look at the presto/hive dialects for examples: https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/presto.py
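To make that suggestion concrete, here is a minimal sketch of the subclassing approach, assuming a 4.x-style API where dialects declare a nested Generator with a TRANSFORMS mapping and auto-register under their lowercased class name, and that the Presto reader already parses date_add into exp.DateAdd. The CustomSpark name and the transform body are illustrative; exact expression names and arg keys may differ across sqlglot versions:

```python
from sqlglot import exp, transpile
from sqlglot.dialects.spark import Spark


class CustomSpark(Spark):
    # Dialect subclasses register under their lowercased class name,
    # so this can be referenced as write='customspark' below.
    class Generator(Spark.Generator):
        TRANSFORMS = {
            **Spark.Generator.TRANSFORMS,
            # illustrative: render DATE_ADD as explicit interval arithmetic
            exp.DateAdd: lambda self, e: (
                f"({self.sql(e, 'this')} + interval "
                f"'{self.sql(e, 'expression')}' {e.text('unit').lower()})"
            ),
        }


# Presto's parser handles date_add(unit, value, ts) on the read side,
# so only the Spark-side rendering is customized here.
sql = "SELECT date_add('day', 7, ts) FROM events"
print(transpile(sql, read='presto', write='customspark', pretty=True)[0])
```

Compared with the original approach, the per-unit rewrite logic moves out of an ad-hoc Generator call and into the dialect itself, so the same custom dialect can be reused anywhere a dialect name is accepted.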