1 change: 1 addition & 0 deletions sqlglot/dialects/athena.py
@@ -285,4 +285,5 @@ class _TrinoGenerator(Trino.Generator):
**Trino.Generator.TRANSFORMS,
exp.PartitionedByProperty: _partitioned_by_property_sql,
exp.LocationProperty: _location_property_sql,
exp.Localtime: lambda self, e: "LOCALTIME",
}
1 change: 1 addition & 0 deletions sqlglot/dialects/bigquery.py
@@ -497,6 +497,7 @@ class Tokenizer(tokens.Tokenizer):
KEYWORDS.pop("DIV")
KEYWORDS.pop("VALUES")
KEYWORDS.pop("/*+")
KEYWORDS.pop("LOCALTIME")

class Parser(parser.Parser):
PREFIXED_PIVOT_COLUMNS = True
1 change: 1 addition & 0 deletions sqlglot/dialects/clickhouse.py
@@ -329,6 +329,7 @@ class Tokenizer(tokens.Tokenizer):
"PREWHERE": TokenType.PREWHERE,
}
KEYWORDS.pop("/*+")
KEYWORDS.pop("LOCALTIME")

SINGLE_TOKENS = {
**tokens.Tokenizer.SINGLE_TOKENS,
1 change: 1 addition & 0 deletions sqlglot/dialects/duckdb.py
@@ -878,6 +878,7 @@ class Generator(generator.Generator):
exp.JSONFormat: _json_format_sql,
exp.JSONValueArray: _json_extract_value_array_sql,
exp.Lateral: explode_to_unnest_sql,
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LogicalOr: rename_func("BOOL_OR"),
exp.LogicalAnd: rename_func("BOOL_AND"),
exp.MakeInterval: lambda self, e: no_make_interval_sql(self, e, sep=" "),
2 changes: 2 additions & 0 deletions sqlglot/dialects/hive.py
@@ -304,6 +304,8 @@ class Tokenizer(tokens.Tokenizer):
"BD": "DECIMAL",
}

KEYWORDS.pop("LOCALTIME")

class Parser(parser.Parser):
LOG_DEFAULTS_TO_LN = True
STRICT_CAST = False
1 change: 1 addition & 0 deletions sqlglot/dialects/mysql.py
@@ -775,6 +775,7 @@ class Generator(generator.Generator):
exp.ILike: no_ilike_sql,
exp.JSONExtractScalar: arrow_json_extract_sql,
exp.Length: length_or_char_length_sql,
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LogicalOr: rename_func("MAX"),
exp.LogicalAnd: rename_func("MIN"),
exp.Max: max_or_greatest,
2 changes: 2 additions & 0 deletions sqlglot/dialects/oracle.py
@@ -113,6 +113,8 @@ class Tokenizer(tokens.Tokenizer):
"VARCHAR2": TokenType.VARCHAR,
}

KEYWORDS.pop("LOCALTIME")

class Parser(parser.Parser):
WINDOW_BEFORE_PAREN_TOKENS = {TokenType.OVER, TokenType.KEEP}
VALUES_FOLLOWED_BY_PAREN = False
1 change: 1 addition & 0 deletions sqlglot/dialects/postgres.py
@@ -664,6 +664,7 @@ class Generator(generator.Generator):
exp.JSONPathRoot: lambda *_: "",
exp.JSONPathSubscript: lambda self, e: self.json_path_part(e.this),
exp.LastDay: no_last_day_sql,
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LogicalOr: rename_func("BOOL_OR"),
exp.LogicalAnd: rename_func("BOOL_AND"),
exp.Max: max_or_greatest,
1 change: 1 addition & 0 deletions sqlglot/dialects/presto.py
@@ -488,6 +488,7 @@ class Generator(generator.Generator):
exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost", "max_dist")(
rename_func("LEVENSHTEIN_DISTANCE")
),
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LogicalAnd: rename_func("BOOL_AND"),
exp.LogicalOr: rename_func("BOOL_OR"),
exp.Pivot: no_pivot_sql,
1 change: 1 addition & 0 deletions sqlglot/dialects/redshift.py
@@ -139,6 +139,7 @@ class Tokenizer(Postgres.Tokenizer):
"BINARY VARYING": TokenType.VARBINARY,
}
KEYWORDS.pop("VALUES")
KEYWORDS.pop("LOCALTIME")

# Redshift allows # to appear as a table identifier prefix
SINGLE_TOKENS = Postgres.Tokenizer.SINGLE_TOKENS.copy()
1 change: 1 addition & 0 deletions sqlglot/dialects/snowflake.py
@@ -1385,6 +1385,7 @@ class Generator(generator.Generator):
exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
rename_func("EDITDISTANCE")
),
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LocationProperty: lambda self, e: f"LOCATION={self.sql(e, 'this')}",
exp.LogicalAnd: rename_func("BOOLAND_AGG"),
exp.LogicalOr: rename_func("BOOLOR_AGG"),
1 change: 1 addition & 0 deletions sqlglot/dialects/trino.py
@@ -80,6 +80,7 @@ class Generator(Presto.Generator):
e: f"REDUCE({self.sql(e, 'this')}, 0, (acc, x) -> acc + x, acc -> acc)",
exp.ArrayUniqueAgg: lambda self, e: f"ARRAY_AGG(DISTINCT {self.sql(e, 'this')})",
exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, on_overflow=True),
exp.Localtime: lambda self, e: "LOCALTIME",
exp.LocationProperty: lambda self, e: self.property_sql(e),
exp.Merge: merge_without_target_sql,
exp.Select: transforms.preprocess(
1 change: 1 addition & 0 deletions sqlglot/dialects/tsql.py
@@ -577,6 +577,7 @@ class Tokenizer(tokens.Tokenizer):
"XML": TokenType.XML,
}
KEYWORDS.pop("/*+")
KEYWORDS.pop("LOCALTIME")

COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

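
Taken together, the dialect changes above follow two patterns: generators that gain an exp.Localtime transform emit the bare keyword LOCALTIME (no parentheses), while tokenizers that pop the LOCALTIME keyword leave the word to be parsed as an ordinary column reference. A minimal behavioral sketch, assuming a sqlglot build that includes this change (it mirrors the dialect tests added below):

    import sqlglot
    from sqlglot import exp

    # Dialects that keep LOCALTIME as a keyword parse it into the new expression node.
    ast = sqlglot.parse_one("SELECT LOCALTIME", read="postgres")
    assert isinstance(ast.expressions[0], exp.Localtime)

    # Dialects that pop the keyword (BigQuery, ClickHouse, Hive, Oracle, Redshift, T-SQL)
    # fall back to parsing the word as a plain column.
    ast = sqlglot.parse_one("SELECT LOCALTIME", read="bigquery")
    assert isinstance(ast.expressions[0], exp.Column)
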
4 changes: 4 additions & 0 deletions sqlglot/expressions.py
@@ -6289,6 +6289,10 @@ class CurrentUser(Func):
arg_types = {"this": False}


class Localtime(Func):
arg_types = {"this": False}


class UtcDate(Func):
arg_types = {}

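
Like CurrentUser just above it, the new Localtime node declares an optional "this" argument (presumably a precision operand, mirroring CurrentTime), so it can be built and rendered programmatically. A small construction sketch under that assumption:

    from sqlglot import exp

    node = exp.Localtime()
    # Dialects whose generators gained the transform render the bare keyword.
    print(node.sql(dialect="postgres"))  # LOCALTIME
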
1 change: 1 addition & 0 deletions sqlglot/parser.py
@@ -282,6 +282,7 @@ class Parser(metaclass=_Parser):
TokenType.CURRENT_TIME: exp.CurrentTime,
TokenType.CURRENT_TIMESTAMP: exp.CurrentTimestamp,
TokenType.CURRENT_USER: exp.CurrentUser,
TokenType.LOCALTIME: exp.Localtime,
}

STRUCT_TYPE_TOKENS = {
2 changes: 2 additions & 0 deletions sqlglot/tokens.py
@@ -271,6 +271,7 @@ class TokenType(AutoName):
CURRENT_SCHEMA = auto()
CURRENT_TIME = auto()
CURRENT_TIMESTAMP = auto()
LOCALTIME = auto()
CURRENT_USER = auto()
DECLARE = auto()
DEFAULT = auto()
@@ -803,6 +804,7 @@ class Tokenizer(metaclass=_Tokenizer):
"LIMIT": TokenType.LIMIT,
"LOAD": TokenType.LOAD,
"LOCK": TokenType.LOCK,
"LOCALTIME": TokenType.LOCALTIME,
"MERGE": TokenType.MERGE,
"NAMESPACE": TokenType.NAMESPACE,
"NATURAL": TokenType.NATURAL,
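
With the token type, keyword, parser mapping, and expression class in place, LOCALTIME should survive cross-dialect transpilation wherever the target generator gained the transform. A hedged round-trip sketch:

    import sqlglot

    # Read as MySQL (keyword kept) and write to several targets that map exp.Localtime.
    for target in ("duckdb", "presto", "snowflake", "trino"):
        sql = sqlglot.transpile("SELECT LOCALTIME", read="mysql", write=target)[0]
        print(target, "->", sql)  # expected: SELECT LOCALTIME
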
7 changes: 7 additions & 0 deletions tests/dialects/test_athena.py
@@ -308,3 +308,10 @@ def test_parse_partitioned_by_returns_iceberg_transforms(self):
assert isinstance(parsed.this, exp.Schema)
assert next(n for n in parsed.this.expressions if isinstance(n, exp.PartitionedByBucket))
assert next(n for n in parsed.this.expressions if isinstance(n, exp.PartitionByTruncate))

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)
7 changes: 7 additions & 0 deletions tests/dialects/test_bigquery.py
@@ -3388,6 +3388,13 @@ def test_to_json_string(self):
},
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)

def test_concat(self):
self.validate_all(
"SELECT CONCAT('T.P.', ' ', 'Bar') AS author",
7 changes: 7 additions & 0 deletions tests/dialects/test_clickhouse.py
@@ -1522,6 +1522,13 @@ def test_array_offset(self):
],
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)

def test_to_start_of(self):
for unit in ("SECOND", "DAY", "YEAR"):
self.validate_all(
7 changes: 7 additions & 0 deletions tests/dialects/test_duckdb.py
@@ -2038,6 +2038,13 @@ def test_install(self):
self.validate_identity("FORCE INSTALL httpfs FROM 'https://extensions.duckdb.org'")
self.validate_identity("FORCE CHECKPOINT db", check_command_warning=True)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)

def test_cte_using_key(self):
self.validate_identity(
"WITH RECURSIVE tbl(a, b) USING KEY (a) AS (SELECT a, b FROM (VALUES (1, 3), (2, 4)) AS t(a, b) UNION SELECT a + 1, b FROM tbl WHERE a < 3) SELECT * FROM tbl"
7 changes: 7 additions & 0 deletions tests/dialects/test_hive.py
@@ -972,3 +972,10 @@ def test_joins_without_on(self):
"duckdb": f"SELECT * FROM t1 {join} JOIN t2 ON TRUE",
},
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)
7 changes: 7 additions & 0 deletions tests/dialects/test_mysql.py
@@ -1512,6 +1512,13 @@ def test_mod(self):
self.validate_identity("x MOD y", "x % y").assert_is(exp.Mod)
self.validate_identity("MOD(x, y)", "x % y").assert_is(exp.Mod)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)

def test_valid_interval_units(self):
for unit in (
"SECOND_MICROSECOND",
7 changes: 7 additions & 0 deletions tests/dialects/test_oracle.py
@@ -800,6 +800,13 @@ def test_merge_builder_alias(self):
"MERGE INTO my_table USING (SELECT * FROM something) source_table ON my_table.id = source_table.id WHEN MATCHED THEN UPDATE SET my_table.col1 = source_table.col1 WHEN NOT MATCHED THEN INSERT (my_table.id, my_table.col1) VALUES (source_table.id, source_table.col1)",
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)

def test_pseudocolumns(self):
ast = self.validate_identity(
"WITH t AS (SELECT 1 AS COL) SELECT col, ROWID FROM t WHERE ROWNUM = 1"
7 changes: 7 additions & 0 deletions tests/dialects/test_postgres.py
@@ -1655,6 +1655,13 @@ def test_begin_transaction(self):
f"BEGIN {keyword} {level}, {level}", f"BEGIN {level}, {level}"
).assert_is(exp.Transaction)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)

def test_interval_span(self):
for time_str in ["1 01:", "1 01:00", "1.5 01:", "-0.25 01:"]:
with self.subTest(f"Postgres INTERVAL span, omitted DAY TO MINUTE unit: {time_str}"):
7 changes: 7 additions & 0 deletions tests/dialects/test_presto.py
@@ -1368,6 +1368,13 @@ def test_bit_aggs(self):
},
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)

def test_initcap(self):
self.validate_all(
"INITCAP(col)",
7 changes: 7 additions & 0 deletions tests/dialects/test_redshift.py
@@ -724,3 +724,10 @@ def test_fetch_to_limit(self):
"postgres": "SELECT * FROM t FETCH FIRST 1 ROWS ONLY",
},
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)
7 changes: 7 additions & 0 deletions tests/dialects/test_snowflake.py
@@ -3591,6 +3591,13 @@ def test_model_attribute(self):
"SELECT * FROM TABLE(model_trained_with_labeled_data!DETECT_ANOMALIES(INPUT_DATA => TABLE(view_with_data_to_analyze), TIMESTAMP_COLNAME => 'date', TARGET_COLNAME => 'sales', CONFIG_OBJECT => OBJECT_CONSTRUCT('prediction_interval', 0.99)))"
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)

def test_set_item_kind_attribute(self):
expr = parse_one("ALTER SESSION SET autocommit = FALSE", read="snowflake")
set_item = expr.find(exp.SetItem)
8 changes: 8 additions & 0 deletions tests/dialects/test_trino.py
@@ -1,3 +1,4 @@
from sqlglot import exp
from tests.dialects.test_dialect import Validator


@@ -159,3 +160,10 @@ def test_json_value(self):
self.validate_identity(
f"""SELECT JSON_VALUE({json_doc}, 'lax $.price' RETURNING DECIMAL(4, 2) {on_option} ON EMPTY {on_option} ON ERROR) AS price"""
)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Localtime)
7 changes: 7 additions & 0 deletions tests/dialects/test_tsql.py
@@ -2393,6 +2393,13 @@ def test_collation_parse(self):
exp.Alter
).args.get("actions")[0].args.get("collate").this.assert_is(exp.Var)

def test_localtime(self):
expr = self.validate_identity(
"SELECT LOCALTIME",
write_sql="SELECT LOCALTIME",
)
expr.expressions[0].assert_is(exp.Column)

def test_odbc_date_literals(self):
for value, cls in [
("{d'2024-01-01'}", exp.Date),