diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
index 4db56d6c70561..53595d1b8a3eb 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
@@ -212,7 +212,7 @@ select size(timestamp_array) from primitive_arrays
 -- !query analysis
-Project [size(boolean_array#x, true) AS size(boolean_array)#x, size(tinyint_array#x, true) AS size(tinyint_array)#x, size(smallint_array#x, true) AS size(smallint_array)#x, size(int_array#x, true) AS size(int_array)#x, size(bigint_array#x, true) AS size(bigint_array)#x, size(decimal_array#x, true) AS size(decimal_array)#x, size(double_array#x, true) AS size(double_array)#x, size(float_array#x, true) AS size(float_array)#x, size(date_array#x, true) AS size(date_array)#x, size(timestamp_array#x, true) AS size(timestamp_array)#x]
+Project [size(boolean_array#x, false) AS size(boolean_array)#x, size(tinyint_array#x, false) AS size(tinyint_array)#x, size(smallint_array#x, false) AS size(smallint_array)#x, size(int_array#x, false) AS size(int_array)#x, size(bigint_array#x, false) AS size(bigint_array)#x, size(decimal_array#x, false) AS size(decimal_array)#x, size(double_array#x, false) AS size(double_array)#x, size(float_array#x, false) AS size(float_array)#x, size(date_array#x, false) AS size(date_array)#x, size(timestamp_array#x, false) AS size(timestamp_array)#x]
 +- SubqueryAlias primitive_arrays
    +- View (`primitive_arrays`, [boolean_array#x, tinyint_array#x, smallint_array#x, int_array#x, bigint_array#x, decimal_array#x, double_array#x, float_array#x, date_array#x, timestamp_array#x])
       +- Project [cast(boolean_array#x as array) AS boolean_array#x, cast(tinyint_array#x as array) AS tinyint_array#x, cast(smallint_array#x as array) AS smallint_array#x, cast(int_array#x as array) AS int_array#x, cast(bigint_array#x as array) AS bigint_array#x, cast(decimal_array#x as array) AS decimal_array#x, cast(double_array#x as array) AS double_array#x, cast(float_array#x as array) AS float_array#x, cast(date_array#x as array) AS date_array#x, cast(timestamp_array#x as array) AS timestamp_array#x]
@@ -224,70 +224,70 @@ Project [size(boolean_array#x, true) AS size(boolean_array)#x, size(tinyint_arra
 -- !query
 select element_at(array(1, 2, 3), 5)
 -- !query analysis
-Project [element_at(array(1, 2, 3), 5, None, false) AS element_at(array(1, 2, 3), 5)#x]
+Project [element_at(array(1, 2, 3), 5, None, true) AS element_at(array(1, 2, 3), 5)#x]
 +- OneRowRelation


 -- !query
 select element_at(array(1, 2, 3), -5)
 -- !query analysis
-Project [element_at(array(1, 2, 3), -5, None, false) AS element_at(array(1, 2, 3), -5)#x]
+Project [element_at(array(1, 2, 3), -5, None, true) AS element_at(array(1, 2, 3), -5)#x]
 +- OneRowRelation


 -- !query
 select element_at(array(1, 2, 3), 0)
 -- !query analysis
-Project [element_at(array(1, 2, 3), 0, None, false) AS element_at(array(1, 2, 3), 0)#x]
+Project [element_at(array(1, 2, 3), 0, None, true) AS element_at(array(1, 2, 3), 0)#x]
 +- OneRowRelation


 -- !query
 select elt(4, '123', '456')
 -- !query analysis
-Project [elt(4, 123, 456, false) AS elt(4, 123, 456)#x]
+Project [elt(4, 123, 456, true) AS elt(4, 123, 456)#x]
 +- OneRowRelation


 -- !query
 select elt(0, '123', '456')
 -- !query analysis
-Project [elt(0, 123, 456, false) AS elt(0, 123, 456)#x]
+Project [elt(0, 123, 456, true) AS elt(0, 123, 456)#x]
 +- OneRowRelation


 -- !query
 select elt(-1, '123', '456')
 -- !query analysis
-Project [elt(-1, 123, 456, false) AS elt(-1, 123, 456)#x]
+Project [elt(-1, 123, 456, true) AS elt(-1, 123, 456)#x]
 +- OneRowRelation


 -- !query
 select elt(null, '123', '456')
 -- !query analysis
-Project [elt(cast(null as int), 123, 456, false) AS elt(NULL, 123, 456)#x]
+Project [elt(cast(null as int), 123, 456, true) AS elt(NULL, 123, 456)#x]
 +- OneRowRelation


 -- !query
 select elt(null, '123', null)
 -- !query analysis
-Project [elt(cast(null as int), 123, cast(null as string), false) AS elt(NULL, 123, NULL)#x]
+Project [elt(cast(null as int), 123, cast(null as string), true) AS elt(NULL, 123, NULL)#x]
 +- OneRowRelation


 -- !query
 select elt(1, '123', null)
 -- !query analysis
-Project [elt(1, 123, cast(null as string), false) AS elt(1, 123, NULL)#x]
+Project [elt(1, 123, cast(null as string), true) AS elt(1, 123, NULL)#x]
 +- OneRowRelation


 -- !query
 select elt(2, '123', null)
 -- !query analysis
-Project [elt(2, 123, cast(null as string), false) AS elt(2, 123, NULL)#x]
+Project [elt(2, 123, cast(null as string), true) AS elt(2, 123, NULL)#x]
 +- OneRowRelation
@@ -360,21 +360,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 -- !query
 select size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2), true) AS size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2), false) AS size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
 +- OneRowRelation


 -- !query
 select size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2, 3), true) AS size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2, 3), false) AS size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
 +- OneRowRelation


 -- !query
 select size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10), 0, 1, 2, 3), true) AS size(arrays_zip(array(1, 2, 3), array(4), NULL, array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10), 0, 1, 2, 3), false) AS size(arrays_zip(array(1, 2, 3), array(4), NULL, array(7, 8, 9, 10)))#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/cast.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/cast.sql.out
index e0687b564d3d1..643dfd3771ffe 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/cast.sql.out
@@ -205,57 +205,193 @@ Project [hex(cast(abc as binary)) AS hex(CAST(abc AS BINARY))#x]
 -- !query
 SELECT HEX(CAST(CAST(123 AS byte) AS binary))
 -- !query analysis
-Project [hex(cast(cast(123 as tinyint) as binary)) AS hex(CAST(CAST(123 AS TINYINT) AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"",
+    "srcType" : "\"TINYINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 44,
+    "fragment" : "CAST(CAST(123 AS byte) AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(CAST(-123 AS byte) AS binary))
 -- !query analysis
-Project [hex(cast(cast(-123 as tinyint) as binary)) AS hex(CAST(CAST(-123 AS TINYINT) AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(CAST(-123 AS TINYINT) AS BINARY)\"",
+    "srcType" : "\"TINYINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 45,
+    "fragment" : "CAST(CAST(-123 AS byte) AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(123S AS binary))
 -- !query analysis
-Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
+    "srcType" : "\"SMALLINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 31,
+    "fragment" : "CAST(123S AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(-123S AS binary))
 -- !query analysis
-Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
+    "srcType" : "\"SMALLINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 32,
+    "fragment" : "CAST(-123S AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(123 AS binary))
 -- !query analysis
-Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
+    "srcType" : "\"INT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 30,
+    "fragment" : "CAST(123 AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(-123 AS binary))
 -- !query analysis
-Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
+    "srcType" : "\"INT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 31,
+    "fragment" : "CAST(-123 AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(123L AS binary))
 -- !query analysis
-Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
+    "srcType" : "\"BIGINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 31,
+    "fragment" : "CAST(123L AS binary)"
+  } ]
+}


 -- !query
 SELECT HEX(CAST(-123L AS binary))
 -- !query analysis
-Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
+    "srcType" : "\"BIGINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 32,
+    "fragment" : "CAST(-123L AS binary)"
+  } ]
+}
@@ -804,8 +940,25 @@ Project [hex(cast(abc as binary)) AS hex(CAST(abc AS BINARY))#x]
 -- !query
 SELECT HEX((123 :: byte) :: binary)
 -- !query analysis
-Project [hex(cast(cast(123 as tinyint) as binary)) AS hex(CAST(CAST(123 AS TINYINT) AS BINARY))#x]
-+- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "configVal" : "'false'",
+    "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"",
+    "srcType" : "\"TINYINT\"",
+    "targetType" : "\"BINARY\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 12,
+    "stopIndex" : 34,
+    "fragment" : "(123 :: byte) :: binary"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/collations.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/collations.sql.out
index c8e28c2cfafc9..45ab1cdcff79e 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/collations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/collations.sql.out
@@ -884,7 +884,7 @@ Project [concat_ws(,, cast(utf8_lcase#x as string), collate(word, utf8_binary))
 -- !query
 select elt(2, s, utf8_binary) from t5
 -- !query analysis
-Project [elt(2, s#x, utf8_binary#x, false) AS elt(2, s, utf8_binary)#x]
+Project [elt(2, s#x, utf8_binary#x, true) AS elt(2, s, utf8_binary)#x]
 +- SubqueryAlias spark_catalog.default.t5
    +- Relation spark_catalog.default.t5[s#x,utf8_binary#x,utf8_lcase#x] parquet
@@ -918,7 +918,7 @@ org.apache.spark.sql.AnalysisException
 -- !query
 select elt(1, utf8_binary collate utf8_binary, utf8_lcase collate utf8_binary) from t5
 -- !query analysis
-Project [elt(1, collate(utf8_binary#x, utf8_binary), collate(utf8_lcase#x, utf8_binary), false) AS elt(1, collate(utf8_binary, utf8_binary), collate(utf8_lcase, utf8_binary))#x]
+Project [elt(1, collate(utf8_binary#x, utf8_binary), collate(utf8_lcase#x, utf8_binary), true) AS elt(1, collate(utf8_binary, utf8_binary), collate(utf8_lcase, utf8_binary))#x]
 +- SubqueryAlias spark_catalog.default.t5
    +- Relation spark_catalog.default.t5[s#x,utf8_binary#x,utf8_lcase#x] parquet
@@ -926,7 +926,7 @@ Project [elt(1, collate(utf8_binary#x, utf8_binary), collate(utf8_lcase#x, utf8_
 -- !query
 select elt(1, utf8_binary collate utf8_binary, utf8_lcase) from t5
 -- !query analysis
-Project [elt(1, collate(utf8_binary#x, utf8_binary), cast(utf8_lcase#x as string), false) AS elt(1, collate(utf8_binary, utf8_binary), utf8_lcase)#x]
+Project [elt(1, collate(utf8_binary#x, utf8_binary), cast(utf8_lcase#x as string), true) AS elt(1, collate(utf8_binary, utf8_binary), utf8_lcase)#x]
 +- SubqueryAlias spark_catalog.default.t5
    +- Relation spark_catalog.default.t5[s#x,utf8_binary#x,utf8_lcase#x] parquet
@@ -934,7 +934,7 @@ Project [elt(1, collate(utf8_binary#x, utf8_binary), cast(utf8_lcase#x as string
 -- !query
 select elt(1, utf8_binary, 'word'), elt(1, utf8_lcase, 'word') from t5
 -- !query analysis
-Project [elt(1, utf8_binary#x, word, false) AS elt(1, utf8_binary, word)#x, elt(1, utf8_lcase#x, cast(word as string collate UTF8_LCASE), false) AS elt(1, utf8_lcase, word)#x]
+Project [elt(1, utf8_binary#x, word, true) AS elt(1, utf8_binary, word)#x, elt(1, utf8_lcase#x, cast(word as string collate UTF8_LCASE), true) AS elt(1, utf8_lcase, word)#x]
 +- SubqueryAlias spark_catalog.default.t5
    +- Relation spark_catalog.default.t5[s#x,utf8_binary#x,utf8_lcase#x] parquet
@@ -942,7 +942,7 @@ Project [elt(1, utf8_binary#x, word, false) AS elt(1, utf8_binary, word)#x, elt(
 -- !query
 select elt(1, utf8_binary, 'word' collate utf8_lcase), elt(1, utf8_lcase, 'word' collate utf8_binary) from t5
 -- !query analysis
-Project [elt(1, cast(utf8_binary#x as string collate UTF8_LCASE), collate(word, utf8_lcase), false) AS elt(1, utf8_binary, collate(word, utf8_lcase))#x, elt(1, cast(utf8_lcase#x as string), collate(word, utf8_binary), false) AS elt(1, utf8_lcase, collate(word, utf8_binary))#x]
+Project [elt(1, cast(utf8_binary#x as string collate UTF8_LCASE), collate(word, utf8_lcase), true) AS elt(1, utf8_binary, collate(word, utf8_lcase))#x, elt(1, cast(utf8_lcase#x as string), collate(word, utf8_binary), true) AS elt(1, utf8_lcase, collate(word, utf8_binary))#x]
 +- SubqueryAlias spark_catalog.default.t5
    +- Relation spark_catalog.default.t5[s#x,utf8_binary#x,utf8_lcase#x] parquet
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/comparator.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/comparator.sql.out
index 022c260ac6f60..cf17e20fc76df 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/comparator.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/comparator.sql.out
@@ -16,28 +16,28 @@ Project [(0x00 < 0xFF) AS (X'00' < X'FF')#x]
 -- !query
 select '1 ' = 1Y
 -- !query analysis
-Project [(cast(1 as tinyint) = 1) AS (1 = 1)#x]
+Project [(cast(1 as bigint) = cast(1 as bigint)) AS (1 = 1)#x]
 +- OneRowRelation


 -- !query
 select '\t1 ' = 1Y
 -- !query analysis
-Project [(cast( 1 as tinyint) = 1) AS ( 1 = 1)#x]
+Project [(cast( 1 as bigint) = cast(1 as bigint)) AS ( 1 = 1)#x]
 +- OneRowRelation


 -- !query
 select '1 ' = 1S
 -- !query analysis
-Project [(cast(1 as smallint) = 1) AS (1 = 1)#x]
+Project [(cast(1 as bigint) = cast(1 as bigint)) AS (1 = 1)#x]
 +- OneRowRelation


 -- !query
 select '1 ' = 1
 -- !query analysis
-Project [(cast(1 as int) = 1) AS (1 = 1)#x]
+Project [(cast(1 as bigint) = cast(1 as bigint)) AS (1 = 1)#x]
 +- OneRowRelation
@@ -51,7 +51,7 @@ Project [(cast( 1 as bigint) = 1) AS ( 1 = 1)#x]
 -- !query
 select ' 1' = cast(1.0 as float)
 -- !query analysis
-Project [(cast( 1 as float) = cast(1.0 as float)) AS ( 1 = CAST(1.0 AS FLOAT))#x]
+Project [(cast( 1 as double) = cast(cast(1.0 as float) as double)) AS ( 1 = CAST(1.0 AS FLOAT))#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/csv-functions.sql.out
index 4149f5f09947c..691864ef8b1cb 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/csv-functions.sql.out
@@ -217,7 +217,7 @@ Project [to_csv(named_struct(a, 1, b, 2), Some(America/Los_Angeles)) AS to_csv(n
 -- !query
 select to_csv(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'))
 -- !query analysis
-Project [to_csv((timestampFormat,dd/MM/yyyy), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false)), Some(America/Los_Angeles)) AS to_csv(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x]
+Project [to_csv((timestampFormat,dd/MM/yyyy), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true)), Some(America/Los_Angeles)) AS to_csv(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
index 88c7d7b4e7d72..0e4d2d4e99e26 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
@@ -37,21 +37,21 @@ org.apache.spark.sql.catalyst.parser.ParseException
 -- !query
 select make_date(2019, 1, 1), make_date(12, 12, 12)
 -- !query analysis
-Project [make_date(2019, 1, 1, false) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, false) AS make_date(12, 12, 12)#x]
+Project [make_date(2019, 1, 1, true) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, true) AS make_date(12, 12, 12)#x]
 +- OneRowRelation


 -- !query
 select make_date(2000, 13, 1)
 -- !query analysis
-Project [make_date(2000, 13, 1, false) AS make_date(2000, 13, 1)#x]
+Project [make_date(2000, 13, 1, true) AS make_date(2000, 13, 1)#x]
 +- OneRowRelation


 -- !query
 select make_date(2000, 1, 33)
 -- !query analysis
-Project [make_date(2000, 1, 33, false) AS make_date(2000, 1, 33)#x]
+Project [make_date(2000, 1, 33, true) AS make_date(2000, 1, 33)#x]
 +- OneRowRelation
@@ -148,21 +148,21 @@ select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(n
 -- !query
 select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_date(cast(null as string), None, Some(America/Los_Angeles), false) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), false) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), false) AS to_date(2016-12-31, yyyy-MM-dd)#x]
+Project [to_date(cast(null as string), None, Some(America/Los_Angeles), true) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), true) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), true) AS to_date(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_date("16", "dd")
 -- !query analysis
-Project [to_date(16, Some(dd), Some(America/Los_Angeles), false) AS to_date(16, dd)#x]
+Project [to_date(16, Some(dd), Some(America/Los_Angeles), true) AS to_date(16, dd)#x]
 +- OneRowRelation


 -- !query
 select to_date("02-29", "MM-dd")
 -- !query analysis
-Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), false) AS to_date(02-29, MM-dd)#x]
+Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), true) AS to_date(02-29, MM-dd)#x]
 +- OneRowRelation
@@ -201,21 +201,21 @@ select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(time
 -- !query
 select next_day("2015-07-23", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), Mon, false) AS next_day(2015-07-23, Mon)#x]
+Project [next_day(cast(2015-07-23 as date), Mon, true) AS next_day(2015-07-23, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day("2015-07-23", "xx")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), xx, false) AS next_day(2015-07-23, xx)#x]
+Project [next_day(cast(2015-07-23 as date), xx, true) AS next_day(2015-07-23, xx)#x]
 +- OneRowRelation


 -- !query
 select next_day("2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(2015-07-23 12:12:12, Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(2015-07-23 12:12:12, Mon)#x]
 +- OneRowRelation
@@ -228,28 +228,28 @@ select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")
 -- !query
 select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day("xx", "Mon")
 -- !query analysis
-Project [next_day(cast(xx as date), Mon, false) AS next_day(xx, Mon)#x]
+Project [next_day(cast(xx as date), Mon, true) AS next_day(xx, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day(null, "Mon")
 -- !query analysis
-Project [next_day(cast(null as date), Mon, false) AS next_day(NULL, Mon)#x]
+Project [next_day(cast(null as date), Mon, true) AS next_day(NULL, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day(null, "xx")
 -- !query analysis
-Project [next_day(cast(null as date), xx, false) AS next_day(NULL, xx)#x]
+Project [next_day(cast(null as date), xx, true) AS next_day(NULL, xx)#x]
 +- OneRowRelation
@@ -355,21 +355,15 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 -- !query
 select date_add('2011-11-11', '1')
 -- !query analysis
-Project [date_add(cast(2011-11-11 as date), 1) AS date_add(2011-11-11, 1)#x]
+Project [date_add(cast(2011-11-11 as date), cast(1 as int)) AS date_add(2011-11-11, 1)#x]
 +- OneRowRelation


 -- !query
 select date_add('2011-11-11', '1.2')
 -- !query analysis
-org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
-  "sqlState" : "22023",
-  "messageParameters" : {
-    "functionName" : "date_add"
-  }
-}
+Project [date_add(cast(2011-11-11 as date), cast(1.2 as int)) AS date_add(2011-11-11, 1.2)#x]
++- OneRowRelation
@@ -505,14 +499,7 @@ select date_sub(date'2011-11-11', '1')
 -- !query
 select date_sub(date'2011-11-11', '1.2')
 -- !query analysis
-org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
-  "sqlState" : "22023",
-  "messageParameters" : {
-    "functionName" : "date_sub"
-  }
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -543,49 +530,23 @@ Project [date_sub(cast(2011-11-11 12:12:12 as date), 1) AS date_sub(TIMESTAMP_NT
 -- !query
 select date_add('2011-11-11', int_str) from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"int_str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_add(2011-11-11, int_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 38,
-    "fragment" : "date_add('2011-11-11', int_str)"
-  } ]
-}
+Project [date_add(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_add(2011-11-11, int_str)#x]
++- SubqueryAlias date_view
+   +- View (`date_view`, [date_str#x, int_str#x])
+      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
+         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
+            +- OneRowRelation


 -- !query
 select date_sub('2011-11-11', int_str) from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"int_str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(2011-11-11, int_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 38,
-    "fragment" : "date_sub('2011-11-11', int_str)"
-  } ]
-}
+Project [date_sub(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_sub(2011-11-11, int_str)#x]
++- SubqueryAlias date_view
+   +- View (`date_view`, [date_str#x, int_str#x])
+      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
+         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
+            +- OneRowRelation
@@ -661,25 +622,7 @@ select date '2001-10-01' - date '2001-09-28'
 -- !query
 select date '2001-10-01' - '2001-09-28'
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"2001-09-28\"",
-    "inputType" : "\"DOUBLE\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 39,
-    "fragment" : "date '2001-10-01' - '2001-09-28'"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -709,25 +652,7 @@ select date_str - date '2001-09-28' from date_view
 -- !query
 select date '2001-09-28' - date_str from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"date_str\"",
-    "inputType" : "\"DOUBLE\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 35,
-    "fragment" : "date '2001-09-28' - date_str"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -739,7 +664,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"1\"",
-    "inputType" : "\"DOUBLE\"",
+    "inputType" : "\"DATE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
     "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
@@ -762,11 +687,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "inputSql" : "\"1\"",
-    "inputType" : "\"DOUBLE\"",
+    "inputSql" : "\"DATE '2011-11-11'\"",
+    "inputType" : "\"DATE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
+    "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -804,7 +729,7 @@ select date '2012-01-01' - interval '2-2' year to month,
 -- !query
 select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 -- !query analysis
-Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), false) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
+Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), true) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
index 4221db822d024..3681a5dfd3904 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
@@ -37,21 +37,21 @@ org.apache.spark.sql.catalyst.parser.ParseException
 -- !query
 select make_date(2019, 1, 1), make_date(12, 12, 12)
 -- !query analysis
-Project [make_date(2019, 1, 1, false) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, false) AS make_date(12, 12, 12)#x]
+Project [make_date(2019, 1, 1, true) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, true) AS make_date(12, 12, 12)#x]
 +- OneRowRelation


 -- !query
 select make_date(2000, 13, 1)
 -- !query analysis
-Project [make_date(2000, 13, 1, false) AS make_date(2000, 13, 1)#x]
+Project [make_date(2000, 13, 1, true) AS make_date(2000, 13, 1)#x]
 +- OneRowRelation


 -- !query
 select make_date(2000, 1, 33)
 -- !query analysis
-Project [make_date(2000, 1, 33, false) AS make_date(2000, 1, 33)#x]
+Project [make_date(2000, 1, 33, true) AS make_date(2000, 1, 33)#x]
 +- OneRowRelation
@@ -148,21 +148,21 @@ select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(n
 -- !query
 select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_date(cast(null as string), None, Some(America/Los_Angeles), false) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), false) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), false) AS to_date(2016-12-31, yyyy-MM-dd)#x]
+Project [to_date(cast(null as string), None, Some(America/Los_Angeles), true) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), true) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), true) AS to_date(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_date("16", "dd")
 -- !query analysis
-Project [to_date(16, Some(dd), Some(America/Los_Angeles), false) AS to_date(16, dd)#x]
+Project [to_date(16, Some(dd), Some(America/Los_Angeles), true) AS to_date(16, dd)#x]
 +- OneRowRelation


 -- !query
 select to_date("02-29", "MM-dd")
 -- !query analysis
-Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), false) AS to_date(02-29, MM-dd)#x]
+Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), true) AS to_date(02-29, MM-dd)#x]
 +- OneRowRelation
@@ -201,21 +201,21 @@ select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(time
 -- !query
 select next_day("2015-07-23", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), Mon, false) AS next_day(2015-07-23, Mon)#x]
+Project [next_day(cast(2015-07-23 as date), Mon, true) AS next_day(2015-07-23, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day("2015-07-23", "xx")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), xx, false) AS next_day(2015-07-23, xx)#x]
+Project [next_day(cast(2015-07-23 as date), xx, true) AS next_day(2015-07-23, xx)#x]
 +- OneRowRelation


 -- !query
 select next_day("2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(2015-07-23 12:12:12, Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(2015-07-23 12:12:12, Mon)#x]
 +- OneRowRelation
@@ -228,28 +228,28 @@ select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")
 -- !query
 select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day("xx", "Mon")
 -- !query analysis
-Project [next_day(cast(xx as date), Mon, false) AS next_day(xx, Mon)#x]
+Project [next_day(cast(xx as date), Mon, true) AS next_day(xx, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day(null, "Mon")
 -- !query analysis
-Project [next_day(cast(null as date), Mon, false) AS next_day(NULL, Mon)#x]
+Project [next_day(cast(null as date), Mon, true) AS next_day(NULL, Mon)#x]
 +- OneRowRelation


 -- !query
 select next_day(null, "xx")
 -- !query analysis
-Project [next_day(cast(null as date), xx, false) AS next_day(NULL, xx)#x]
+Project [next_day(cast(null as date), xx, true) AS next_day(NULL, xx)#x]
 +- OneRowRelation
@@ -355,21 +355,15 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 -- !query
 select date_add('2011-11-11', '1')
 -- !query analysis
-Project [date_add(cast(2011-11-11 as date), 1) AS date_add(2011-11-11, 1)#x]
+Project [date_add(cast(2011-11-11 as date), cast(1 as int)) AS date_add(2011-11-11, 1)#x]
 +- OneRowRelation


 -- !query
 select date_add('2011-11-11', '1.2')
 -- !query analysis
-org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
-  "sqlState" : "22023",
-  "messageParameters" : {
-    "functionName" : "date_add"
-  }
-}
+Project [date_add(cast(2011-11-11 as date), cast(1.2 as int)) AS date_add(2011-11-11, 1.2)#x]
++- OneRowRelation
@@ -505,14 +499,7 @@ select date_sub(date'2011-11-11', '1')
 -- !query
 select date_sub(date'2011-11-11', '1.2')
 -- !query analysis
-org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
-  "sqlState" : "22023",
-  "messageParameters" : {
-    "functionName" : "date_sub"
-  }
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -543,49 +530,23 @@ Project [date_sub(cast(2011-11-11 12:12:12 as date), 1) AS date_sub(TIMESTAMP_NT
 -- !query
 select date_add('2011-11-11', int_str) from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"int_str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_add(2011-11-11, int_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 38,
-    "fragment" : "date_add('2011-11-11', int_str)"
-  } ]
-}
+Project [date_add(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_add(2011-11-11, int_str)#x]
++- SubqueryAlias date_view
+   +- View (`date_view`, [date_str#x, int_str#x])
+      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
+         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
+            +- OneRowRelation


 -- !query
 select date_sub('2011-11-11', int_str) from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"int_str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(2011-11-11, int_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 38,
-    "fragment" : "date_sub('2011-11-11', int_str)"
-  } ]
-}
+Project [date_sub(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_sub(2011-11-11, int_str)#x]
++- SubqueryAlias date_view
+   +- View (`date_view`, [date_str#x, int_str#x])
+      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
+         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
+            +- OneRowRelation
@@ -661,25 +622,7 @@ select date '2001-10-01' - date '2001-09-28'
 -- !query
 select date '2001-10-01' - '2001-09-28'
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"2001-09-28\"",
-    "inputType" : "\"DOUBLE\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 39,
-    "fragment" : "date '2001-10-01' - '2001-09-28'"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -709,25 +652,7 @@ select date_str - date '2001-09-28' from date_view
 -- !query
 select date '2001-09-28' - date_str from date_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"date_str\"",
-    "inputType" : "\"DOUBLE\"",
-    "paramIndex" : "second",
-    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 35,
-    "fragment" : "date '2001-09-28' - date_str"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -739,7 +664,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"1\"",
-    "inputType" : "\"DOUBLE\"",
+    "inputType" : "\"DATE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
     "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
@@ -762,11 +687,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "inputSql" : "\"1\"",
-    "inputType" : "\"DOUBLE\"",
+    "inputSql" : "\"DATE '2011-11-11'\"",
+    "inputType" : "\"DATE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
+    "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\""
  },
   "queryContext" : [ {
     "objectType" : "",
@@ -804,7 +729,7 @@ select date '2012-01-01' - interval '2-2' year to month,
 -- !query
 select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 -- !query analysis
-Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), false) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
+Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), true) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
 +- OneRowRelation
@@ -1121,70 +1046,70 @@ Project [(localtimestamp(Some(America/Los_Angeles)) = localtimestamp(Some(Americ
 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 1)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 60)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 61)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, null)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 59.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x]
 +- OneRowRelation
@@ -1333,231 +1258,231 @@ select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-
 -- !query
 select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x]
+Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp(1)
 -- !query analysis
-Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1)#x]
+Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
 -- !query analysis
-Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x]
+Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
 -- !query analysis
-Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
+Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
 -- !query analysis
-Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
+Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
 -- !query analysis
-Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x]
+Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
 -- !query analysis
-Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x]
+Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
 -- !query analysis
-Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S, yyyy-MM-dd'S')#x]
+Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S, yyyy-MM-dd'S')#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
 -- !query analysis
-Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x]
+Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
 -- !query analysis
-Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x]
+Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
 -- !query analysis
-Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x]
+Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
 -- !query analysis
-Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x]
+Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
 -- !query analysis
-Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x]
+Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("16", "dd")
 -- !query analysis
-Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(16, dd)#x]
+Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(16, dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("02-29", "MM-dd")
 -- !query analysis
-Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(02-29, MM-dd)#x]
+Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(02-29, MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019 40", "yyyy mm")
 -- !query analysis
-Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 40, yyyy mm)#x]
+Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 40, yyyy mm)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
 -- !query analysis
-Project [to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x]
+Project [to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x]
 +- OneRowRelation
@@ -1582,49 +1507,13 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
 -- !query
 select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"2011-11-11 11:11:10\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"",
-    "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 61,
-    "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]


 -- !query
 select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"2011-11-11 11:11:11\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "first",
-    "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"",
-    "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 61,
-    "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -1650,49 +1539,13 @@ CreateViewCommand `ts_view`, select '2011-11-11 11:11:11' str, false, false, Loc
 -- !query
 select str - timestamp'2011-11-11 11:11:11' from ts_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "first",
-    "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"",
-    "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 43,
-    "fragment" : "str - timestamp'2011-11-11 11:11:11'"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]


 -- !query
 select timestamp'2011-11-11 11:11:11' - str from ts_view
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"str\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "second",
-    "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"",
-    "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 43,
-    "fragment" : "timestamp'2011-11-11 11:11:11' - str"
-  } ]
-}
+[Analyzer test output redacted due to nondeterminism]
@@ -1700,11 +1553,11 @@ select timestamp'2011-11-11 11:11:11' + '1'
 -- !query analysis
 org.apache.spark.sql.catalyst.ExtendedAnalysisException
 {
-  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "left" : "\"TIMESTAMP\"",
-    "right" : "\"DOUBLE\"",
+    "actualDataType" : "\"TIMESTAMP\"",
+    "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\""
   },
   "queryContext" : [ {
@@ -1722,11 +1575,11 @@ select '1' + timestamp'2011-11-11 11:11:11'
 -- !query analysis
 org.apache.spark.sql.catalyst.ExtendedAnalysisException
 {
-  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "left" : "\"DOUBLE\"",
-    "right" : "\"TIMESTAMP\"",
+    "actualDataType" : "\"TIMESTAMP\"",
+    "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\""
   },
   "queryContext" : [ {
@@ -1805,28 +1658,28 @@ select date '2012-01-01' - interval 3 hours,
 -- !query
 select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 -- !query analysis
-Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x]
+Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 -- !query analysis
-Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x]
+Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 -- !query analysis
-Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x]
+Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x]
to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x] +- OneRowRelation -- !query select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') -- !query analysis -Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), false) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), true) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-invalid.sql.out index ad4a2feb9661c..74146ab17a4d4 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-invalid.sql.out @@ -2,126 +2,126 @@ -- !query select to_timestamp('294248', 'y') -- !query analysis -Project [to_timestamp(294248, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(294248, y)#x] +Project [to_timestamp(294248, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(294248, y)#x] +- OneRowRelation -- !query select to_timestamp('1', 'yy') -- !query analysis -Project [to_timestamp(1, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, yy)#x] +Project [to_timestamp(1, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, yy)#x] +- OneRowRelation -- !query select to_timestamp('-12', 'yy') -- !query analysis -Project [to_timestamp(-12, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(-12, yy)#x] +Project [to_timestamp(-12, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(-12, yy)#x] +- OneRowRelation -- !query select to_timestamp('123', 'yy') -- !query analysis -Project [to_timestamp(123, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(123, yy)#x] +Project [to_timestamp(123, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(123, yy)#x] +- OneRowRelation -- !query select to_timestamp('1', 'yyy') -- !query analysis -Project [to_timestamp(1, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, yyy)#x] +Project [to_timestamp(1, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, yyy)#x] +- OneRowRelation -- !query select to_timestamp('1234567', 'yyyyyyy') -- !query analysis -Project [to_timestamp(1234567, Some(yyyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1234567, yyyyyyy)#x] +Project [to_timestamp(1234567, Some(yyyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1234567, yyyyyyy)#x] +- OneRowRelation -- !query select to_timestamp('366', 'D') -- !query analysis -Project [to_timestamp(366, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(366, D)#x] +Project [to_timestamp(366, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(366, D)#x] +- OneRowRelation -- !query select to_timestamp('9', 'DD') -- !query analysis -Project [to_timestamp(9, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, DD)#x] +Project [to_timestamp(9, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, DD)#x] +- OneRowRelation -- !query select to_timestamp('366', 'DD') -- !query analysis -Project [to_timestamp(366, Some(DD), TimestampType, 
Some(America/Los_Angeles), false) AS to_timestamp(366, DD)#x] +Project [to_timestamp(366, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(366, DD)#x] +- OneRowRelation -- !query select to_timestamp('9', 'DDD') -- !query analysis -Project [to_timestamp(9, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, DDD)#x] +Project [to_timestamp(9, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, DDD)#x] +- OneRowRelation -- !query select to_timestamp('99', 'DDD') -- !query analysis -Project [to_timestamp(99, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(99, DDD)#x] +Project [to_timestamp(99, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, DDD)#x] +- OneRowRelation -- !query select to_timestamp('30-365', 'dd-DDD') -- !query analysis -Project [to_timestamp(30-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(30-365, dd-DDD)#x] +Project [to_timestamp(30-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(30-365, dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('11-365', 'MM-DDD') -- !query analysis -Project [to_timestamp(11-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(11-365, MM-DDD)#x] +Project [to_timestamp(11-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(11-365, MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2019-366', 'yyyy-DDD') -- !query analysis -Project [to_timestamp(2019-366, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-366, yyyy-DDD)#x] +Project [to_timestamp(2019-366, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-366, yyyy-DDD)#x] +- OneRowRelation -- !query select to_timestamp('12-30-365', 'MM-dd-DDD') -- !query analysis -Project [to_timestamp(12-30-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-30-365, MM-dd-DDD)#x] +Project [to_timestamp(12-30-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-30-365, MM-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-01-365', 'yyyy-dd-DDD') -- !query analysis -Project [to_timestamp(2020-01-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-01-365, yyyy-dd-DDD)#x] +Project [to_timestamp(2020-01-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-01-365, yyyy-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-10-350', 'yyyy-MM-DDD') -- !query analysis -Project [to_timestamp(2020-10-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-10-350, yyyy-MM-DDD)#x] +Project [to_timestamp(2020-10-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-10-350, yyyy-MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') -- !query analysis -Project [to_timestamp(2020-11-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-11-31-366, yyyy-MM-dd-DDD)#x] +Project [to_timestamp(2020-11-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-11-31-366, yyyy-MM-dd-DDD)#x] +- OneRowRelation @@ -135,56 +135,56 @@ Project [from_csv(StructField(date,DateType,true), (dateFormat,yyyy-DDD), 2018-3 -- 
!query select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_date(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), false) AS to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x] +Project [to_date(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), true) AS to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x] +- OneRowRelation -- !query select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_date(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), false) AS to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x] +Project [to_date(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), true) AS to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x] +- OneRowRelation -- !query select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_timestamp(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x] +Project [to_timestamp(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x] +- OneRowRelation -- !query select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_timestamp(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x] +Project [to_timestamp(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x] +- OneRowRelation -- !query select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL] +Project [unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL] +- OneRowRelation -- !query select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL] +Project [unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL] +- OneRowRelation -- !query select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL] +Project [to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL] +- OneRowRelation -- !query select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query analysis -Project [to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL] +Project [to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, 
Some(America/Los_Angeles), true) AS to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-legacy.sql.out index 7325f2756949e..7907279fb0204 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing-legacy.sql.out @@ -2,180 +2,180 @@ -- !query select to_timestamp('1', 'y') -- !query analysis -Project [to_timestamp(1, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, y)#x] +Project [to_timestamp(1, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, y)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'y') -- !query analysis -Project [to_timestamp(009999, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, y)#x] +Project [to_timestamp(009999, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, y)#x] +- OneRowRelation -- !query select to_timestamp('00', 'yy') -- !query analysis -Project [to_timestamp(00, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(00, yy)#x] +Project [to_timestamp(00, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(00, yy)#x] +- OneRowRelation -- !query select to_timestamp('99', 'yy') -- !query analysis -Project [to_timestamp(99, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(99, yy)#x] +Project [to_timestamp(99, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, yy)#x] +- OneRowRelation -- !query select to_timestamp('001', 'yyy') -- !query analysis -Project [to_timestamp(001, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(001, yyy)#x] +Project [to_timestamp(001, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(001, yyy)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'yyy') -- !query analysis -Project [to_timestamp(009999, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, yyy)#x] +Project [to_timestamp(009999, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, yyy)#x] +- OneRowRelation -- !query select to_timestamp('0001', 'yyyy') -- !query analysis -Project [to_timestamp(0001, Some(yyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(0001, yyyy)#x] +Project [to_timestamp(0001, Some(yyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(0001, yyyy)#x] +- OneRowRelation -- !query select to_timestamp('9999', 'yyyy') -- !query analysis -Project [to_timestamp(9999, Some(yyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9999, yyyy)#x] +Project [to_timestamp(9999, Some(yyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9999, yyyy)#x] +- OneRowRelation -- !query select to_timestamp('00001', 'yyyyy') -- !query analysis -Project [to_timestamp(00001, Some(yyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(00001, yyyyy)#x] +Project [to_timestamp(00001, Some(yyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(00001, yyyyy)#x] +- OneRowRelation -- !query select to_timestamp('09999', 'yyyyy') -- !query analysis -Project [to_timestamp(09999, Some(yyyyy), TimestampType, Some(America/Los_Angeles), 
false) AS to_timestamp(09999, yyyyy)#x] +Project [to_timestamp(09999, Some(yyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(09999, yyyyy)#x] +- OneRowRelation -- !query select to_timestamp('000001', 'yyyyyy') -- !query analysis -Project [to_timestamp(000001, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(000001, yyyyyy)#x] +Project [to_timestamp(000001, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(000001, yyyyyy)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'yyyyyy') -- !query analysis -Project [to_timestamp(009999, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, yyyyyy)#x] +Project [to_timestamp(009999, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, yyyyyy)#x] +- OneRowRelation -- !query select to_timestamp('9', 'D') -- !query analysis -Project [to_timestamp(9, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, D)#x] +Project [to_timestamp(9, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, D)#x] +- OneRowRelation -- !query select to_timestamp('300', 'D') -- !query analysis -Project [to_timestamp(300, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(300, D)#x] +Project [to_timestamp(300, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(300, D)#x] +- OneRowRelation -- !query select to_timestamp('09', 'DD') -- !query analysis -Project [to_timestamp(09, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(09, DD)#x] +Project [to_timestamp(09, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(09, DD)#x] +- OneRowRelation -- !query select to_timestamp('99', 'DD') -- !query analysis -Project [to_timestamp(99, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(99, DD)#x] +Project [to_timestamp(99, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, DD)#x] +- OneRowRelation -- !query select to_timestamp('100', 'DD') -- !query analysis -Project [to_timestamp(100, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(100, DD)#x] +Project [to_timestamp(100, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(100, DD)#x] +- OneRowRelation -- !query select to_timestamp('009', 'DDD') -- !query analysis -Project [to_timestamp(009, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009, DDD)#x] +Project [to_timestamp(009, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009, DDD)#x] +- OneRowRelation -- !query select to_timestamp('365', 'DDD') -- !query analysis -Project [to_timestamp(365, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(365, DDD)#x] +Project [to_timestamp(365, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(365, DDD)#x] +- OneRowRelation -- !query select to_timestamp('31-365', 'dd-DDD') -- !query analysis -Project [to_timestamp(31-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(31-365, dd-DDD)#x] +Project [to_timestamp(31-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(31-365, dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('12-365', 'MM-DDD') -- !query analysis -Project [to_timestamp(12-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-365, MM-DDD)#x] 
+Project [to_timestamp(12-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-365, MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-365', 'yyyy-DDD') -- !query analysis -Project [to_timestamp(2020-365, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-365, yyyy-DDD)#x] +Project [to_timestamp(2020-365, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-365, yyyy-DDD)#x] +- OneRowRelation -- !query select to_timestamp('12-31-365', 'MM-dd-DDD') -- !query analysis -Project [to_timestamp(12-31-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-31-365, MM-dd-DDD)#x] +Project [to_timestamp(12-31-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-31-365, MM-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-30-365', 'yyyy-dd-DDD') -- !query analysis -Project [to_timestamp(2020-30-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-30-365, yyyy-dd-DDD)#x] +Project [to_timestamp(2020-30-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-30-365, yyyy-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-12-350', 'yyyy-MM-DDD') -- !query analysis -Project [to_timestamp(2020-12-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-12-350, yyyy-MM-DDD)#x] +Project [to_timestamp(2020-12-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-12-350, yyyy-MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-12-31-366', 'yyyy-MM-dd-DDD') -- !query analysis -Project [to_timestamp(2020-12-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-12-31-366, yyyy-MM-dd-DDD)#x] +Project [to_timestamp(2020-12-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-12-31-366, yyyy-MM-dd-DDD)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing.sql.out index 7325f2756949e..7907279fb0204 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-parsing.sql.out @@ -2,180 +2,180 @@ -- !query select to_timestamp('1', 'y') -- !query analysis -Project [to_timestamp(1, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, y)#x] +Project [to_timestamp(1, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, y)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'y') -- !query analysis -Project [to_timestamp(009999, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, y)#x] +Project [to_timestamp(009999, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, y)#x] +- OneRowRelation -- !query select to_timestamp('00', 'yy') -- !query analysis -Project [to_timestamp(00, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(00, yy)#x] +Project [to_timestamp(00, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(00, yy)#x] +- OneRowRelation -- !query select to_timestamp('99', 'yy') -- !query analysis -Project [to_timestamp(99, Some(yy), TimestampType, 
Some(America/Los_Angeles), false) AS to_timestamp(99, yy)#x] +Project [to_timestamp(99, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, yy)#x] +- OneRowRelation -- !query select to_timestamp('001', 'yyy') -- !query analysis -Project [to_timestamp(001, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(001, yyy)#x] +Project [to_timestamp(001, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(001, yyy)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'yyy') -- !query analysis -Project [to_timestamp(009999, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, yyy)#x] +Project [to_timestamp(009999, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, yyy)#x] +- OneRowRelation -- !query select to_timestamp('0001', 'yyyy') -- !query analysis -Project [to_timestamp(0001, Some(yyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(0001, yyyy)#x] +Project [to_timestamp(0001, Some(yyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(0001, yyyy)#x] +- OneRowRelation -- !query select to_timestamp('9999', 'yyyy') -- !query analysis -Project [to_timestamp(9999, Some(yyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9999, yyyy)#x] +Project [to_timestamp(9999, Some(yyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9999, yyyy)#x] +- OneRowRelation -- !query select to_timestamp('00001', 'yyyyy') -- !query analysis -Project [to_timestamp(00001, Some(yyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(00001, yyyyy)#x] +Project [to_timestamp(00001, Some(yyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(00001, yyyyy)#x] +- OneRowRelation -- !query select to_timestamp('09999', 'yyyyy') -- !query analysis -Project [to_timestamp(09999, Some(yyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(09999, yyyyy)#x] +Project [to_timestamp(09999, Some(yyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(09999, yyyyy)#x] +- OneRowRelation -- !query select to_timestamp('000001', 'yyyyyy') -- !query analysis -Project [to_timestamp(000001, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(000001, yyyyyy)#x] +Project [to_timestamp(000001, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(000001, yyyyyy)#x] +- OneRowRelation -- !query select to_timestamp('009999', 'yyyyyy') -- !query analysis -Project [to_timestamp(009999, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009999, yyyyyy)#x] +Project [to_timestamp(009999, Some(yyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009999, yyyyyy)#x] +- OneRowRelation -- !query select to_timestamp('9', 'D') -- !query analysis -Project [to_timestamp(9, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, D)#x] +Project [to_timestamp(9, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, D)#x] +- OneRowRelation -- !query select to_timestamp('300', 'D') -- !query analysis -Project [to_timestamp(300, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(300, D)#x] +Project [to_timestamp(300, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(300, D)#x] +- OneRowRelation -- !query select to_timestamp('09', 'DD') -- !query analysis -Project [to_timestamp(09, Some(DD), TimestampType, 
Some(America/Los_Angeles), false) AS to_timestamp(09, DD)#x] +Project [to_timestamp(09, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(09, DD)#x] +- OneRowRelation -- !query select to_timestamp('99', 'DD') -- !query analysis -Project [to_timestamp(99, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(99, DD)#x] +Project [to_timestamp(99, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, DD)#x] +- OneRowRelation -- !query select to_timestamp('100', 'DD') -- !query analysis -Project [to_timestamp(100, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(100, DD)#x] +Project [to_timestamp(100, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(100, DD)#x] +- OneRowRelation -- !query select to_timestamp('009', 'DDD') -- !query analysis -Project [to_timestamp(009, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(009, DDD)#x] +Project [to_timestamp(009, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(009, DDD)#x] +- OneRowRelation -- !query select to_timestamp('365', 'DDD') -- !query analysis -Project [to_timestamp(365, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(365, DDD)#x] +Project [to_timestamp(365, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(365, DDD)#x] +- OneRowRelation -- !query select to_timestamp('31-365', 'dd-DDD') -- !query analysis -Project [to_timestamp(31-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(31-365, dd-DDD)#x] +Project [to_timestamp(31-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(31-365, dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('12-365', 'MM-DDD') -- !query analysis -Project [to_timestamp(12-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-365, MM-DDD)#x] +Project [to_timestamp(12-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-365, MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-365', 'yyyy-DDD') -- !query analysis -Project [to_timestamp(2020-365, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-365, yyyy-DDD)#x] +Project [to_timestamp(2020-365, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-365, yyyy-DDD)#x] +- OneRowRelation -- !query select to_timestamp('12-31-365', 'MM-dd-DDD') -- !query analysis -Project [to_timestamp(12-31-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-31-365, MM-dd-DDD)#x] +Project [to_timestamp(12-31-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-31-365, MM-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-30-365', 'yyyy-dd-DDD') -- !query analysis -Project [to_timestamp(2020-30-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-30-365, yyyy-dd-DDD)#x] +Project [to_timestamp(2020-30-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-30-365, yyyy-dd-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-12-350', 'yyyy-MM-DDD') -- !query analysis -Project [to_timestamp(2020-12-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-12-350, yyyy-MM-DDD)#x] +Project [to_timestamp(2020-12-350, Some(yyyy-MM-DDD), TimestampType, 
Some(America/Los_Angeles), true) AS to_timestamp(2020-12-350, yyyy-MM-DDD)#x] +- OneRowRelation -- !query select to_timestamp('2020-12-31-366', 'yyyy-MM-dd-DDD') -- !query analysis -Project [to_timestamp(2020-12-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-12-31-366, yyyy-MM-dd-DDD)#x] +Project [to_timestamp(2020-12-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-12-31-366, yyyy-MM-dd-DDD)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-special.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-special.sql.out index 01d1f2c40a4a6..6768297fd8116 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-special.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-special.sql.out @@ -8,7 +8,7 @@ select date'999999-03-18', date'-0001-1-28', date'0015' -- !query select make_date(999999, 3, 18), make_date(-1, 1, 28) -- !query analysis -Project [make_date(999999, 3, 18, false) AS make_date(999999, 3, 18)#x, make_date(-1, 1, 28, false) AS make_date(-1, 1, 28)#x] +Project [make_date(999999, 3, 18, true) AS make_date(999999, 3, 18)#x, make_date(-1, 1, 28, true) AS make_date(-1, 1, 28)#x] +- OneRowRelation @@ -21,5 +21,5 @@ select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timesta -- !query select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0) -- !query analysis -Project [make_timestamp(-1969, 12, 31, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(-1969, 12, 31, 16, 0, 0.0)#x, make_timestamp(-15, 3, 18, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(-15, 3, 18, 16, 0, 0.0)#x, make_timestamp(99999, 3, 18, 12, 3, cast(17.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(99999, 3, 18, 12, 3, 17.0)#x] +Project [make_timestamp(-1969, 12, 31, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(-1969, 12, 31, 16, 0, 0.0)#x, make_timestamp(-15, 3, 18, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(-15, 3, 18, 16, 0, 0.0)#x, make_timestamp(99999, 3, 18, 12, 3, cast(17.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(99999, 3, 18, 12, 3, 17.0)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/decimalArithmeticOperations.sql.out index 4a2199033f819..d75f4d41bd425 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/decimalArithmeticOperations.sql.out @@ -268,12 +268,13 @@ SetCommand (spark.sql.decimalOperations.allowPrecisionLoss,Some(false)) -- !query -select id, a+b, a-b, a*b, a/b from decimals_test order by id +select /*+ COALESCE(1) */ id, a+b, a-b, a*b, a/b from decimals_test order by id -- !query analysis Sort [id#x ASC NULLS FIRST], true -+- Project [id#x, (a#x + b#x) AS (a + b)#x, (a#x - b#x) AS (a - b)#x, (a#x * b#x) AS (a * b)#x, (a#x / b#x) AS (a / b)#x] - +- SubqueryAlias spark_catalog.default.decimals_test - +- Relation 
spark_catalog.default.decimals_test[id#x,a#x,b#x] parquet ++- Repartition 1, false + +- Project [id#x, (a#x + b#x) AS (a + b)#x, (a#x - b#x) AS (a - b)#x, (a#x * b#x) AS (a * b)#x, (a#x / b#x) AS (a / b)#x] + +- SubqueryAlias spark_catalog.default.decimals_test + +- Relation spark_catalog.default.decimals_test[id#x,a#x,b#x] parquet -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out index c42f2db3f0f9f..987941eee05c7 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/extract.sql.out @@ -3,7 +3,7 @@ CREATE TEMPORARY VIEW t AS select '2011-05-06 07:08:09.1234567' as c, to_timestamp_ntz('2011-05-06 07:08:09.1234567') as ntz, interval 10 year 20 month as i, interval 30 day 40 hour 50 minute 6.7890 second as j -- !query analysis CreateViewCommand `t`, select '2011-05-06 07:08:09.1234567' as c, to_timestamp_ntz('2011-05-06 07:08:09.1234567') as ntz, interval 10 year 20 month as i, interval 30 day 40 hour 50 minute 6.7890 second as j, false, false, LocalTempView, UNSUPPORTED, true - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -14,7 +14,7 @@ Project [extract(year, c#x) AS extract(year FROM c)#x, extract(year, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -25,7 +25,7 @@ Project [extract(y, c#x) AS extract(y FROM c)#x, extract(y, ntz#x) AS extract(y +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -36,7 +36,7 @@ Project [extract(years, c#x) AS extract(years FROM c)#x, extract(years, ntz#x) A +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, 
j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -47,7 +47,7 @@ Project [extract(yr, c#x) AS extract(yr FROM c)#x, extract(yr, ntz#x) AS extract +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -58,7 +58,7 @@ Project [extract(yrs, c#x) AS extract(yrs FROM c)#x, extract(yrs, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -69,7 +69,7 @@ Project [extract(yearofweek, c#x) AS extract(yearofweek FROM c)#x, extract(yearo +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -80,7 +80,7 @@ Project [extract(quarter, c#x) AS extract(quarter FROM c)#x, extract(quarter, nt +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project 
[2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -91,7 +91,7 @@ Project [extract(qtr, c#x) AS extract(qtr FROM c)#x, extract(qtr, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -102,7 +102,7 @@ Project [extract(month, c#x) AS extract(month FROM c)#x, extract(month, ntz#x) A +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -113,7 +113,7 @@ Project [extract(mon, c#x) AS extract(mon FROM c)#x, extract(mon, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -124,7 +124,7 @@ Project [extract(mons, c#x) AS extract(mons FROM c)#x, extract(mons, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH 
AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -135,7 +135,7 @@ Project [extract(months, c#x) AS extract(months FROM c)#x, extract(months, ntz#x +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -146,7 +146,7 @@ Project [extract(week, c#x) AS extract(week FROM c)#x, extract(week, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -157,7 +157,7 @@ Project [extract(w, c#x) AS extract(w FROM c)#x, extract(w, ntz#x) AS extract(w +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -168,7 +168,7 @@ Project [extract(weeks, c#x) AS extract(weeks FROM c)#x, extract(weeks, ntz#x) A +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, 
Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -179,7 +179,7 @@ Project [extract(day, c#x) AS extract(day FROM c)#x, extract(day, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -190,7 +190,7 @@ Project [extract(d, c#x) AS extract(d FROM c)#x, extract(d, ntz#x) AS extract(d +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -201,7 +201,7 @@ Project [extract(days, c#x) AS extract(days FROM c)#x, extract(days, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -212,7 +212,7 @@ Project [extract(dayofweek, c#x) AS extract(dayofweek FROM c)#x, extract(dayofwe +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -223,7 +223,7 @@ Project 
[extract(dow, c#x) AS extract(dow FROM c)#x, extract(dow, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -234,7 +234,7 @@ Project [extract(dayofweek_iso, c#x) AS extract(dayofweek_iso FROM c)#x, extract +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -245,7 +245,7 @@ Project [extract(dow_iso, c#x) AS extract(dow_iso FROM c)#x, extract(dow_iso, nt +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -256,7 +256,7 @@ Project [extract(doy, c#x) AS extract(doy FROM c)#x, extract(doy, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -267,7 +267,7 @@ Project [extract(hour, c#x) AS extract(hour FROM c)#x, extract(hour, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, 
cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -278,7 +278,7 @@ Project [extract(h, c#x) AS extract(h FROM c)#x, extract(h, ntz#x) AS extract(h +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -289,7 +289,7 @@ Project [extract(hours, c#x) AS extract(hours FROM c)#x, extract(hours, ntz#x) A +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -300,7 +300,7 @@ Project [extract(hr, c#x) AS extract(hr FROM c)#x, extract(hr, ntz#x) AS extract +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -311,7 +311,7 @@ Project [extract(hrs, c#x) AS extract(hrs FROM c)#x, extract(hrs, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, 
to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -322,7 +322,7 @@ Project [extract(minute, c#x) AS extract(minute FROM c)#x, extract(minute, ntz#x +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -333,7 +333,7 @@ Project [extract(m, c#x) AS extract(m FROM c)#x, extract(m, ntz#x) AS extract(m +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -344,7 +344,7 @@ Project [extract(min, c#x) AS extract(min FROM c)#x, extract(min, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -355,7 +355,7 @@ Project [extract(mins, c#x) AS extract(mins FROM c)#x, extract(mins, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' 
DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -366,7 +366,7 @@ Project [extract(minutes, c#x) AS extract(minutes FROM c)#x, extract(minutes, nt +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -377,7 +377,7 @@ Project [extract(second, c#x) AS extract(second FROM c)#x, extract(second, ntz#x +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -388,7 +388,7 @@ Project [extract(s, c#x) AS extract(s FROM c)#x, extract(s, ntz#x) AS extract(s +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -399,7 +399,7 @@ Project [extract(sec, c#x) AS extract(sec FROM c)#x, extract(sec, ntz#x) AS extr +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS 
ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -410,7 +410,7 @@ Project [extract(seconds, c#x) AS extract(seconds FROM c)#x, extract(seconds, nt +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -421,7 +421,7 @@ Project [extract(secs, c#x) AS extract(secs FROM c)#x, extract(secs, ntz#x) AS e +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -495,7 +495,7 @@ Project [date_part(year, c#x) AS date_part(year, c)#x, date_part(year, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -506,7 +506,7 @@ Project [date_part(y, c#x) AS date_part(y, c)#x, date_part(y, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -517,7 +517,7 @@ Project [date_part(years, c#x) AS 
date_part(years, c)#x, date_part(years, ntz#x) +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -528,7 +528,7 @@ Project [date_part(yr, c#x) AS date_part(yr, c)#x, date_part(yr, ntz#x) AS date_ +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -539,7 +539,7 @@ Project [date_part(yrs, c#x) AS date_part(yrs, c)#x, date_part(yrs, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -550,7 +550,7 @@ Project [date_part(yearofweek, c#x) AS date_part(yearofweek, c)#x, date_part(yea +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -561,7 +561,7 @@ Project [date_part(quarter, c#x) AS date_part(quarter, c)#x, date_part(quarter, +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS 
ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -572,7 +572,7 @@ Project [date_part(qtr, c#x) AS date_part(qtr, c)#x, date_part(qtr, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -583,7 +583,7 @@ Project [date_part(month, c#x) AS date_part(month, c)#x, date_part(month, ntz#x) +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -594,7 +594,7 @@ Project [date_part(mon, c#x) AS date_part(mon, c)#x, date_part(mon, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -605,7 +605,7 @@ Project [date_part(mons, c#x) AS date_part(mons, c)#x, date_part(mons, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 
07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -616,7 +616,7 @@ Project [date_part(months, c#x) AS date_part(months, c)#x, date_part(months, ntz +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -627,7 +627,7 @@ Project [date_part(week, c#x) AS date_part(week, c)#x, date_part(week, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -638,7 +638,7 @@ Project [date_part(w, c#x) AS date_part(w, c)#x, date_part(w, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -649,7 +649,7 @@ Project [date_part(weeks, c#x) AS date_part(weeks, c)#x, date_part(weeks, ntz#x) +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- 
Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -660,7 +660,7 @@ Project [date_part(day, c#x) AS date_part(day, c)#x, date_part(day, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -671,7 +671,7 @@ Project [date_part(d, c#x) AS date_part(d, c)#x, date_part(d, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -682,7 +682,7 @@ Project [date_part(days, c#x) AS date_part(days, c)#x, date_part(days, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -693,7 +693,7 @@ Project [date_part(dayofweek, c#x) AS date_part(dayofweek, c)#x, date_part(dayof +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR 
TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -704,7 +704,7 @@ Project [date_part(dow, c#x) AS date_part(dow, c)#x, date_part(dow, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -715,7 +715,7 @@ Project [date_part(dayofweek_iso, c#x) AS date_part(dayofweek_iso, c)#x, date_pa +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -726,7 +726,7 @@ Project [date_part(dow_iso, c#x) AS date_part(dow_iso, c)#x, date_part(dow_iso, +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -737,7 +737,7 @@ Project [date_part(doy, c#x) AS date_part(doy, c)#x, date_part(doy, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -748,7 +748,7 @@ Project [date_part(hour, c#x) AS date_part(hour, c)#x, date_part(hour, 
ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -759,7 +759,7 @@ Project [date_part(h, c#x) AS date_part(h, c)#x, date_part(h, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -770,7 +770,7 @@ Project [date_part(hours, c#x) AS date_part(hours, c)#x, date_part(hours, ntz#x) +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -781,7 +781,7 @@ Project [date_part(hr, c#x) AS date_part(hr, c)#x, date_part(hr, ntz#x) AS date_ +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -792,7 +792,7 @@ Project [date_part(hrs, c#x) AS date_part(hrs, c)#x, date_part(hrs, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to 
month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -803,7 +803,7 @@ Project [date_part(minute, c#x) AS date_part(minute, c)#x, date_part(minute, ntz +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -814,7 +814,7 @@ Project [date_part(m, c#x) AS date_part(m, c)#x, date_part(m, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -825,7 +825,7 @@ Project [date_part(min, c#x) AS date_part(min, c)#x, date_part(min, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -836,7 +836,7 @@ Project [date_part(mins, c#x) AS date_part(mins, c)#x, date_part(mins, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, 
Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -847,7 +847,7 @@ Project [date_part(minutes, c#x) AS date_part(minutes, c)#x, date_part(minutes, +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -858,7 +858,7 @@ Project [date_part(second, c#x) AS date_part(second, c)#x, date_part(second, ntz +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -869,7 +869,7 @@ Project [date_part(s, c#x) AS date_part(s, c)#x, date_part(s, ntz#x) AS date_par +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -880,7 +880,7 @@ Project [date_part(sec, c#x) AS date_part(sec, c)#x, date_part(sec, ntz#x) AS da +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS 
c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -891,7 +891,7 @@ Project [date_part(seconds, c#x) AS date_part(seconds, c)#x, date_part(seconds, +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -902,7 +902,7 @@ Project [date_part(secs, c#x) AS date_part(secs, c)#x, date_part(secs, ntz#x) AS +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -956,7 +956,7 @@ Project [date_part(null, c#x) AS date_part(NULL, c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -989,7 +989,7 @@ Project [date_part(null, i#x) AS date_part(NULL, i)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ 
-1000,7 +1000,7 @@ Project [extract(year, c#x) AS extract(year FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1011,7 +1011,7 @@ Project [extract(quarter, c#x) AS extract(quarter FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1022,7 +1022,7 @@ Project [extract(month, c#x) AS extract(month FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1033,7 +1033,7 @@ Project [extract(week, c#x) AS extract(week FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1044,7 +1044,7 @@ Project [extract(day, c#x) AS extract(day FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as 
interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1055,7 +1055,7 @@ Project [extract(days, c#x) AS extract(days FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1066,7 +1066,7 @@ Project [extract(dayofweek, c#x) AS extract(dayofweek FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1077,7 +1077,7 @@ Project [extract(dow, c#x) AS extract(dow FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] + +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x] +- OneRowRelation @@ -1088,7 +1088,7 @@ Project [extract(doy, c#x) AS extract(doy FROM c)#x] +- SubqueryAlias t +- View (`t`, [c#x, ntz#x, i#x, j#x]) +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x] - +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO 
SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1099,7 +1099,7 @@ Project [extract(hour, c#x) AS extract(hour FROM c)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1110,7 +1110,7 @@ Project [extract(minute, c#x) AS extract(minute FROM c)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1121,7 +1121,7 @@ Project [extract(second, c#x) AS extract(second FROM c)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1132,7 +1132,7 @@ Project [cast(c#x - j#x as string) AS c - j#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1143,7 +1143,7 @@ Project [day(cast(cast(c#x - j#x as string) as date)) AS day(c - j)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1154,29 +1154,29 @@ Project [extract(day, cast(c#x - j#x as string)) AS extract(day FROM c - j)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation

 -- !query
 select extract(month from to_timestamp(c) - i) from t
 -- !query analysis
-Project [extract(month, to_timestamp(c#x, None, TimestampType, Some(America/Los_Angeles), false) - i#x) AS extract(month FROM to_timestamp(c) - i)#x]
+Project [extract(month, to_timestamp(c#x, None, TimestampType, Some(America/Los_Angeles), true) - i#x) AS extract(month FROM to_timestamp(c) - i)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation

 -- !query
 select extract(second from to_timestamp(c) - j) from t
 -- !query analysis
-Project [extract(second, cast(to_timestamp(c#x, None, TimestampType, Some(America/Los_Angeles), false) - j#x as timestamp)) AS extract(second FROM to_timestamp(c) - j)#x]
+Project [extract(second, cast(to_timestamp(c#x, None, TimestampType, Some(America/Los_Angeles), true) - j#x as timestamp)) AS extract(second FROM to_timestamp(c) - j)#x]
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
@@ -1432,7 +1432,7 @@ Project [datepart(year, c#x) AS datepart(year FROM c)#x, datepart(year, ntz#x) A
 +- SubqueryAlias t
    +- View (`t`, [c#x, ntz#x, i#x, j#x])
       +- Project [cast(c#x as string) AS c#x, cast(ntz#x as timestamp_ntz) AS ntz#x, cast(i#x as interval year to month) AS i#x, cast(j#x as interval day to second) AS j#x]
-         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), false) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
+         +- Project [2011-05-06 07:08:09.1234567 AS c#x, to_timestamp_ntz(2011-05-06 07:08:09.1234567, None, TimestampNTZType, Some(America/Los_Angeles), true) AS ntz#x, INTERVAL '11-8' YEAR TO MONTH AS i#x, INTERVAL '31 16:50:06.789' DAY TO SECOND AS j#x]
             +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-all-mosha.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-all-mosha.sql.out
index da3f3de3fb448..b7dd089ba86a8 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-all-mosha.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-all-mosha.sql.out
@@ -60,7 +60,7 @@ SELECT i + 1, f / i, substring(s, 2, 3), extract(year from t), d / 2, size(a) FR
 GROUP BY ALL ORDER BY 1, 3, 4, 5, 6, 2
 -- !query analysis
 Sort [(i + 1)#x ASC NULLS FIRST, substring(s, 2, 3)#x ASC NULLS FIRST, extract(year FROM t)#x ASC NULLS FIRST, (d / 2)#x ASC NULLS FIRST, size(a)#x ASC NULLS FIRST, (f / i)#x ASC NULLS FIRST], true
-+- Aggregate [(i#x + 1), (f#x / cast(i#x as decimal(10,0))), substring(s#x, 2, 3), extract(year, t#x), (cast(d#x as double) / cast(2 as double)), size(a#x, true)], [(i#x + 1) AS (i + 1)#x, (f#x / cast(i#x as decimal(10,0))) AS (f / i)#x, substring(s#x, 2, 3) AS substring(s, 2, 3)#x, extract(year, t#x) AS extract(year FROM t)#x, (cast(d#x as double) / cast(2 as double)) AS (d / 2)#x, size(a#x, true) AS size(a)#x]
++- Aggregate [(i#x + 1), (f#x / cast(i#x as decimal(10,0))), substring(s#x, 2, 3), extract(year, t#x), (cast(cast(d#x as bigint) as double) / cast(cast(2 as bigint) as double)), size(a#x, false)], [(i#x + 1) AS (i + 1)#x, (f#x / cast(i#x as decimal(10,0))) AS (f / i)#x, substring(s#x, 2, 3) AS substring(s, 2, 3)#x, extract(year, t#x) AS extract(year FROM t)#x, (cast(cast(d#x as bigint) as double) / cast(cast(2 as bigint) as double)) AS (d / 2)#x, size(a#x, false) AS size(a)#x]
    +- SubqueryAlias stuff
       +- View (`stuff`, [i#x, f#x, s#x, t#x, d#x, a#x])
          +- Project [cast(i#x as int) AS i#x, cast(f#x as decimal(6,4)) AS f#x, cast(s#x as string) AS s#x, cast(t#x as string) AS t#x, cast(d#x as string) AS d#x, cast(a#x as array) AS a#x]
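The trailing boolean that flips from false to true on to_timestamp_ntz in the hunks above (and from true to false on size in the group-by-all-mosha golden) is the analyzer baking session configuration into the expression: the last argument of the to_timestamp family renders its fail-on-error flag, and the last argument of size renders legacySizeOfNull. Both flips are consistent with spark.sql.ansi.enabled becoming true by default. A minimal sketch of the user-visible difference, assuming the long-standing config name is unchanged:

    -- Under the new default (ANSI on), parse failures raise instead of returning NULL:
    SELECT to_timestamp('not-a-timestamp');   -- error now; NULL under the old default
    SELECT size(CAST(NULL AS ARRAY<INT>));    -- NULL now; -1 under the legacy flag
    -- Restoring the previous behavior for a session:
    SET spark.sql.ansi.enabled = false;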
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-filter.sql.out
index 94d39111b29ed..dd36da7723556 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-filter.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by-filter.sql.out
@@ -119,7 +119,7 @@ SELECT COUNT(id) FILTER (WHERE hiredate = date "2001-01-01") FROM emp

 -- !query
 SELECT COUNT(id) FILTER (WHERE hiredate = to_date('2001-01-01 00:00:00')) FROM emp
 -- !query analysis
-Aggregate [count(id#x) FILTER (WHERE (hiredate#x = to_date(2001-01-01 00:00:00, None, Some(America/Los_Angeles), false))) AS count(id) FILTER (WHERE (hiredate = to_date(2001-01-01 00:00:00)))#xL]
+Aggregate [count(id#x) FILTER (WHERE (hiredate#x = to_date(2001-01-01 00:00:00, None, Some(America/Los_Angeles), true))) AS count(id) FILTER (WHERE (hiredate = to_date(2001-01-01 00:00:00)))#xL]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -131,7 +131,7 @@ Aggregate [count(id#x) FILTER (WHERE (hiredate#x = to_date(2001-01-01 00:00:00,

 -- !query
 SELECT COUNT(id) FILTER (WHERE hiredate = to_timestamp("2001-01-01 00:00:00")) FROM emp
 -- !query analysis
-Aggregate [count(id#x) FILTER (WHERE (cast(hiredate#x as timestamp) = to_timestamp(2001-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), false))) AS count(id) FILTER (WHERE (hiredate = to_timestamp(2001-01-01 00:00:00)))#xL]
+Aggregate [count(id#x) FILTER (WHERE (cast(hiredate#x as timestamp) = to_timestamp(2001-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), true))) AS count(id) FILTER (WHERE (hiredate = to_timestamp(2001-01-01 00:00:00)))#xL]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -179,7 +179,7 @@ Aggregate [count(distinct id#x) AS count(DISTINCT id)#xL, count(distinct id#x) F

 -- !query
 SELECT COUNT(DISTINCT id) FILTER (WHERE hiredate = to_timestamp("2001-01-01 00:00:00")), COUNT(DISTINCT id) FILTER (WHERE hiredate = to_date('2001-01-01 00:00:00')) FROM emp
 -- !query analysis
-Aggregate [count(distinct id#x) FILTER (WHERE (cast(hiredate#x as timestamp) = to_timestamp(2001-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), false))) AS count(DISTINCT id) FILTER (WHERE (hiredate = to_timestamp(2001-01-01 00:00:00)))#xL, count(distinct id#x) FILTER (WHERE (hiredate#x = to_date(2001-01-01 00:00:00, None, Some(America/Los_Angeles), false))) AS count(DISTINCT id) FILTER (WHERE (hiredate = to_date(2001-01-01 00:00:00)))#xL]
+Aggregate [count(distinct id#x) FILTER (WHERE (cast(hiredate#x as timestamp) = to_timestamp(2001-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), true))) AS count(DISTINCT id) FILTER (WHERE (hiredate = to_timestamp(2001-01-01 00:00:00)))#xL, count(distinct id#x) FILTER (WHERE (hiredate#x = to_date(2001-01-01 00:00:00, None, Some(America/Los_Angeles), true))) AS count(DISTINCT id) FILTER (WHERE (hiredate = to_date(2001-01-01 00:00:00)))#xL]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -373,7 +373,7 @@ SELECT dept_id, SUM(salary) FILTER (WHERE hiredate > date "2003-01-01") FROM emp

 -- !query
 SELECT dept_id, SUM(salary) FILTER (WHERE hiredate > to_date("2003-01-01")) FROM emp GROUP BY dept_id
 -- !query analysis
-Aggregate [dept_id#x], [dept_id#x, sum(salary#x) FILTER (WHERE (hiredate#x > to_date(2003-01-01, None, Some(America/Los_Angeles), false))) AS sum(salary) FILTER (WHERE (hiredate > to_date(2003-01-01)))#x]
+Aggregate [dept_id#x], [dept_id#x, sum(salary#x) FILTER (WHERE (hiredate#x > to_date(2003-01-01, None, Some(America/Los_Angeles), true))) AS sum(salary) FILTER (WHERE (hiredate > to_date(2003-01-01)))#x]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -385,7 +385,7 @@ Aggregate [dept_id#x], [dept_id#x, sum(salary#x) FILTER (WHERE (hiredate#x > to_

 -- !query
 SELECT dept_id, SUM(salary) FILTER (WHERE hiredate > to_timestamp("2003-01-01 00:00:00")) FROM emp GROUP BY dept_id
 -- !query analysis
-Aggregate [dept_id#x], [dept_id#x, sum(salary#x) FILTER (WHERE (cast(hiredate#x as timestamp) > to_timestamp(2003-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), false))) AS sum(salary) FILTER (WHERE (hiredate > to_timestamp(2003-01-01 00:00:00)))#x]
+Aggregate [dept_id#x], [dept_id#x, sum(salary#x) FILTER (WHERE (cast(hiredate#x as timestamp) > to_timestamp(2003-01-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), true))) AS sum(salary) FILTER (WHERE (hiredate > to_timestamp(2003-01-01 00:00:00)))#x]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -481,7 +481,7 @@ SELECT 'foo', SUM(salary) FILTER (WHERE hiredate >= date "2003-01-01") FROM emp

 -- !query
 SELECT 'foo', SUM(salary) FILTER (WHERE hiredate >= to_date("2003-01-01")) FROM emp GROUP BY 1
 -- !query analysis
-Aggregate [foo], [foo AS foo#x, sum(salary#x) FILTER (WHERE (hiredate#x >= to_date(2003-01-01, None, Some(America/Los_Angeles), false))) AS sum(salary) FILTER (WHERE (hiredate >= to_date(2003-01-01)))#x]
+Aggregate [foo], [foo AS foo#x, sum(salary#x) FILTER (WHERE (hiredate#x >= to_date(2003-01-01, None, Some(America/Los_Angeles), true))) AS sum(salary) FILTER (WHERE (hiredate >= to_date(2003-01-01)))#x]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
@@ -493,7 +493,7 @@ Aggregate [foo], [foo AS foo#x, sum(salary#x) FILTER (WHERE (hiredate#x >= to_da

 -- !query
 SELECT 'foo', SUM(salary) FILTER (WHERE hiredate >= to_timestamp("2003-01-01")) FROM emp GROUP BY 1
 -- !query analysis
-Aggregate [foo], [foo AS foo#x, sum(salary#x) FILTER (WHERE (cast(hiredate#x as timestamp) >= to_timestamp(2003-01-01, None, TimestampType, Some(America/Los_Angeles), false))) AS sum(salary) FILTER (WHERE (hiredate >= to_timestamp(2003-01-01)))#x]
+Aggregate [foo], [foo AS foo#x, sum(salary#x) FILTER (WHERE (cast(hiredate#x as timestamp) >= to_timestamp(2003-01-01, None, TimestampType, Some(America/Los_Angeles), true))) AS sum(salary) FILTER (WHERE (hiredate >= to_timestamp(2003-01-01)))#x]
 +- SubqueryAlias emp
    +- View (`EMP`, [id#x, emp_name#x, hiredate#x, salary#x, dept_id#x])
       +- Project [cast(id#x as int) AS id#x, cast(emp_name#x as string) AS emp_name#x, cast(hiredate#x as date) AS hiredate#x, cast(salary#x as double) AS salary#x, cast(dept_id#x as int) AS dept_id#x]
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out
index 8849aa4452252..34ff2a2186f0b 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out
@@ -700,25 +700,8 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException

 -- !query
 SELECT every("true")
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"true\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "first",
-    "requiredType" : "\"BOOLEAN\"",
-    "sqlExpr" : "\"every(true)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 20,
-    "fragment" : "every(\"true\")"
-  } ]
-}
+Aggregate [every(cast(true as boolean)) AS every(true)#x]
++- OneRowRelation

 -- !query
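The every("true") change cuts the other way: a query that used to fail analysis now resolves, because the string argument picks up an implicit cast to BOOLEAN (visible as every(cast(true as boolean)) in the new plan). Roughly, and assuming double quotes still parse as string literals in this suite:

    SELECT every("true");   -- before: DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE
                            -- now: analyzes as every(CAST('true' AS BOOLEAN))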
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/higher-order-functions.sql.out
index 1281b19eb2f86..c06d1e5534aed 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/higher-order-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/higher-order-functions.sql.out
@@ -171,7 +171,7 @@ Project [aggregate(ys#x, named_struct(sum, 0, n, 0), lambdafunction(named_struct

 -- !query
 select transform(zs, z -> aggregate(z, 1, (acc, val) -> acc * val * size(z))) as v from nested
 -- !query analysis
-Project [transform(zs#x, lambdafunction(aggregate(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, true)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x]
+Project [transform(zs#x, lambdafunction(aggregate(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, false)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x]
 +- SubqueryAlias nested
    +- View (`nested`, [x#x, ys#x, zs#x])
       +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x]
@@ -211,7 +211,7 @@ Project [reduce(ys#x, named_struct(sum, 0, n, 0), lambdafunction(named_struct(co

 -- !query
 select transform(zs, z -> reduce(z, 1, (acc, val) -> acc * val * size(z))) as v from nested
 -- !query analysis
-Project [transform(zs#x, lambdafunction(reduce(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, true)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x]
+Project [transform(zs#x, lambdafunction(reduce(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, false)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x]
 +- SubqueryAlias nested
    +- View (`nested`, [x#x, ys#x, zs#x])
       +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x]
@@ -247,7 +247,7 @@ Project [exists(cast(null as array), lambdafunction((lambda y#x > 30), lamb

 -- !query
 select zip_with(ys, zs, (a, b) -> a + size(b)) as v from nested
 -- !query analysis
-Project [zip_with(ys#x, zs#x, lambdafunction((lambda a#x + size(lambda b#x, true)), lambda a#x, lambda b#x, false)) AS v#x]
+Project [zip_with(ys#x, zs#x, lambdafunction((lambda a#x + size(lambda b#x, false)), lambda a#x, lambda b#x, false)) AS v#x]
 +- SubqueryAlias nested
    +- View (`nested`, [x#x, ys#x, zs#x])
       +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x]
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
index c0196bbe118ef..c023e3b56f117 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
@@ -605,63 +605,63 @@ Project [INTERVAL '59' SECOND AS INTERVAL '59' SECOND#x]

 -- !query
 select make_interval(1)
 -- !query analysis
-Project [make_interval(1, 0, 0, 0, 0, 0, 0.000000, false) AS make_interval(1, 0, 0, 0, 0, 0, 0.000000)#x]
+Project [make_interval(1, 0, 0, 0, 0, 0, 0.000000, true) AS make_interval(1, 0, 0, 0, 0, 0, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2)
 -- !query analysis
-Project [make_interval(1, 2, 0, 0, 0, 0, 0.000000, false) AS make_interval(1, 2, 0, 0, 0, 0, 0.000000)#x]
+Project [make_interval(1, 2, 0, 0, 0, 0, 0.000000, true) AS make_interval(1, 2, 0, 0, 0, 0, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3)
 -- !query analysis
-Project [make_interval(1, 2, 3, 0, 0, 0, 0.000000, false) AS make_interval(1, 2, 3, 0, 0, 0, 0.000000)#x]
+Project [make_interval(1, 2, 3, 0, 0, 0, 0.000000, true) AS make_interval(1, 2, 3, 0, 0, 0, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3, 4)
 -- !query analysis
-Project [make_interval(1, 2, 3, 4, 0, 0, 0.000000, false) AS make_interval(1, 2, 3, 4, 0, 0, 0.000000)#x]
+Project [make_interval(1, 2, 3, 4, 0, 0, 0.000000, true) AS make_interval(1, 2, 3, 4, 0, 0, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3, 4, 5)
 -- !query analysis
-Project [make_interval(1, 2, 3, 4, 5, 0, 0.000000, false) AS make_interval(1, 2, 3, 4, 5, 0, 0.000000)#x]
+Project [make_interval(1, 2, 3, 4, 5, 0, 0.000000, true) AS make_interval(1, 2, 3, 4, 5, 0, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3, 4, 5, 6)
 -- !query analysis
-Project [make_interval(1, 2, 3, 4, 5, 6, 0.000000, false) AS make_interval(1, 2, 3, 4, 5, 6, 0.000000)#x]
+Project [make_interval(1, 2, 3, 4, 5, 6, 0.000000, true) AS make_interval(1, 2, 3, 4, 5, 6, 0.000000)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3, 4, 5, 6, 7.008009)
 -- !query analysis
-Project [make_interval(1, 2, 3, 4, 5, 6, cast(7.008009 as decimal(18,6)), false) AS make_interval(1, 2, 3, 4, 5, 6, 7.008009)#x]
+Project [make_interval(1, 2, 3, 4, 5, 6, cast(7.008009 as decimal(18,6)), true) AS make_interval(1, 2, 3, 4, 5, 6, 7.008009)#x]
 +- OneRowRelation

 -- !query
 select make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456)
 -- !query analysis
-Project [make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456, false) AS make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456)#x]
+Project [make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456, true) AS make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456)#x]
 +- OneRowRelation

 -- !query
 select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
 -- !query analysis
-Project [make_interval(0, 0, 0, 0, 0, 0, cast(1234567890123456789 as decimal(18,6)), false) AS make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)#x]
+Project [make_interval(0, 0, 0, 0, 0, 0, cast(1234567890123456789 as decimal(18,6)), true) AS make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out
index fef9d0c5b6250..842b190c5a753 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out
@@ -9,7 +9,7 @@ Project [to_json(named_struct(a, 1, b, 2), Some(America/Los_Angeles)) AS to_json

 -- !query
 select to_json(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'))
 -- !query analysis
-Project [to_json((timestampFormat,dd/MM/yyyy), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false)), Some(America/Los_Angeles)) AS to_json(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x]
+Project [to_json((timestampFormat,dd/MM/yyyy), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true)), Some(America/Los_Angeles)) AS to_json(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/map.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/map.sql.out
index cd8f0e043b9ae..177f73608fba9 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/map.sql.out
@@ -2,7 +2,7 @@

 -- !query
 select element_at(map(1, 'a', 2, 'b'), 5)
 -- !query analysis
-Project [element_at(map(1, a, 2, b), 5, None, false) AS element_at(map(1, a, 2, b), 5)#x]
+Project [element_at(map(1, a, 2, b), 5, None, true) AS element_at(map(1, a, 2, b), 5)#x]
 +- OneRowRelation
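element_at in both the map and array goldens gains a true in the strict-lookup position, so a missing key or out-of-range index should now raise at runtime rather than yield NULL. For callers that want the old NULL-on-miss behavior without toggling the config, the try_-prefixed variant is the usual escape hatch; a sketch:

    SELECT element_at(map(1, 'a', 2, 'b'), 5);      -- expected to error under the new default
    SELECT try_element_at(map(1, 'a', 2, 'b'), 5);  -- returns NULL
    SELECT try_element_at(array(1, 2, 3), 5);       -- returns NULL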
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/math.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/math.sql.out
index 5fe1b69352f57..1fa7b7513993d 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/math.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/math.sql.out
@@ -394,42 +394,42 @@ Project [bround(-9223372036854775808, -1) AS bround(-9223372036854775808, -1)#xL

 -- !query
 SELECT conv('100', 2, 10)
 -- !query analysis
-Project [conv(100, 2, 10, false) AS conv(100, 2, 10)#x]
+Project [conv(100, 2, 10, true) AS conv(100, 2, 10)#x]
 +- OneRowRelation

 -- !query
 SELECT conv(-10, 16, -10)
 -- !query analysis
-Project [conv(cast(-10 as string), 16, -10, false) AS conv(-10, 16, -10)#x]
+Project [conv(cast(-10 as string), 16, -10, true) AS conv(-10, 16, -10)#x]
 +- OneRowRelation

 -- !query
 SELECT conv('9223372036854775808', 10, 16)
 -- !query analysis
-Project [conv(9223372036854775808, 10, 16, false) AS conv(9223372036854775808, 10, 16)#x]
+Project [conv(9223372036854775808, 10, 16, true) AS conv(9223372036854775808, 10, 16)#x]
 +- OneRowRelation

 -- !query
 SELECT conv('92233720368547758070', 10, 16)
 -- !query analysis
-Project [conv(92233720368547758070, 10, 16, false) AS conv(92233720368547758070, 10, 16)#x]
+Project [conv(92233720368547758070, 10, 16, true) AS conv(92233720368547758070, 10, 16)#x]
 +- OneRowRelation

 -- !query
 SELECT conv('9223372036854775807', 36, 10)
 -- !query analysis
-Project [conv(9223372036854775807, 36, 10, false) AS conv(9223372036854775807, 36, 10)#x]
+Project [conv(9223372036854775807, 36, 10, true) AS conv(9223372036854775807, 36, 10)#x]
 +- OneRowRelation

 -- !query
 SELECT conv('-9223372036854775807', 36, 10)
 -- !query analysis
-Project [conv(-9223372036854775807, 36, 10, false) AS conv(-9223372036854775807, 36, 10)#x]
+Project [conv(-9223372036854775807, 36, 10, true) AS conv(-9223372036854775807, 36, 10)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/array.sql.out
similarity index 93%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/array.sql.out
index 53595d1b8a3eb..4db56d6c70561 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/array.sql.out
@@ -212,7 +212,7 @@ select size(timestamp_array) from primitive_arrays

 -- !query analysis
-Project [size(boolean_array#x, false) AS size(boolean_array)#x, size(tinyint_array#x, false) AS size(tinyint_array)#x, size(smallint_array#x, false) AS size(smallint_array)#x, size(int_array#x, false) AS size(int_array)#x, size(bigint_array#x, false) AS size(bigint_array)#x, size(decimal_array#x, false) AS size(decimal_array)#x, size(double_array#x, false) AS size(double_array)#x, size(float_array#x, false) AS size(float_array)#x, size(date_array#x, false) AS size(date_array)#x, size(timestamp_array#x, false) AS size(timestamp_array)#x]
+Project [size(boolean_array#x, true) AS size(boolean_array)#x, size(tinyint_array#x, true) AS size(tinyint_array)#x, size(smallint_array#x, true) AS size(smallint_array)#x, size(int_array#x, true) AS size(int_array)#x, size(bigint_array#x, true) AS size(bigint_array)#x, size(decimal_array#x, true) AS size(decimal_array)#x, size(double_array#x, true) AS size(double_array)#x, size(float_array#x, true) AS size(float_array)#x, size(date_array#x, true) AS size(date_array)#x, size(timestamp_array#x, true) AS size(timestamp_array)#x]
 +- SubqueryAlias primitive_arrays
    +- View (`primitive_arrays`, [boolean_array#x, tinyint_array#x, smallint_array#x, int_array#x, bigint_array#x, decimal_array#x, double_array#x, float_array#x, date_array#x, timestamp_array#x])
       +- Project [cast(boolean_array#x as array) AS boolean_array#x, cast(tinyint_array#x as array) AS tinyint_array#x, cast(smallint_array#x as array) AS smallint_array#x, cast(int_array#x as array) AS int_array#x, cast(bigint_array#x as array) AS bigint_array#x, cast(decimal_array#x as array) AS decimal_array#x, cast(double_array#x as array) AS double_array#x, cast(float_array#x as array) AS float_array#x, cast(date_array#x as array) AS date_array#x, cast(timestamp_array#x as array) AS timestamp_array#x]
@@ -224,70 +224,70 @@ Project [size(boolean_array#x, false) AS size(boolean_array)#x, size(tinyint_arr

 -- !query
 select element_at(array(1, 2, 3), 5)
 -- !query analysis
-Project [element_at(array(1, 2, 3), 5, None, true) AS element_at(array(1, 2, 3), 5)#x]
+Project [element_at(array(1, 2, 3), 5, None, false) AS element_at(array(1, 2, 3), 5)#x]
 +- OneRowRelation

 -- !query
 select element_at(array(1, 2, 3), -5)
 -- !query analysis
-Project [element_at(array(1, 2, 3), -5, None, true) AS element_at(array(1, 2, 3), -5)#x]
+Project [element_at(array(1, 2, 3), -5, None, false) AS element_at(array(1, 2, 3), -5)#x]
 +- OneRowRelation

 -- !query
 select element_at(array(1, 2, 3), 0)
 -- !query analysis
-Project [element_at(array(1, 2, 3), 0, None, true) AS element_at(array(1, 2, 3), 0)#x]
+Project [element_at(array(1, 2, 3), 0, None, false) AS element_at(array(1, 2, 3), 0)#x]
 +- OneRowRelation

 -- !query
 select elt(4, '123', '456')
 -- !query analysis
-Project [elt(4, 123, 456, true) AS elt(4, 123, 456)#x]
+Project [elt(4, 123, 456, false) AS elt(4, 123, 456)#x]
 +- OneRowRelation

 -- !query
 select elt(0, '123', '456')
 -- !query analysis
-Project [elt(0, 123, 456, true) AS elt(0, 123, 456)#x]
+Project [elt(0, 123, 456, false) AS elt(0, 123, 456)#x]
 +- OneRowRelation

 -- !query
 select elt(-1, '123', '456')
 -- !query analysis
-Project [elt(-1, 123, 456, true) AS elt(-1, 123, 456)#x]
+Project [elt(-1, 123, 456, false) AS elt(-1, 123, 456)#x]
 +- OneRowRelation

 -- !query
 select elt(null, '123', '456')
 -- !query analysis
-Project [elt(cast(null as int), 123, 456, true) AS elt(NULL, 123, 456)#x]
+Project [elt(cast(null as int), 123, 456, false) AS elt(NULL, 123, 456)#x]
 +- OneRowRelation

 -- !query
 select elt(null, '123', null)
 -- !query analysis
-Project [elt(cast(null as int), 123, cast(null as string), true) AS elt(NULL, 123, NULL)#x]
+Project [elt(cast(null as int), 123, cast(null as string), false) AS elt(NULL, 123, NULL)#x]
 +- OneRowRelation

 -- !query
 select elt(1, '123', null)
 -- !query analysis
-Project [elt(1, 123, cast(null as string), true) AS elt(1, 123, NULL)#x]
+Project [elt(1, 123, cast(null as string), false) AS elt(1, 123, NULL)#x]
 +- OneRowRelation

 -- !query
 select elt(2, '123', null)
 -- !query analysis
-Project [elt(2, 123, cast(null as string), true) AS elt(2, 123, NULL)#x]
+Project [elt(2, 123, cast(null as string), false) AS elt(2, 123, NULL)#x]
 +- OneRowRelation
@@ -360,21 +360,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException

 -- !query
 select size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2), false) AS size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2), true) AS size(arrays_zip(array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
 +- OneRowRelation

 -- !query
 select size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2, 3), false) AS size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10), 0, 1, 2, 3), true) AS size(arrays_zip(array(), array(1, 2, 3), array(4), array(7, 8, 9, 10)))#x]
 +- OneRowRelation

 -- !query
 select size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10)))
 -- !query analysis
-Project [size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10), 0, 1, 2, 3), false) AS size(arrays_zip(array(1, 2, 3), array(4), NULL, array(7, 8, 9, 10)))#x]
+Project [size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10), 0, 1, 2, 3), true) AS size(arrays_zip(array(1, 2, 3), array(4), NULL, array(7, 8, 9, 10)))#x]
 +- OneRowRelation
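From this point on the diff is mostly renames: the ansi/ golden files move to nonansi/, and their contents invert (size back to the legacy flag, element_at and elt back to lenient lookups). In other words, the dedicated variant suite now exercises the non-default mode. A session-level sketch of reproducing the renamed files' semantics, assuming the flag name is unchanged:

    SET spark.sql.ansi.enabled = false;
    SELECT elt(4, '123', '456');           -- NULL again instead of an invalid-index error
    SELECT element_at(array(1, 2, 3), 5);  -- NULL again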
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/cast.sql.out
similarity index 82%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/cast.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/cast.sql.out
index 643dfd3771ffe..e0687b564d3d1 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/cast.sql.out
@@ -205,193 +205,57 @@ Project [hex(cast(abc as binary)) AS hex(CAST(abc AS BINARY))#x]

 -- !query
 SELECT HEX(CAST(CAST(123 AS byte) AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"",
-    "srcType" : "\"TINYINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 44,
-    "fragment" : "CAST(CAST(123 AS byte) AS binary)"
-  } ]
-}
+Project [hex(cast(cast(123 as tinyint) as binary)) AS hex(CAST(CAST(123 AS TINYINT) AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(CAST(-123 AS byte) AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(CAST(-123 AS TINYINT) AS BINARY)\"",
-    "srcType" : "\"TINYINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 45,
-    "fragment" : "CAST(CAST(-123 AS byte) AS binary)"
-  } ]
-}
+Project [hex(cast(cast(-123 as tinyint) as binary)) AS hex(CAST(CAST(-123 AS TINYINT) AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(123S AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
-    "srcType" : "\"SMALLINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 31,
-    "fragment" : "CAST(123S AS binary)"
-  } ]
-}
+Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(-123S AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
-    "srcType" : "\"SMALLINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 32,
-    "fragment" : "CAST(-123S AS binary)"
-  } ]
-}
+Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(123 AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
-    "srcType" : "\"INT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 30,
-    "fragment" : "CAST(123 AS binary)"
-  } ]
-}
+Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(-123 AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
-    "srcType" : "\"INT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 31,
-    "fragment" : "CAST(-123 AS binary)"
-  } ]
-}
+Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(123L AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(123 AS BINARY)\"",
-    "srcType" : "\"BIGINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 31,
-    "fragment" : "CAST(123L AS binary)"
-  } ]
-}
+Project [hex(cast(123 as binary)) AS hex(CAST(123 AS BINARY))#x]
++- OneRowRelation

 -- !query
 SELECT HEX(CAST(-123L AS binary))
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(-123 AS BINARY)\"",
-    "srcType" : "\"BIGINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 32,
-    "fragment" : "CAST(-123L AS binary)"
-  } ]
-}
+Project [hex(cast(-123 as binary)) AS hex(CAST(-123 AS BINARY))#x]
++- OneRowRelation

 -- !query
@@ -940,25 +804,8 @@ Project [hex(cast(abc as binary)) AS hex(CAST(abc AS BINARY))#x]

 -- !query
 SELECT HEX((123 :: byte) :: binary)
 -- !query analysis
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "configVal" : "'false'",
-    "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"",
-    "srcType" : "\"TINYINT\"",
-    "targetType" : "\"BINARY\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 34,
-    "fragment" : "(123 :: byte) :: binary"
-  } ]
-}
+Project [hex(cast(cast(123 as tinyint) as binary)) AS hex(CAST(CAST(123 AS TINYINT) AS BINARY))#x]
++- OneRowRelation

 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/conditional-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/conditional-functions.sql.out
similarity index 99%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/conditional-functions.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/conditional-functions.sql.out
index 5effa73c413a6..7df6556cddd4e 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/conditional-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/conditional-functions.sql.out
@@ -131,7 +131,7 @@ Project [zeroifnull(null) AS zeroifnull(NULL)#x, zeroifnull(1) AS zeroifnull(1)#

 -- !query
 SELECT zeroifnull('abc')
 -- !query analysis
-Project [zeroifnull(abc) AS zeroifnull(abc)#xL]
+Project [zeroifnull(abc) AS zeroifnull(abc)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/date.sql.out
similarity index 84%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/date.sql.out
index 0e4d2d4e99e26..88c7d7b4e7d72 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/date.sql.out
@@ -37,21 +37,21 @@ org.apache.spark.sql.catalyst.parser.ParseException

 -- !query
 select make_date(2019, 1, 1), make_date(12, 12, 12)
 -- !query analysis
-Project [make_date(2019, 1, 1, true) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, true) AS make_date(12, 12, 12)#x]
+Project [make_date(2019, 1, 1, false) AS make_date(2019, 1, 1)#x, make_date(12, 12, 12, false) AS make_date(12, 12, 12)#x]
 +- OneRowRelation

 -- !query
 select make_date(2000, 13, 1)
 -- !query analysis
-Project [make_date(2000, 13, 1, true) AS make_date(2000, 13, 1)#x]
+Project [make_date(2000, 13, 1, false) AS make_date(2000, 13, 1)#x]
 +- OneRowRelation

 -- !query
 select make_date(2000, 1, 33)
 -- !query analysis
-Project [make_date(2000, 1, 33, true) AS make_date(2000, 1, 33)#x]
+Project [make_date(2000, 1, 33, false) AS make_date(2000, 1, 33)#x]
 +- OneRowRelation
@@ -148,21 +148,21 @@ select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(n

 -- !query
 select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_date(cast(null as string), None, Some(America/Los_Angeles), true) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), true) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), true) AS to_date(2016-12-31, yyyy-MM-dd)#x]
+Project [to_date(cast(null as string), None, Some(America/Los_Angeles), false) AS to_date(NULL)#x, to_date(2016-12-31, None, Some(America/Los_Angeles), false) AS to_date(2016-12-31)#x, to_date(2016-12-31, Some(yyyy-MM-dd), Some(America/Los_Angeles), false) AS to_date(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation

 -- !query
 select to_date("16", "dd")
 -- !query analysis
-Project [to_date(16, Some(dd), Some(America/Los_Angeles), true) AS to_date(16, dd)#x]
+Project [to_date(16, Some(dd), Some(America/Los_Angeles), false) AS to_date(16, dd)#x]
 +- OneRowRelation

 -- !query
 select to_date("02-29", "MM-dd")
 -- !query analysis
-Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), true) AS to_date(02-29, MM-dd)#x]
+Project [to_date(02-29, Some(MM-dd), Some(America/Los_Angeles), false) AS to_date(02-29, MM-dd)#x]
 +- OneRowRelation
@@ -201,21 +201,21 @@ select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(time

 -- !query
 select next_day("2015-07-23", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), Mon, true) AS next_day(2015-07-23, Mon)#x]
+Project [next_day(cast(2015-07-23 as date), Mon, false) AS next_day(2015-07-23, Mon)#x]
 +- OneRowRelation

 -- !query
 select next_day("2015-07-23", "xx")
 -- !query analysis
-Project [next_day(cast(2015-07-23 as date), xx, true) AS next_day(2015-07-23, xx)#x]
+Project [next_day(cast(2015-07-23 as date), xx, false) AS next_day(2015-07-23, xx)#x]
 +- OneRowRelation

 -- !query
 select next_day("2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(2015-07-23 12:12:12, Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(2015-07-23 12:12:12, Mon)#x]
 +- OneRowRelation
@@ -228,28 +228,28 @@ select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")

 -- !query
 select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
 -- !query analysis
-Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, true) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
+Project [next_day(cast(2015-07-23 12:12:12 as date), Mon, false) AS next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon)#x]
 +- OneRowRelation

 -- !query
 select next_day("xx", "Mon")
 -- !query analysis
-Project [next_day(cast(xx as date), Mon, true) AS next_day(xx, Mon)#x]
+Project [next_day(cast(xx as date), Mon, false) AS next_day(xx, Mon)#x]
 +- OneRowRelation

 -- !query
 select next_day(null, "Mon")
 -- !query analysis
-Project [next_day(cast(null as date), Mon, true) AS next_day(NULL, Mon)#x]
+Project [next_day(cast(null as date), Mon, false) AS next_day(NULL, Mon)#x]
 +- OneRowRelation

 -- !query
 select next_day(null, "xx")
 -- !query analysis
-Project [next_day(cast(null as date), xx, true) AS next_day(NULL, xx)#x]
+Project [next_day(cast(null as date), xx, false) AS next_day(NULL, xx)#x]
 +- OneRowRelation
@@ -355,15 +355,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException

 -- !query
 select date_add('2011-11-11', '1')
 -- !query analysis
-Project [date_add(cast(2011-11-11 as date), cast(1 as int)) AS date_add(2011-11-11, 1)#x]
+Project [date_add(cast(2011-11-11 as date), 1) AS date_add(2011-11-11, 1)#x]
 +- OneRowRelation

 -- !query
 select date_add('2011-11-11', '1.2')
 -- !query analysis
-Project [date_add(cast(2011-11-11 as date), cast(1.2 as int)) AS date_add(2011-11-11, 1.2)#x]
-+- OneRowRelation
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "date_add"
+  }
+}

 -- !query
@@ -499,7 +505,14 @@ select date_sub(date'2011-11-11', '1')

 -- !query
 select date_sub(date'2011-11-11', '1.2')
 -- !query analysis
-[Analyzer test output redacted due to nondeterminism]
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "date_sub"
+  }
+}

 -- !query
@@ -530,23 +543,49 @@ Project [date_sub(cast(2011-11-11 12:12:12 as date), 1) AS date_sub(TIMESTAMP_NT

 -- !query
 select date_add('2011-11-11', int_str) from date_view
 -- !query analysis
-Project [date_add(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_add(2011-11-11, int_str)#x]
-+- SubqueryAlias date_view
-   +- View (`date_view`, [date_str#x, int_str#x])
-      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
-         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
-            +- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"int_str\"",
+    "inputType" : "\"STRING\"",
+    "paramIndex" : "second",
+    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
+    "sqlExpr" : "\"date_add(2011-11-11, int_str)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 38,
+    "fragment" : "date_add('2011-11-11', int_str)"
+  } ]
+}

 -- !query
 select date_sub('2011-11-11', int_str) from date_view
 -- !query analysis
-Project [date_sub(cast(2011-11-11 as date), cast(int_str#x as int)) AS date_sub(2011-11-11, int_str)#x]
-+- SubqueryAlias date_view
-   +- View (`date_view`, [date_str#x, int_str#x])
-      +- Project [cast(date_str#x as string) AS date_str#x, cast(int_str#x as string) AS int_str#x]
-         +- Project [2011-11-11 AS date_str#x, 1 AS int_str#x]
-            +- OneRowRelation
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"int_str\"",
+    "inputType" : "\"STRING\"",
+    "paramIndex" : "second",
+    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
+    "sqlExpr" : "\"date_sub(2011-11-11, int_str)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 38,
+    "fragment" : "date_sub('2011-11-11', int_str)"
+  } ]
+}

 -- !query
@@ -622,7 +661,25 @@ select date '2001-10-01' - date '2001-09-28'

 -- !query
 select date '2001-10-01' - '2001-09-28'
 -- !query analysis
-[Analyzer test output redacted due to nondeterminism]
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"2001-09-28\"",
+    "inputType" : "\"DOUBLE\"",
+    "paramIndex" : "second",
+    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
+    "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 39,
+    "fragment" : "date '2001-10-01' - '2001-09-28'"
+  } ]
+}

 -- !query
@@ -652,7 +709,25 @@ select date_str - date '2001-09-28' from date_view

 -- !query
 select date '2001-09-28' - date_str from date_view
 -- !query analysis
-[Analyzer test output redacted due to nondeterminism]
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"date_str\"",
+    "inputType" : "\"DOUBLE\"",
+    "paramIndex" : "second",
+    "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
+    "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 35,
+    "fragment" : "date '2001-09-28' - date_str"
+  } ]
+}

 -- !query
@@ -664,7 +739,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"1\"",
-    "inputType" : "\"DATE\"",
+    "inputType" : "\"DOUBLE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
     "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
@@ -687,11 +762,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "inputSql" : "\"DATE '2011-11-11'\"",
-    "inputType" : "\"DATE\"",
+    "inputSql" : "\"1\"",
+    "inputType" : "\"DOUBLE\"",
     "paramIndex" : "second",
     "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")",
-    "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\""
+    "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -729,7 +804,7 @@ select date '2012-01-01' - interval '2-2' year to month,

 -- !query
 select to_date('26/October/2015', 'dd/MMMMM/yyyy')
 -- !query analysis
-Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), true) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
+Project [to_date(26/October/2015, Some(dd/MMMMM/yyyy), Some(America/Los_Angeles), false) AS to_date(26/October/2015, dd/MMMMM/yyyy)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-parsing-invalid.sql.out
similarity index 71%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-parsing-invalid.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-parsing-invalid.sql.out
index 74146ab17a4d4..ad4a2feb9661c 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-parsing-invalid.sql.out
@@ -2,126 +2,126 @@

 -- !query
 select to_timestamp('294248', 'y')
 -- !query analysis
-Project [to_timestamp(294248, Some(y), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(294248, y)#x]
+Project [to_timestamp(294248, Some(y), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(294248, y)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('1', 'yy')
 -- !query analysis
-Project [to_timestamp(1, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, yy)#x]
+Project [to_timestamp(1, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, yy)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('-12', 'yy')
 -- !query analysis
-Project [to_timestamp(-12, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(-12, yy)#x]
+Project [to_timestamp(-12, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(-12, yy)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('123', 'yy')
 -- !query analysis
-Project [to_timestamp(123, Some(yy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(123, yy)#x]
+Project [to_timestamp(123, Some(yy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(123, yy)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('1', 'yyy')
 -- !query analysis
-Project [to_timestamp(1, Some(yyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1, yyy)#x]
+Project [to_timestamp(1, Some(yyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1, yyy)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('1234567', 'yyyyyyy')
 -- !query analysis
-Project [to_timestamp(1234567, Some(yyyyyyy), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1234567, yyyyyyy)#x]
+Project [to_timestamp(1234567, Some(yyyyyyy), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1234567, yyyyyyy)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('366', 'D')
 -- !query analysis
-Project [to_timestamp(366, Some(D), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(366, D)#x]
+Project [to_timestamp(366, Some(D), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(366, D)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('9', 'DD')
 -- !query analysis
-Project [to_timestamp(9, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, DD)#x]
+Project [to_timestamp(9, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, DD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('366', 'DD')
 -- !query analysis
-Project [to_timestamp(366, Some(DD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(366, DD)#x]
+Project [to_timestamp(366, Some(DD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(366, DD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('9', 'DDD')
 -- !query analysis
-Project [to_timestamp(9, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(9, DDD)#x]
+Project [to_timestamp(9, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(9, DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('99', 'DDD')
 -- !query analysis
-Project [to_timestamp(99, Some(DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(99, DDD)#x]
+Project [to_timestamp(99, Some(DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(99, DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('30-365', 'dd-DDD')
 -- !query analysis
-Project [to_timestamp(30-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(30-365, dd-DDD)#x]
+Project [to_timestamp(30-365, Some(dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(30-365, dd-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('11-365', 'MM-DDD')
 -- !query analysis
-Project [to_timestamp(11-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(11-365, MM-DDD)#x]
+Project [to_timestamp(11-365, Some(MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(11-365, MM-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('2019-366', 'yyyy-DDD')
 -- !query analysis
-Project [to_timestamp(2019-366, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-366, yyyy-DDD)#x]
+Project [to_timestamp(2019-366, Some(yyyy-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-366, yyyy-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('12-30-365', 'MM-dd-DDD')
 -- !query analysis
-Project [to_timestamp(12-30-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12-30-365, MM-dd-DDD)#x]
+Project [to_timestamp(12-30-365, Some(MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12-30-365, MM-dd-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('2020-01-365', 'yyyy-dd-DDD')
 -- !query analysis
-Project [to_timestamp(2020-01-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-01-365, yyyy-dd-DDD)#x]
+Project [to_timestamp(2020-01-365, Some(yyyy-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-01-365, yyyy-dd-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('2020-10-350', 'yyyy-MM-DDD')
 -- !query analysis
-Project [to_timestamp(2020-10-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-10-350, yyyy-MM-DDD)#x]
+Project [to_timestamp(2020-10-350, Some(yyyy-MM-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-10-350, yyyy-MM-DDD)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD')
 -- !query analysis
-Project [to_timestamp(2020-11-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-11-31-366, yyyy-MM-dd-DDD)#x]
+Project [to_timestamp(2020-11-31-366, Some(yyyy-MM-dd-DDD), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-11-31-366, yyyy-MM-dd-DDD)#x]
 +- OneRowRelation
@@ -135,56 +135,56 @@ Project [from_csv(StructField(date,DateType,true), (dateFormat,yyyy-DDD), 2018-3

 -- !query
 select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_date(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), true) AS to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x]
+Project [to_date(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), false) AS to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x]
 +- OneRowRelation

 -- !query
 select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_date(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), true) AS to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x]
+Project [to_date(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), Some(America/Los_Angeles), false) AS to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_timestamp(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x]
+Project [to_timestamp(2020-01-27T20:06:11.847, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#x]
 +- OneRowRelation

 -- !query
 select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_timestamp(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x]
+Project [to_timestamp(Unparseable, Some(yyyy-MM-dd HH:mm:ss.SSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#x]
 +- OneRowRelation

 -- !query
 select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL]
+Project [unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL]
 +- OneRowRelation

 -- !query
 select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL]
+Project [unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL]
 +- OneRowRelation

 -- !query
 select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL]
+Project [to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS)#xL]
 +- OneRowRelation

 -- !query
 select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
 -- !query analysis
-Project [to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), true) AS to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL]
+Project [to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS, Some(America/Los_Angeles), false) AS to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS)#xL]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-special.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-special.sql.out
similarity index 56%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-special.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-special.sql.out
index 6768297fd8116..01d1f2c40a4a6 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/datetime-special.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/datetime-special.sql.out
@@ -8,7 +8,7 @@ select date'999999-03-18', date'-0001-1-28', date'0015'

 -- !query
 select make_date(999999, 3, 18), make_date(-1, 1, 28)
 -- !query analysis
-Project [make_date(999999, 3, 18, true) AS make_date(999999, 3, 18)#x, make_date(-1, 1, 28, true) AS make_date(-1, 1, 28)#x]
+Project [make_date(999999, 3, 18, false) AS make_date(999999, 3, 18)#x, make_date(-1, 1, 28, false) AS make_date(-1, 1, 28)#x]
 +- OneRowRelation
@@ -21,5 +21,5 @@ select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timesta

 -- !query
 select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0)
 -- !query analysis
-Project [make_timestamp(-1969, 12, 31, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(-1969, 12, 31, 16, 0, 0.0)#x, make_timestamp(-15, 3, 18, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(-15, 3, 18, 16, 0, 0.0)#x, make_timestamp(99999, 3, 18, 12, 3, cast(17.0 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(99999, 3, 18, 12, 3, 17.0)#x]
+Project [make_timestamp(-1969, 12, 31, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(-1969, 12, 31, 16, 0, 0.0)#x, make_timestamp(-15, 3, 18, 16, 0, cast(0.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(-15, 3, 18, 16, 0, 0.0)#x, make_timestamp(99999, 3, 18, 12, 3, cast(17.0 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(99999, 3, 18, 12, 3, 17.0)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/decimalArithmeticOperations.sql.out
similarity index 100%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/decimalArithmeticOperations.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/decimalArithmeticOperations.sql.out
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-disabled.sql.out
similarity index 100%
rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out
rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-disabled.sql.out
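The double-quoted-identifiers-enabled golden that follows changes because that feature appears to be tied to ANSI mode: in the non-ANSI run the same "..." tokens fall back to string literals, so lookups that used to fail late with TABLE_OR_VIEW_NOT_FOUND now fail earlier as PARSE_SYNTAX_ERROR, while literal-like uses start succeeding. A sketch, assuming spark.sql.ansi.doubleQuotedIdentifiers only takes effect while ANSI mode is on:

    SET spark.sql.ansi.doubleQuotedIdentifiers = true;
    SET spark.sql.ansi.enabled = true;
    SELECT 1 FROM "not_exist";   -- parsed as an identifier: TABLE_OR_VIEW_NOT_FOUND
    SET spark.sql.ansi.enabled = false;
    SELECT "hello";              -- parsed as a string literal: returns 'hello'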
sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-disabled.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-enabled.sql.out similarity index 56% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-enabled.sql.out index 22dfeac5fd0b6..a02bf525f947d 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/double-quoted-identifiers-enabled.sql.out @@ -2,32 +2,27 @@ -- !query SELECT 1 FROM "not_exist" -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 15, - "stopIndex" : 25, - "fragment" : "\"not_exist\"" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } -- !query USE SCHEMA "not_exist" -- !query analysis -org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "SCHEMA_NOT_FOUND", - "sqlState" : "42704", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "schemaName" : "`spark_catalog`.`not_exist`" + "error" : "'\"not_exist\"'", + "hint" : "" } } @@ -35,122 +30,84 @@ org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException -- !query ALTER TABLE "not_exist" ADD COLUMN not_exist int -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 23, - "fragment" : "\"not_exist\"" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } -- !query ALTER TABLE not_exist ADD COLUMN "not_exist" int -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 21, - "fragment" : "not_exist" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } -- !query SELECT 1 AS "not_exist" FROM not_exist -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - 
"queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 30, - "stopIndex" : 38, - "fragment" : "not_exist" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } -- !query SELECT 1 FROM not_exist AS X("hello") -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 15, - "stopIndex" : 23, - "fragment" : "not_exist" - } ] + "error" : "'\"hello\"'", + "hint" : "" + } } -- !query SELECT "not_exist"() -- !query analysis -org.apache.spark.sql.AnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "UNRESOLVED_ROUTINE", - "sqlState" : "42883", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "routineName" : "`not_exist`", - "searchPath" : "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 20, - "fragment" : "\"not_exist\"()" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } -- !query SELECT "not_exist".not_exist() -- !query analysis -org.apache.spark.sql.AnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "UNRESOLVED_ROUTINE", - "sqlState" : "42883", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "routineName" : "`not_exist`.`not_exist`", - "searchPath" : "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "\"not_exist\".not_exist()" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -312,62 +269,29 @@ org.apache.spark.sql.AnalysisException -- !query SELECT "hello" -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION", - "sqlState" : "42703", - "messageParameters" : { - "objectName" : "`hello`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 14, - "fragment" : "\"hello\"" - } ] -} +Project [hello AS hello#x] ++- OneRowRelation -- !query CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1 -- !query analysis -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "PARSE_SYNTAX_ERROR", - "sqlState" : "42601", - "messageParameters" : { - "error" : "'\"hello\"'", - "hint" : "" - } -} +CreateViewCommand `v`, [(c1,Some(hello))], SELECT 1, false, false, LocalTempView, UNSUPPORTED, true + +- Project [1 AS 1#x] + +- OneRowRelation -- !query DROP VIEW v -- !query analysis -org.apache.spark.sql.catalyst.analysis.NoSuchTableException -{ - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", - "messageParameters" : { - "relationName" : "`spark_catalog`.`default`.`v`" - } -} +DropTempViewCommand v -- !query SELECT INTERVAL "1" YEAR -- !query analysis -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "PARSE_SYNTAX_ERROR", - "sqlState" : "42601", - "messageParameters" : { - "error" : "'\"1\"'", - "hint" : "" - } -} +Project [INTERVAL '1' YEAR AS INTERVAL '1' YEAR#x] ++- OneRowRelation -- !query @@ -401,54 +325,69 @@ 
Project [INTERVAL '1' YEAR AS INTERVAL '1' YEAR#x] -- !query CREATE SCHEMA "myschema" -- !query analysis -CreateNamespace false -+- ResolvedNamespace V2SessionCatalog(spark_catalog), [myschema] +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myschema\"'", + "hint" : "" + } +} -- !query CREATE TEMPORARY VIEW "myview"("c1") AS WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v" -- !query analysis -CreateViewCommand `myview`, [(c1,None)], WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v", false, false, LocalTempView, UNSUPPORTED, true - +- WithCTE - :- CTERelationDef xxxx, false - : +- SubqueryAlias v - : +- Project [1#x AS a#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [a#x] - +- SubqueryAlias v - +- CTERelationRef xxxx, true, [a#x], false +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myview\"'", + "hint" : "" + } +} -- !query SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1") -- !query analysis -Project [a1#x AS a2#x] -+- SubqueryAlias atab - +- Project [c1#x AS a1#x] - +- SubqueryAlias myview - +- View (`myview`, [c1#x]) - +- Project [cast(a#x as int) AS c1#x] - +- WithCTE - :- CTERelationDef xxxx, false - : +- SubqueryAlias v - : +- Project [1#x AS a#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [a#x] - +- SubqueryAlias v - +- CTERelationRef xxxx, true, [a#x], false +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"a2\"'", + "hint" : "" + } +} -- !query DROP TABLE "myview" -- !query analysis -DropTempViewCommand myview +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myview\"'", + "hint" : "" + } +} -- !query DROP SCHEMA "myschema" -- !query analysis -DropNamespace false, false -+- ResolvedNamespace V2SessionCatalog(spark_catalog), [myschema] +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myschema\"'", + "hint" : "" + } +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/higher-order-functions.sql.out similarity index 97% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/higher-order-functions.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/higher-order-functions.sql.out index c06d1e5534aed..1281b19eb2f86 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/higher-order-functions.sql.out @@ -171,7 +171,7 @@ Project [aggregate(ys#x, named_struct(sum, 0, n, 0), lambdafunction(named_struct -- !query select transform(zs, z -> aggregate(z, 1, (acc, val) -> acc * val * size(z))) as v from nested -- !query analysis -Project [transform(zs#x, lambdafunction(aggregate(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, false)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x] +Project [transform(zs#x, lambdafunction(aggregate(lambda 
z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, true)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x] +- SubqueryAlias nested +- View (`nested`, [x#x, ys#x, zs#x]) +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x] @@ -211,7 +211,7 @@ Project [reduce(ys#x, named_struct(sum, 0, n, 0), lambdafunction(named_struct(co -- !query select transform(zs, z -> reduce(z, 1, (acc, val) -> acc * val * size(z))) as v from nested -- !query analysis -Project [transform(zs#x, lambdafunction(reduce(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, false)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x] +Project [transform(zs#x, lambdafunction(reduce(lambda z#x, 1, lambdafunction(((lambda acc#x * lambda val#x) * size(lambda z#x, true)), lambda acc#x, lambda val#x, false), lambdafunction(lambda id#x, lambda id#x, false)), lambda z#x, false)) AS v#x] +- SubqueryAlias nested +- View (`nested`, [x#x, ys#x, zs#x]) +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x] @@ -247,7 +247,7 @@ Project [exists(cast(null as array), lambdafunction((lambda y#x > 30), lamb -- !query select zip_with(ys, zs, (a, b) -> a + size(b)) as v from nested -- !query analysis -Project [zip_with(ys#x, zs#x, lambdafunction((lambda a#x + size(lambda b#x, false)), lambda a#x, lambda b#x, false)) AS v#x] +Project [zip_with(ys#x, zs#x, lambdafunction((lambda a#x + size(lambda b#x, true)), lambda a#x, lambda b#x, false)) AS v#x] +- SubqueryAlias nested +- View (`nested`, [x#x, ys#x, zs#x]) +- Project [cast(x#x as int) AS x#x, cast(ys#x as array) AS ys#x, cast(zs#x as array>) AS zs#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/interval.sql.out similarity index 98% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/interval.sql.out index c023e3b56f117..c0196bbe118ef 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/interval.sql.out @@ -605,63 +605,63 @@ Project [INTERVAL '59' SECOND AS INTERVAL '59' SECOND#x] -- !query select make_interval(1) -- !query analysis -Project [make_interval(1, 0, 0, 0, 0, 0, 0.000000, true) AS make_interval(1, 0, 0, 0, 0, 0, 0.000000)#x] +Project [make_interval(1, 0, 0, 0, 0, 0, 0.000000, false) AS make_interval(1, 0, 0, 0, 0, 0, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2) -- !query analysis -Project [make_interval(1, 2, 0, 0, 0, 0, 0.000000, true) AS make_interval(1, 2, 0, 0, 0, 0, 0.000000)#x] +Project [make_interval(1, 2, 0, 0, 0, 0, 0.000000, false) AS make_interval(1, 2, 0, 0, 0, 0, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3) -- !query analysis -Project [make_interval(1, 2, 3, 0, 0, 0, 0.000000, true) AS make_interval(1, 2, 3, 0, 0, 0, 0.000000)#x] +Project [make_interval(1, 2, 3, 0, 0, 0, 0.000000, false) AS make_interval(1, 2, 3, 0, 0, 0, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3, 4) -- !query analysis -Project [make_interval(1, 2, 3, 4, 0, 0, 0.000000, true) AS make_interval(1, 2, 3, 4, 0, 0, 0.000000)#x] +Project [make_interval(1, 2, 
3, 4, 0, 0, 0.000000, false) AS make_interval(1, 2, 3, 4, 0, 0, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3, 4, 5) -- !query analysis -Project [make_interval(1, 2, 3, 4, 5, 0, 0.000000, true) AS make_interval(1, 2, 3, 4, 5, 0, 0.000000)#x] +Project [make_interval(1, 2, 3, 4, 5, 0, 0.000000, false) AS make_interval(1, 2, 3, 4, 5, 0, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3, 4, 5, 6) -- !query analysis -Project [make_interval(1, 2, 3, 4, 5, 6, 0.000000, true) AS make_interval(1, 2, 3, 4, 5, 6, 0.000000)#x] +Project [make_interval(1, 2, 3, 4, 5, 6, 0.000000, false) AS make_interval(1, 2, 3, 4, 5, 6, 0.000000)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3, 4, 5, 6, 7.008009) -- !query analysis -Project [make_interval(1, 2, 3, 4, 5, 6, cast(7.008009 as decimal(18,6)), true) AS make_interval(1, 2, 3, 4, 5, 6, 7.008009)#x] +Project [make_interval(1, 2, 3, 4, 5, 6, cast(7.008009 as decimal(18,6)), false) AS make_interval(1, 2, 3, 4, 5, 6, 7.008009)#x] +- OneRowRelation -- !query select make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456) -- !query analysis -Project [make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456, true) AS make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456)#x] +Project [make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456, false) AS make_interval(1, 2, 3, 4, 0, 0, 123456789012.123456)#x] +- OneRowRelation -- !query select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) -- !query analysis -Project [make_interval(0, 0, 0, 0, 0, 0, cast(1234567890123456789 as decimal(18,6)), true) AS make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)#x] +Project [make_interval(0, 0, 0, 0, 0, 0, cast(1234567890123456789 as decimal(18,6)), false) AS make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/keywords.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/keywords.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/keywords.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/keywords.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/literals.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/literals.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/map.sql.out similarity index 97% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/map.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/map.sql.out index 177f73608fba9..cd8f0e043b9ae 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/map.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/map.sql.out @@ -2,7 +2,7 @@ -- !query select element_at(map(1, 'a', 2, 'b'), 5) -- !query analysis -Project [element_at(map(1, a, 2, b), 5, None, true) AS element_at(map(1, a, 2, b), 5)#x] +Project [element_at(map(1, a, 2, b), 5, None, false) AS element_at(map(1, a, 2, b), 5)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/math.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/math.sql.out similarity index 94% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/math.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/math.sql.out index 1fa7b7513993d..5fe1b69352f57 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/math.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/math.sql.out @@ -394,42 +394,42 @@ Project [bround(-9223372036854775808, -1) AS bround(-9223372036854775808, -1)#xL -- !query SELECT conv('100', 2, 10) -- !query analysis -Project [conv(100, 2, 10, true) AS conv(100, 2, 10)#x] +Project [conv(100, 2, 10, false) AS conv(100, 2, 10)#x] +- OneRowRelation -- !query SELECT conv(-10, 16, -10) -- !query analysis -Project [conv(cast(-10 as string), 16, -10, true) AS conv(-10, 16, -10)#x] +Project [conv(cast(-10 as string), 16, -10, false) AS conv(-10, 16, -10)#x] +- OneRowRelation -- !query SELECT conv('9223372036854775808', 10, 16) -- !query analysis -Project [conv(9223372036854775808, 10, 16, true) AS conv(9223372036854775808, 10, 16)#x] +Project [conv(9223372036854775808, 10, 16, false) AS conv(9223372036854775808, 10, 16)#x] +- OneRowRelation -- !query SELECT conv('92233720368547758070', 10, 16) -- !query analysis -Project [conv(92233720368547758070, 10, 16, true) AS conv(92233720368547758070, 10, 16)#x] +Project [conv(92233720368547758070, 10, 16, false) AS conv(92233720368547758070, 10, 16)#x] +- OneRowRelation -- !query SELECT conv('9223372036854775807', 36, 10) -- !query analysis -Project [conv(9223372036854775807, 36, 10, true) AS conv(9223372036854775807, 36, 10)#x] +Project [conv(9223372036854775807, 36, 10, false) AS conv(9223372036854775807, 36, 10)#x] +- OneRowRelation -- !query SELECT conv('-9223372036854775807', 36, 10) -- !query analysis -Project [conv(-9223372036854775807, 36, 10, true) AS conv(-9223372036854775807, 36, 10)#x] +Project [conv(-9223372036854775807, 36, 10, false) AS conv(-9223372036854775807, 36, 10)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/parse-schema-string.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/parse-schema-string.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/parse-schema-string.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/parse-schema-string.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/string-functions.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/string-functions.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/string-functions.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/timestamp.sql.out similarity index 76% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/timestamp.sql.out index 560974d28c545..dcfd783b648f8 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/timestamp.sql.out @@ -90,70 +90,70 @@ Project 
[(localtimestamp(Some(America/Los_Angeles)) = localtimestamp(Some(Americ -- !query SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678) -- !query analysis -Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x] +Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x] +- OneRowRelation -- !query SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET') -- !query analysis -Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x] +Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x] +- OneRowRelation -- !query SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) -- !query analysis -Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x] +Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 1) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 60) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 61) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, null) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 59.999999) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x] +- 
OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x] +- OneRowRelation -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) -- !query analysis -Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x] +Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x] +- OneRowRelation @@ -302,231 +302,231 @@ select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020- -- !query select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd') -- !query analysis -Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x] +Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x] +- OneRowRelation -- !query select to_timestamp(1) -- !query analysis -Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1)#x] +Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1)#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS 
to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query analysis -Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x] +Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x] +Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x] +Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x] +Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 
10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x] +- OneRowRelation -- !query select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') -- !query analysis -Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x] +Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS") -- !query analysis -Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x] +Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x] +- OneRowRelation -- !query select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'") -- !query analysis -Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S, yyyy-MM-dd'S')#x] +Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S, 
yyyy-MM-dd'S')#x] +- OneRowRelation -- !query select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd") -- !query analysis -Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x] +Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS") -- !query analysis -Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x] +Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''") -- !query analysis -Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x] +Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x] +- OneRowRelation -- !query select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss") -- !query analysis -Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x] +Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x] +- OneRowRelation -- !query select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss") -- !query analysis -Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x] +Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x] +- OneRowRelation -- !query select to_timestamp("16", "dd") -- !query analysis -Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(16, dd)#x] +Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(16, dd)#x] +- OneRowRelation -- !query select to_timestamp("02-29", "MM-dd") -- !query analysis -Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(02-29, MM-dd)#x] +Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(02-29, MM-dd)#x] +- OneRowRelation -- !query select to_timestamp("2019 40", "yyyy mm") -- !query analysis -Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 40, yyyy mm)#x] +Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 40, yyyy mm)#x] +- OneRowRelation -- !query select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss") -- !query analysis -Project 
[to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x] +Project [to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x] +- OneRowRelation @@ -551,13 +551,49 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' -- !query select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' -- !query analysis -[Analyzer test output redacted due to nondeterminism] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"2011-11-11 11:11:10\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'" + } ] +} -- !query select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' -- !query analysis -[Analyzer test output redacted due to nondeterminism] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"2011-11-11 11:11:11\"", + "inputType" : "\"STRING\"", + "paramIndex" : "first", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'" + } ] +} -- !query @@ -583,13 +619,49 @@ CreateViewCommand `ts_view`, select '2011-11-11 11:11:11' str, false, false, Loc -- !query select str - timestamp'2011-11-11 11:11:11' from ts_view -- !query analysis -[Analyzer test output redacted due to nondeterminism] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "first", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "str - timestamp'2011-11-11 11:11:11'" + } ] +} -- !query select timestamp'2011-11-11 11:11:11' - str from ts_view -- !query analysis -[Analyzer test output redacted due to nondeterminism] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "timestamp'2011-11-11 11:11:11' - str" + } ] +} -- !query @@ -597,11 +669,11 @@ select timestamp'2011-11-11 11:11:11' + '1' -- !query analysis 
org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "actualDataType" : "\"TIMESTAMP\"", - "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "left" : "\"TIMESTAMP\"", + "right" : "\"DOUBLE\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\"" }, "queryContext" : [ { @@ -619,11 +691,11 @@ select '1' + timestamp'2011-11-11 11:11:11' -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "actualDataType" : "\"TIMESTAMP\"", - "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "left" : "\"DOUBLE\"", + "right" : "\"TIMESTAMP\"", "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\"" }, "queryContext" : [ { @@ -702,28 +774,28 @@ select date '2012-01-01' - interval 3 hours, -- !query select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') -- !query analysis -Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x] +Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x] +- OneRowRelation -- !query select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') -- !query analysis -Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x] +Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x] +- OneRowRelation -- !query select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') -- !query analysis -Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x] +Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x] +- OneRowRelation -- !query select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') -- !query analysis -Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), true) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), false) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_aggregates.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_aggregates.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_aggregates.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_aggregates.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_arithmetic.sql.out similarity index 99% rename from 
sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_arithmetic.sql.out index 30654d1d71e2b..caf997f6ccbb2 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_arithmetic.sql.out @@ -23,7 +23,7 @@ Project [try_add(2147483647, cast(1 as decimal(10,0))) AS try_add(2147483647, 1) -- !query SELECT try_add(2147483647, "1") -- !query analysis -Project [try_add(2147483647, 1) AS try_add(2147483647, 1)#xL] +Project [try_add(2147483647, 1) AS try_add(2147483647, 1)#x] +- OneRowRelation @@ -305,7 +305,7 @@ Project [try_subtract(2147483647, cast(-1 as decimal(10,0))) AS try_subtract(214 -- !query SELECT try_subtract(2147483647, "-1") -- !query analysis -Project [try_subtract(2147483647, -1) AS try_subtract(2147483647, -1)#xL] +Project [try_subtract(2147483647, -1) AS try_subtract(2147483647, -1)#x] +- OneRowRelation @@ -403,7 +403,7 @@ Project [try_multiply(2147483647, cast(-2 as decimal(10,0))) AS try_multiply(214 -- !query SELECT try_multiply(2147483647, "-2") -- !query analysis -Project [try_multiply(2147483647, -2) AS try_multiply(2147483647, -2)#xL] +Project [try_multiply(2147483647, -2) AS try_multiply(2147483647, -2)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_datetime_functions.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_datetime_functions.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_datetime_functions.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_element_at.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_element_at.sql.out rename to sql/core/src/test/resources/sql-tests/analyzer-results/nonansi/try_element_at.sql.out diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/pipe-operators.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/pipe-operators.sql.out index 47eb8f2417381..2e38ed137b415 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/pipe-operators.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/pipe-operators.sql.out @@ -1930,14 +1930,17 @@ Union false, false -- !query values (0, 1) tab(x, y) |> union table t +|> where x = 0 -- !query analysis Distinct +- Union false, false - :- Project [x#x, cast(y#x as string) AS y#x] + :- Project [x#x, cast(y#x as bigint) AS y#xL] : +- SubqueryAlias tab : +- LocalRelation [x#x, y#x] - +- SubqueryAlias spark_catalog.default.t - +- Relation spark_catalog.default.t[x#x,y#x] csv + +- Project [x#x, cast(y#x as bigint) AS y#xL] + +- Filter (x#x = 0) + +- SubqueryAlias spark_catalog.default.t + +- Relation spark_catalog.default.t[x#x,y#x] csv -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/predicate-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/predicate-functions.sql.out index 7e720995c44b4..55822a10041f5 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/predicate-functions.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/predicate-functions.sql.out
@@ -156,7 +156,7 @@ Project [(1 = 1) AS (1 = 1)#x]
 -- !query
 select 1 = '1'
 -- !query analysis
-Project [(1 = cast(1 as int)) AS (1 = 1)#x]
+Project [(cast(1 as bigint) = cast(1 as bigint)) AS (1 = 1)#x]
 +- OneRowRelation


@@ -177,28 +177,28 @@ Project [(cast(1.5 as double) = cast(1.51 as double)) AS (1.5 = 1.51)#x]
 -- !query
 select 1 > '1'
 -- !query analysis
-Project [(1 > cast(1 as int)) AS (1 > 1)#x]
+Project [(cast(1 as bigint) > cast(1 as bigint)) AS (1 > 1)#x]
 +- OneRowRelation


 -- !query
 select 2 > '1.0'
 -- !query analysis
-Project [(2 > cast(1.0 as int)) AS (2 > 1.0)#x]
+Project [(cast(2 as bigint) > cast(1.0 as bigint)) AS (2 > 1.0)#x]
 +- OneRowRelation


 -- !query
 select 2 > '2.0'
 -- !query analysis
-Project [(2 > cast(2.0 as int)) AS (2 > 2.0)#x]
+Project [(cast(2 as bigint) > cast(2.0 as bigint)) AS (2 > 2.0)#x]
 +- OneRowRelation


 -- !query
 select 2 > '2.2'
 -- !query analysis
-Project [(2 > cast(2.2 as int)) AS (2 > 2.2)#x]
+Project [(cast(2 as bigint) > cast(2.2 as bigint)) AS (2 > 2.2)#x]
 +- OneRowRelation


@@ -212,35 +212,35 @@ Project [(cast(1.5 as double) > cast(0.5 as double)) AS (1.5 > 0.5)#x]
 -- !query
 select to_date('2009-07-30 04:17:52') > to_date('2009-07-30 04:17:52')
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) > to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false)) AS (to_date(2009-07-30 04:17:52) > to_date(2009-07-30 04:17:52))#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) > to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true)) AS (to_date(2009-07-30 04:17:52) > to_date(2009-07-30 04:17:52))#x]
 +- OneRowRelation


 -- !query
 select to_date('2009-07-30 04:17:52') > '2009-07-30 04:17:52'
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) > cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) > 2009-07-30 04:17:52)#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) > cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) > 2009-07-30 04:17:52)#x]
 +- OneRowRelation


 -- !query
 select 1 >= '1'
 -- !query analysis
-Project [(1 >= cast(1 as int)) AS (1 >= 1)#x]
+Project [(cast(1 as bigint) >= cast(1 as bigint)) AS (1 >= 1)#x]
 +- OneRowRelation


 -- !query
 select 2 >= '1.0'
 -- !query analysis
-Project [(2 >= cast(1.0 as int)) AS (2 >= 1.0)#x]
+Project [(cast(2 as bigint) >= cast(1.0 as bigint)) AS (2 >= 1.0)#x]
 +- OneRowRelation


 -- !query
 select 2 >= '2.0'
 -- !query analysis
-Project [(2 >= cast(2.0 as int)) AS (2 >= 2.0)#x]
+Project [(cast(2 as bigint) >= cast(2.0 as bigint)) AS (2 >= 2.0)#x]
 +- OneRowRelation


@@ -261,35 +261,35 @@ Project [(cast(1.5 as double) >= cast(0.5 as double)) AS (1.5 >= 0.5)#x]
 -- !query
 select to_date('2009-07-30 04:17:52') >= to_date('2009-07-30 04:17:52')
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) >= to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false)) AS (to_date(2009-07-30 04:17:52) >= to_date(2009-07-30 04:17:52))#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) >= to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true)) AS (to_date(2009-07-30 04:17:52) >= to_date(2009-07-30 04:17:52))#x]
 +- OneRowRelation


 -- !query
 select to_date('2009-07-30 04:17:52') >= '2009-07-30 04:17:52'
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) >= cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) >= 2009-07-30 04:17:52)#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) >= cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) >= 2009-07-30 04:17:52)#x]
 +- OneRowRelation


 -- !query
 select 1 < '1'
 -- !query analysis
-Project [(1 < cast(1 as int)) AS (1 < 1)#x]
+Project [(cast(1 as bigint) < cast(1 as bigint)) AS (1 < 1)#x]
 +- OneRowRelation


 -- !query
 select 2 < '1.0'
 -- !query analysis
-Project [(2 < cast(1.0 as int)) AS (2 < 1.0)#x]
+Project [(cast(2 as bigint) < cast(1.0 as bigint)) AS (2 < 1.0)#x]
 +- OneRowRelation


 -- !query
 select 2 < '2.0'
 -- !query analysis
-Project [(2 < cast(2.0 as int)) AS (2 < 2.0)#x]
+Project [(cast(2 as bigint) < cast(2.0 as bigint)) AS (2 < 2.0)#x]
 +- OneRowRelation


@@ -310,35 +310,35 @@ Project [(cast(0.5 as double) < cast(1.5 as double)) AS (0.5 < 1.5)#x]
 -- !query
 select to_date('2009-07-30 04:17:52') < to_date('2009-07-30 04:17:52')
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) < to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false)) AS (to_date(2009-07-30 04:17:52) < to_date(2009-07-30 04:17:52))#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) < to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true)) AS (to_date(2009-07-30 04:17:52) < to_date(2009-07-30 04:17:52))#x]
 +- OneRowRelation


 -- !query
 select to_date('2009-07-30 04:17:52') < '2009-07-30 04:17:52'
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) < cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) < 2009-07-30 04:17:52)#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) < cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) < 2009-07-30 04:17:52)#x]
 +- OneRowRelation


 -- !query
 select 1 <= '1'
 -- !query analysis
-Project [(1 <= cast(1 as int)) AS (1 <= 1)#x]
+Project [(cast(1 as bigint) <= cast(1 as bigint)) AS (1 <= 1)#x]
 +- OneRowRelation


 -- !query
 select 2 <= '1.0'
 -- !query analysis
-Project [(2 <= cast(1.0 as int)) AS (2 <= 1.0)#x]
+Project [(cast(2 as bigint) <= cast(1.0 as bigint)) AS (2 <= 1.0)#x]
 +- OneRowRelation


 -- !query
 select 2 <= '2.0'
 -- !query analysis
-Project [(2 <= cast(2.0 as int)) AS (2 <= 2.0)#x]
+Project [(cast(2 as bigint) <= cast(2.0 as bigint)) AS (2 <= 2.0)#x]
 +- OneRowRelation


@@ -359,49 +359,49 @@ Project [(cast(0.5 as double) <= cast(1.5 as double)) AS (0.5 <= 1.5)#x]
 -- !query
 select to_date('2009-07-30 04:17:52') <= to_date('2009-07-30 04:17:52')
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) <= to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false)) AS (to_date(2009-07-30 04:17:52) <= to_date(2009-07-30 04:17:52))#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) <= to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true)) AS (to_date(2009-07-30 04:17:52) <= to_date(2009-07-30 04:17:52))#x]
 +- OneRowRelation


 -- !query
 select to_date('2009-07-30 04:17:52') <= '2009-07-30 04:17:52'
 -- !query analysis
-Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), false) <= cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) <= 2009-07-30 04:17:52)#x]
+Project [(to_date(2009-07-30 04:17:52, None, Some(America/Los_Angeles), true) <= cast(2009-07-30 04:17:52 as date)) AS (to_date(2009-07-30 04:17:52) <= 2009-07-30 04:17:52)#x]
 +- OneRowRelation


 -- !query
 select to_date('2017-03-01') = to_timestamp('2017-03-01 00:00:00')
 -- !query analysis
-Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), false) as timestamp) = to_timestamp(2017-03-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), false)) AS (to_date(2017-03-01) = to_timestamp(2017-03-01 00:00:00))#x]
+Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), true) as timestamp) = to_timestamp(2017-03-01 00:00:00, None, TimestampType, Some(America/Los_Angeles), true)) AS (to_date(2017-03-01) = to_timestamp(2017-03-01 00:00:00))#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2017-03-01 00:00:01') > to_date('2017-03-01')
 -- !query analysis
-Project [(to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), false) > cast(to_date(2017-03-01, None, Some(America/Los_Angeles), false) as timestamp)) AS (to_timestamp(2017-03-01 00:00:01) > to_date(2017-03-01))#x]
+Project [(to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), true) > cast(to_date(2017-03-01, None, Some(America/Los_Angeles), true) as timestamp)) AS (to_timestamp(2017-03-01 00:00:01) > to_date(2017-03-01))#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2017-03-01 00:00:01') >= to_date('2017-03-01')
 -- !query analysis
-Project [(to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), false) >= cast(to_date(2017-03-01, None, Some(America/Los_Angeles), false) as timestamp)) AS (to_timestamp(2017-03-01 00:00:01) >= to_date(2017-03-01))#x]
+Project [(to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), true) >= cast(to_date(2017-03-01, None, Some(America/Los_Angeles), true) as timestamp)) AS (to_timestamp(2017-03-01 00:00:01) >= to_date(2017-03-01))#x]
 +- OneRowRelation


 -- !query
 select to_date('2017-03-01') < to_timestamp('2017-03-01 00:00:01')
 -- !query analysis
-Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), false) as timestamp) < to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), false)) AS (to_date(2017-03-01) < to_timestamp(2017-03-01 00:00:01))#x]
+Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), true) as timestamp) < to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), true)) AS (to_date(2017-03-01) < to_timestamp(2017-03-01 00:00:01))#x]
 +- OneRowRelation


 -- !query
 select to_date('2017-03-01') <= to_timestamp('2017-03-01 00:00:01')
 -- !query analysis
-Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), false) as timestamp) <= to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), false)) AS (to_date(2017-03-01) <= to_timestamp(2017-03-01 00:00:01))#x]
+Project [(cast(to_date(2017-03-01, None, Some(America/Los_Angeles), true) as timestamp) <= to_timestamp(2017-03-01 00:00:01, None, TimestampType, Some(America/Los_Angeles), true)) AS (to_date(2017-03-01) <= to_timestamp(2017-03-01 00:00:01))#x]
 +- OneRowRelation


@@ -436,14 +436,14 @@ Project [cast(1 as decimal(11,1)) IN (cast(1.0 as decimal(11,1)),cast(2.0 as dec
 -- !query
 select 1 in ('2', '3', '4')
 -- !query analysis
-Project [cast(1 as string) IN (cast(2 as string),cast(3 as string),cast(4 as string)) AS (1 IN (2, 3, 4))#x]
+Project [cast(1 as bigint) IN (cast(2 as bigint),cast(3 as bigint),cast(4 as bigint)) AS (1 IN (2, 3, 4))#x]
 +- OneRowRelation


 -- !query
 select 1 in ('2', '3', '4', null)
 -- !query analysis
-Project [cast(1 as string) IN (cast(2 as string),cast(3 as string),cast(4 as string),cast(null as string)) AS (1 IN (2, 3, 4, NULL))#x]
+Project [cast(1 as bigint) IN (cast(2 as bigint),cast(3 as bigint),cast(4 as bigint),cast(null as bigint)) AS (1 IN (2, 3, 4, NULL))#x]
 +- OneRowRelation


@@ -492,14 +492,14 @@ Project [NOT cast(1 as decimal(11,1)) IN (cast(1.0 as decimal(11,1)),cast(2.0 as
 -- !query
 select 1 not in ('2', '3', '4')
 -- !query analysis
-Project [NOT cast(1 as string) IN (cast(2 as string),cast(3 as string),cast(4 as string)) AS (NOT (1 IN (2, 3, 4)))#x]
+Project [NOT cast(1 as bigint) IN (cast(2 as bigint),cast(3 as bigint),cast(4 as bigint)) AS (NOT (1 IN (2, 3, 4)))#x]
 +- OneRowRelation


 -- !query
 select 1 not in ('2', '3', '4', null)
 -- !query analysis
-Project [NOT cast(1 as string) IN (cast(2 as string),cast(3 as string),cast(4 as string),cast(null as string)) AS (NOT (1 IN (2, 3, 4, NULL)))#x]
+Project [NOT cast(1 as bigint) IN (cast(2 as bigint),cast(3 as bigint),cast(4 as bigint),cast(null as bigint)) AS (NOT (1 IN (2, 3, 4, NULL)))#x]
 +- OneRowRelation


@@ -548,7 +548,7 @@ Project [between(b, a, c) AS between(b, a, c)#x]
 -- !query
 select to_timestamp('2022-12-26 00:00:01') between to_date('2022-03-01') and to_date('2022-12-31')
 -- !query analysis
-Project [between(to_timestamp(2022-12-26 00:00:01, None, TimestampType, Some(America/Los_Angeles), false), to_date(2022-03-01, None, Some(America/Los_Angeles), false), to_date(2022-12-31, None, Some(America/Los_Angeles), false)) AS between(to_timestamp(2022-12-26 00:00:01), to_date(2022-03-01), to_date(2022-12-31))#x]
+Project [between(to_timestamp(2022-12-26 00:00:01, None, TimestampType, Some(America/Los_Angeles), true), to_date(2022-03-01, None, Some(America/Los_Angeles), true), to_date(2022-12-31, None, Some(America/Los_Angeles), true)) AS between(to_timestamp(2022-12-26 00:00:01), to_date(2022-03-01), to_date(2022-12-31))#x]
 +- OneRowRelation


@@ -589,7 +589,7 @@ Project [NOT between(b, a, c) AS (NOT between(b, a, c))#x]
 -- !query
 select to_timestamp('2022-12-26 00:00:01') not between to_date('2022-03-01') and to_date('2022-12-31')
 -- !query analysis
-Project [NOT between(to_timestamp(2022-12-26 00:00:01, None, TimestampType, Some(America/Los_Angeles), false), to_date(2022-03-01, None, Some(America/Los_Angeles), false), to_date(2022-12-31, None, Some(America/Los_Angeles), false)) AS (NOT between(to_timestamp(2022-12-26 00:00:01), to_date(2022-03-01), to_date(2022-12-31)))#x]
+Project [NOT between(to_timestamp(2022-12-26 00:00:01, None, TimestampType, Some(America/Los_Angeles), true), to_date(2022-03-01, None, Some(America/Los_Angeles), true), to_date(2022-12-31, None, Some(America/Los_Angeles), true)) AS (NOT between(to_timestamp(2022-12-26 00:00:01), to_date(2022-03-01), to_date(2022-12-31)))#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/subexp-elimination.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/subexp-elimination.sql.out
index 754b05bfa6fed..41807814adfb6 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/subexp-elimination.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/subexp-elimination.sql.out
@@ -51,7 +51,7 @@ Project [if (isnull(from_json(StructField(a,IntegerType,true), StructField(b,Str
 -- !query
 SELECT case when from_json(a, 'struct').a > 5 then from_json(a, 'struct').b when from_json(a, 'struct').a > 4 then from_json(a, 'struct').b + 1 else from_json(a, 'struct').b + 2 end FROM testData
 -- !query analysis
-Project [CASE WHEN (from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).a > 5) THEN from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b WHEN (from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).a > 4) THEN cast((cast(from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b as double) + cast(1 as double)) as string) ELSE cast((cast(from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b as double) + cast(2 as double)) as string) END AS CASE WHEN (from_json(a).a > 5) THEN from_json(a).b WHEN (from_json(a).a > 4) THEN (from_json(a).b + 1) ELSE (from_json(a).b + 2) END#x]
+Project [CASE WHEN (from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).a > 5) THEN cast(from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b as bigint) WHEN (from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).a > 4) THEN (cast(from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b as bigint) + cast(1 as bigint)) ELSE (cast(from_json(StructField(a,IntegerType,true), StructField(b,StringType,true), a#x, Some(America/Los_Angeles), false).b as bigint) + cast(2 as bigint)) END AS CASE WHEN (from_json(a).a > 5) THEN from_json(a).b WHEN (from_json(a).a > 4) THEN (from_json(a).b + 1) ELSE (from_json(a).b + 2) END#xL]
 +- SubqueryAlias testdata
    +- View (`testData`, [a#x, b#x])
       +- Project [cast(a#x as string) AS a#x, cast(b#x as string) AS b#x]
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
index 4ff0222d6e965..f8f23009258ab 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
@@ -315,7 +315,7 @@ WHERE t1c + 5 = (SELECT max(t2e) FROM t2)
 -- !query analysis
 Project [t1a#x, t1b#x, t1g#x]
-+- Filter (cast((t1c#x + 5) as float) = scalar-subquery#x [])
++- Filter (cast((t1c#x + 5) as double) = cast(scalar-subquery#x [] as double))
    :  +- Aggregate [max(t2e#x) AS max(t2e)#x]
    :     +- SubqueryAlias t2
    :        +- View (`t2`, [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x])
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ltz.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ltz.sql.out
index c371876fde6a6..039556b59b0ba 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ltz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ltz.sql.out
@@ -8,42 +8,42 @@ select timestamp_ltz'2016-12-31 00:12:00', timestamp_ltz'2016-12-31'
 -- !query
 select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_timestamp_ltz(cast(null as string), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(NULL)#x, to_timestamp_ltz(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(2016-12-31 00:12:00)#x, to_timestamp_ltz(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(2016-12-31, yyyy-MM-dd)#x]
+Project [to_timestamp_ltz(cast(null as string), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(NULL)#x, to_timestamp_ltz(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(2016-12-31 00:12:00)#x, to_timestamp_ltz(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp_ltz(to_date(null)), to_timestamp_ltz(to_date('2016-12-31'))
 -- !query analysis
-Project [to_timestamp_ltz(to_date(cast(null as string), None, Some(America/Los_Angeles), false), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(to_date(NULL))#x, to_timestamp_ltz(to_date(2016-12-31, None, Some(America/Los_Angeles), false), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(to_date(2016-12-31))#x]
+Project [to_timestamp_ltz(to_date(cast(null as string), None, Some(America/Los_Angeles), true), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(to_date(NULL))#x, to_timestamp_ltz(to_date(2016-12-31, None, Some(America/Los_Angeles), true), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(to_date(2016-12-31))#x]
 +- OneRowRelation


 -- !query
 select to_timestamp_ltz(to_timestamp_ntz(null)), to_timestamp_ltz(to_timestamp_ntz('2016-12-31 00:12:00'))
 -- !query analysis
-Project [to_timestamp_ltz(to_timestamp_ntz(cast(null as string), None, TimestampNTZType, Some(America/Los_Angeles), false), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(to_timestamp_ntz(NULL))#x, to_timestamp_ltz(to_timestamp_ntz(2016-12-31 00:12:00, None, TimestampNTZType, Some(America/Los_Angeles), false), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp_ltz(to_timestamp_ntz(2016-12-31 00:12:00))#x]
+Project [to_timestamp_ltz(to_timestamp_ntz(cast(null as string), None, TimestampNTZType, Some(America/Los_Angeles), true), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(to_timestamp_ntz(NULL))#x, to_timestamp_ltz(to_timestamp_ntz(2016-12-31 00:12:00, None, TimestampNTZType, Some(America/Los_Angeles), true), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp_ltz(to_timestamp_ntz(2016-12-31 00:12:00))#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678)
 -- !query analysis
-Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678)#x]
+Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678, 'CET')
 -- !query analysis
-Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), false, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678, CET)#x]
+Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), true, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678, CET)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007)
 -- !query analysis
-Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 60.007)#x]
+Project [make_timestamp_ltz(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp_ltz(2021, 7, 11, 6, 30, 60.007)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
index c285169c572e5..e92a392e22b67 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
@@ -9,28 +9,28 @@ Project [2016-12-31 00:12:00 AS TIMESTAMP_NTZ '2016-12-31 00:12:00'#x, 2016-12-3
 -- !query
 select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_timestamp_ntz(cast(null as string), None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(NULL)#x, to_timestamp_ntz(2016-12-31 00:12:00, None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(2016-12-31 00:12:00)#x, to_timestamp_ntz(2016-12-31, Some(yyyy-MM-dd), TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(2016-12-31, yyyy-MM-dd)#x]
+Project [to_timestamp_ntz(cast(null as string), None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(NULL)#x, to_timestamp_ntz(2016-12-31 00:12:00, None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(2016-12-31 00:12:00)#x, to_timestamp_ntz(2016-12-31, Some(yyyy-MM-dd), TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp_ntz(to_date(null)), to_timestamp_ntz(to_date('2016-12-31'))
 -- !query analysis
-Project [to_timestamp_ntz(to_date(cast(null as string), None, Some(America/Los_Angeles), false), None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(to_date(NULL))#x, to_timestamp_ntz(to_date(2016-12-31, None, Some(America/Los_Angeles), false), None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(to_date(2016-12-31))#x]
+Project [to_timestamp_ntz(to_date(cast(null as string), None, Some(America/Los_Angeles), true), None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(to_date(NULL))#x, to_timestamp_ntz(to_date(2016-12-31, None, Some(America/Los_Angeles), true), None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(to_date(2016-12-31))#x]
 +- OneRowRelation


 -- !query
 select to_timestamp_ntz(to_timestamp_ltz(null)), to_timestamp_ntz(to_timestamp_ltz('2016-12-31 00:12:00'))
 -- !query analysis
-Project [to_timestamp_ntz(to_timestamp_ltz(cast(null as string), None, TimestampType, Some(America/Los_Angeles), false), None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(to_timestamp_ltz(NULL))#x, to_timestamp_ntz(to_timestamp_ltz(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), false), None, TimestampNTZType, Some(America/Los_Angeles), false) AS to_timestamp_ntz(to_timestamp_ltz(2016-12-31 00:12:00))#x]
+Project [to_timestamp_ntz(to_timestamp_ltz(cast(null as string), None, TimestampType, Some(America/Los_Angeles), true), None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(to_timestamp_ltz(NULL))#x, to_timestamp_ntz(to_timestamp_ltz(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), true), None, TimestampNTZType, Some(America/Los_Angeles), true) AS to_timestamp_ntz(to_timestamp_ltz(2016-12-31 00:12:00))#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678)
 -- !query analysis
-Project [make_timestamp_ntz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampNTZType) AS make_timestamp_ntz(2021, 7, 11, 6, 30, 45.678)#x]
+Project [make_timestamp_ntz(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampNTZType) AS make_timestamp_ntz(2021, 7, 11, 6, 30, 45.678)#x]
 +- OneRowRelation


@@ -60,7 +60,7 @@ org.apache.spark.sql.AnalysisException
 -- !query
 SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007)
 -- !query analysis
-Project [make_timestamp_ntz(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampNTZType) AS make_timestamp_ntz(2021, 7, 11, 6, 30, 60.007)#x]
+Project [make_timestamp_ntz(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampNTZType) AS make_timestamp_ntz(2021, 7, 11, 6, 30, 60.007)#x]
 +- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
index dcfd783b648f8..560974d28c545 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
@@ -90,70 +90,70 @@ Project [(localtimestamp(Some(America/Los_Angeles)) = localtimestamp(Some(Americ
 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(45.678 as decimal(16,6)), Some(CET), Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 45.678, CET)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
 -- !query analysis
-Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x]
+Project [make_timestamp(2021, 7, 11, 6, 30, cast(60.007 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(2021, 7, 11, 6, 30, 60.007)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 1)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(1 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 1)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 60)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(60 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 60)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 61)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(61 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 61)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, null)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(null as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, NULL)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 59.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(59.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 59.999999)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(99.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 99.999999)#x]
 +- OneRowRelation


 -- !query
 SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999)
 -- !query analysis
-Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), false, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x]
+Project [make_timestamp(1, 1, 1, 1, 1, cast(999.999999 as decimal(16,6)), None, Some(America/Los_Angeles), true, TimestampType) AS make_timestamp(1, 1, 1, 1, 1, 999.999999)#x]
 +- OneRowRelation


@@ -302,231 +302,231 @@ select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-
 -- !query
 select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
 -- !query analysis
-Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x]
+Project [to_timestamp(cast(null as string), None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(NULL)#x, to_timestamp(2016-12-31 00:12:00, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31 00:12:00)#x, to_timestamp(2016-12-31, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2016-12-31, yyyy-MM-dd)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp(1)
 -- !query analysis
-Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(1)#x]
+Project [to_timestamp(1, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(1)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12., Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.0, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123UTC, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12345CST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123456PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234567PST, Some(yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(123456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 -- !query analysis
-Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
+Project [to_timestamp(223456 2019-10-06 10:11:12.123456PST, Some(SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.1234, Some(yyyy-MM-dd HH:mm:ss.[SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.123, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12, Some(yyyy-MM-dd HH:mm:ss[.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11:12.12, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
 -- !query analysis
-Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
+Project [to_timestamp(2019-10-06 10:11, Some(yyyy-MM-dd HH:mm[:ss.SSSSSS]), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS])#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
 -- !query analysis
-Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x]
+Project [to_timestamp(2019-10-06S10:11:12.12345, Some(yyyy-MM-dd'S'HH:mm:ss.SSSSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS)#x]
 +- OneRowRelation


 -- !query
 select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
 -- !query analysis
-Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
+Project [to_timestamp(12.12342019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x]
 +- OneRowRelation


 -- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyyyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1232019-10-06S10:11, Some(ss.SSSSyy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm") -- !query analysis -Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x] +Project [to_timestamp(12.1234019-10-06S10:11, Some(ss.SSSSy-MM-dd'S'HH:mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'") -- !query analysis -Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06S, yyyy-MM-dd'S')#x] +Project [to_timestamp(2019-10-06S, Some(yyyy-MM-dd'S'), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06S, yyyy-MM-dd'S')#x] +- OneRowRelation -- !query select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd") -- !query analysis -Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x] +Project [to_timestamp(S2019-10-06, Some('S'yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(S2019-10-06, 'S'yyyy-MM-dd)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS") -- !query analysis -Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x] +Project [to_timestamp(2019-10-06T10:11:12'12, Some(yyyy-MM-dd'T'HH:mm:ss''SSSS), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS)#x] +- OneRowRelation -- !query select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''") -- !query analysis -Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x] +Project [to_timestamp(2019-10-06T10:11:12', Some(yyyy-MM-dd'T'HH:mm:ss''), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss'')#x] +- OneRowRelation -- !query select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss") -- !query analysis -Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, 
Some(America/Los_Angeles), false) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x] +Project [to_timestamp('2019-10-06T10:11:12, Some(''yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss)#x] +- OneRowRelation -- !query select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss") -- !query analysis -Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x] +Project [to_timestamp(P2019-10-06T10:11:12, Some('P'yyyy-MM-dd'T'HH:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss)#x] +- OneRowRelation -- !query select to_timestamp("16", "dd") -- !query analysis -Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(16, dd)#x] +Project [to_timestamp(16, Some(dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(16, dd)#x] +- OneRowRelation -- !query select to_timestamp("02-29", "MM-dd") -- !query analysis -Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(02-29, MM-dd)#x] +Project [to_timestamp(02-29, Some(MM-dd), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(02-29, MM-dd)#x] +- OneRowRelation -- !query select to_timestamp("2019 40", "yyyy mm") -- !query analysis -Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 40, yyyy mm)#x] +Project [to_timestamp(2019 40, Some(yyyy mm), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 40, yyyy mm)#x] +- OneRowRelation -- !query select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss") -- !query analysis -Project [to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x] +Project [to_timestamp(2019 10:10:10, Some(yyyy hh:mm:ss), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019 10:10:10, yyyy hh:mm:ss)#x] +- OneRowRelation @@ -551,49 +551,13 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' -- !query select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:10\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'" - } ] -} +[Analyzer test output redacted due to nondeterminism] -- !query select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:11\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 
11:11:10')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'" - } ] -} +[Analyzer test output redacted due to nondeterminism] -- !query @@ -619,49 +583,13 @@ CreateViewCommand `ts_view`, select '2011-11-11 11:11:11' str, false, false, Loc -- !query select str - timestamp'2011-11-11 11:11:11' from ts_view -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "str - timestamp'2011-11-11 11:11:11'" - } ] -} +[Analyzer test output redacted due to nondeterminism] -- !query select timestamp'2011-11-11 11:11:11' - str from ts_view -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "timestamp'2011-11-11 11:11:11' - str" - } ] -} +[Analyzer test output redacted due to nondeterminism] -- !query @@ -669,11 +597,11 @@ select timestamp'2011-11-11 11:11:11' + '1' -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\"" }, "queryContext" : [ { @@ -691,11 +619,11 @@ select '1' + timestamp'2011-11-11 11:11:11' -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\"" }, "queryContext" : [ { @@ -774,28 +702,28 @@ select date '2012-01-01' - interval 3 hours, -- !query select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') -- !query analysis -Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x] +Project [to_timestamp(2019-10-06 A, Some(yyyy-MM-dd GGGGG), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2019-10-06 A, yyyy-MM-dd GGGGG)#x] +- OneRowRelation -- !query select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') -- !query analysis -Project 
[to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x] +Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEEE)#x] +- OneRowRelation -- !query select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') -- !query analysis -Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x] +Project [to_timestamp(22 05 2020 Friday, Some(dd MM yyyy EEEEE), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#x] +- OneRowRelation -- !query select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') -- !query analysis -Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), false) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +Project [unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE, Some(America/Los_Angeles), true) AS unix_timestamp(22 05 2020 Friday, dd MM yyyy EEEEE)#xL] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/transform.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/transform.sql.out index 7cf8a2886069d..f0d3b8b999a29 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/transform.sql.out @@ -786,8 +786,8 @@ FROM( SELECT (b + 1) AS result ORDER BY result -- !query analysis -Sort [result#x ASC NULLS FIRST], true -+- Project [(cast(b#x as double) + cast(1 as double)) AS result#x] +Sort [result#xL ASC NULLS FIRST], true ++- Project [(cast(b#x as bigint) + cast(1 as bigint)) AS result#xL] +- SubqueryAlias t +- ScriptTransformation cat, [a#x, b#x], ScriptInputOutputSchema(List(),List(),None,None,List(),List(),None,None,false) +- Aggregate [a#x], [a#x, sum(b#x) AS sum(b)#xL] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out index caf997f6ccbb2..30654d1d71e2b 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out @@ -23,7 +23,7 @@ Project [try_add(2147483647, cast(1 as decimal(10,0))) AS try_add(2147483647, 1) -- !query SELECT try_add(2147483647, "1") -- !query analysis -Project [try_add(2147483647, 1) AS try_add(2147483647, 1)#x] +Project [try_add(2147483647, 1) AS try_add(2147483647, 1)#xL] +- OneRowRelation @@ -305,7 +305,7 @@ Project [try_subtract(2147483647, cast(-1 as decimal(10,0))) AS try_subtract(214 -- !query SELECT try_subtract(2147483647, "-1") -- !query analysis -Project [try_subtract(2147483647, -1) AS try_subtract(2147483647, -1)#x] +Project [try_subtract(2147483647, -1) AS try_subtract(2147483647, -1)#xL] +- OneRowRelation @@ -403,7 +403,7 @@ Project [try_multiply(2147483647, cast(-2 as decimal(10,0))) AS try_multiply(214 -- !query SELECT try_multiply(2147483647, "-2") -- !query analysis -Project [try_multiply(2147483647, -2) AS try_multiply(2147483647, -2)#x] +Project [try_multiply(2147483647, -2) AS try_multiply(2147483647, -2)#xL] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/binaryComparison.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/binaryComparison.sql.out index d15418c17b730..1395cc350db72 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/binaryComparison.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/binaryComparison.sql.out @@ -10,271 +10,583 @@ CreateViewCommand `t`, SELECT 1, false, false, LocalTempView, UNSUPPORTED, true -- !query SELECT cast(1 as binary) = '1' FROM t -- !query analysis -Project [(cast(1 as binary) = cast(1 as binary)) AS (CAST(1 AS BINARY) = 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) > '2' FROM t -- !query analysis -Project [(cast(1 as binary) > cast(2 as binary)) AS (CAST(1 AS BINARY) > 2)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) >= '2' FROM t -- !query analysis -Project [(cast(1 as binary) >= cast(2 as binary)) AS (CAST(1 AS BINARY) >= 2)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) < '2' FROM t -- !query analysis -Project [(cast(1 as binary) < cast(2 as binary)) AS (CAST(1 AS BINARY) < 2)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as 
binary) <= '2' FROM t -- !query analysis -Project [(cast(1 as binary) <= cast(2 as binary)) AS (CAST(1 AS BINARY) <= 2)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as binary) = cast(2 as binary)) AS (NOT (CAST(1 AS BINARY) = 2))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as binary) = cast(cast(null as string) as binary)) AS (CAST(1 AS BINARY) = CAST(NULL AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as binary) > cast(cast(null as string) as binary)) AS (CAST(1 AS BINARY) > CAST(NULL AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) >= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as binary) >= cast(cast(null as string) as binary)) AS (CAST(1 AS BINARY) >= CAST(NULL AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + 
"errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as binary) < cast(cast(null as string) as binary)) AS (CAST(1 AS BINARY) < CAST(NULL AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as binary) <= cast(cast(null as string) as binary)) AS (CAST(1 AS BINARY) <= CAST(NULL AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <> cast(null as string) FROM t -- !query analysis -Project [NOT (cast(1 as binary) = cast(cast(null as string) as binary)) AS (NOT (CAST(1 AS BINARY) = CAST(NULL AS STRING)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '1' = cast(1 as binary) FROM t -- !query analysis -Project [(cast(1 as binary) = cast(1 as binary)) AS (1 = CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + 
"objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' > cast(1 as binary) FROM t -- !query analysis -Project [(cast(2 as binary) > cast(1 as binary)) AS (2 > CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' >= cast(1 as binary) FROM t -- !query analysis -Project [(cast(2 as binary) >= cast(1 as binary)) AS (2 >= CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' < cast(1 as binary) FROM t -- !query analysis -Project [(cast(2 as binary) < cast(1 as binary)) AS (2 < CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' <= cast(1 as binary) FROM t -- !query analysis -Project [(cast(2 as binary) <= cast(1 as binary)) AS (2 <= CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' <> cast(1 as binary) FROM t -- !query analysis -Project [NOT (cast(2 as binary) = cast(1 as binary)) AS (NOT (2 = CAST(1 AS BINARY)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" 
: "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) = cast(1 as binary) FROM t -- !query analysis -Project [(cast(cast(null as string) as binary) = cast(1 as binary)) AS (CAST(NULL AS STRING) = CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) > cast(1 as binary) FROM t -- !query analysis -Project [(cast(cast(null as string) as binary) > cast(1 as binary)) AS (CAST(NULL AS STRING) > CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) >= cast(1 as binary) FROM t -- !query analysis -Project [(cast(cast(null as string) as binary) >= cast(1 as binary)) AS (CAST(NULL AS STRING) >= CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) < cast(1 as binary) FROM t -- !query analysis -Project [(cast(cast(null as string) as binary) < cast(1 as binary)) AS (CAST(NULL AS STRING) < CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, 
+ "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) <= cast(1 as binary) FROM t -- !query analysis -Project [(cast(cast(null as string) as binary) <= cast(1 as binary)) AS (CAST(NULL AS STRING) <= CAST(1 AS BINARY))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) <> cast(1 as binary) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as binary) = cast(1 as binary)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS BINARY)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as tinyint) = '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (CAST(1 AS TINYINT) = 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) = cast(1 as bigint)) AS (CAST(1 AS TINYINT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -285,7 +597,7 @@ Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (CAST(1 AS TINYINT) = 1)#x -- !query SELECT cast(1 as tinyint) > '2' FROM t -- !query analysis -Project [(cast(1 as tinyint) > cast(2 as tinyint)) AS (CAST(1 AS TINYINT) > 2)#x] +Project [(cast(cast(1 as tinyint) as bigint) > cast(2 as bigint)) AS (CAST(1 AS TINYINT) > 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -296,7 +608,7 @@ Project [(cast(1 as tinyint) > cast(2 as tinyint)) AS (CAST(1 AS TINYINT) > 2)#x -- !query SELECT cast(1 as tinyint) >= '2' FROM t -- !query analysis -Project [(cast(1 as tinyint) >= cast(2 as tinyint)) AS (CAST(1 AS TINYINT) >= 2)#x] +Project [(cast(cast(1 as tinyint) as bigint) >= cast(2 as bigint)) AS (CAST(1 AS TINYINT) >= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -307,7 +619,7 @@ Project [(cast(1 as tinyint) >= cast(2 as tinyint)) AS (CAST(1 AS TINYINT) >= 2) -- !query SELECT cast(1 as tinyint) < '2' FROM t -- !query analysis -Project [(cast(1 as tinyint) < cast(2 as tinyint)) AS (CAST(1 AS TINYINT) < 2)#x] +Project [(cast(cast(1 as tinyint) as bigint) < cast(2 as bigint)) AS (CAST(1 AS TINYINT) < 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -318,7 +630,7 @@ Project [(cast(1 as tinyint) < cast(2 as tinyint)) AS (CAST(1 AS TINYINT) < 2)#x -- !query SELECT cast(1 as tinyint) 
<= '2' FROM t -- !query analysis -Project [(cast(1 as tinyint) <= cast(2 as tinyint)) AS (CAST(1 AS TINYINT) <= 2)#x] +Project [(cast(cast(1 as tinyint) as bigint) <= cast(2 as bigint)) AS (CAST(1 AS TINYINT) <= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -329,7 +641,7 @@ Project [(cast(1 as tinyint) <= cast(2 as tinyint)) AS (CAST(1 AS TINYINT) <= 2) -- !query SELECT cast(1 as tinyint) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as tinyint) = cast(2 as tinyint)) AS (NOT (CAST(1 AS TINYINT) = 2))#x] +Project [NOT (cast(cast(1 as tinyint) as bigint) = cast(2 as bigint)) AS (NOT (CAST(1 AS TINYINT) = 2))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -340,7 +652,7 @@ Project [NOT (cast(1 as tinyint) = cast(2 as tinyint)) AS (NOT (CAST(1 AS TINYIN -- !query SELECT cast(1 as tinyint) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(cast(null as string) as tinyint)) AS (CAST(1 AS TINYINT) = CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as tinyint) as bigint) = cast(cast(null as string) as bigint)) AS (CAST(1 AS TINYINT) = CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -351,7 +663,7 @@ Project [(cast(1 as tinyint) = cast(cast(null as string) as tinyint)) AS (CAST(1 -- !query SELECT cast(1 as tinyint) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as tinyint) > cast(cast(null as string) as tinyint)) AS (CAST(1 AS TINYINT) > CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as tinyint) as bigint) > cast(cast(null as string) as bigint)) AS (CAST(1 AS TINYINT) > CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -362,7 +674,7 @@ Project [(cast(1 as tinyint) > cast(cast(null as string) as tinyint)) AS (CAST(1 -- !query SELECT cast(1 as tinyint) >= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as tinyint) >= cast(cast(null as string) as tinyint)) AS (CAST(1 AS TINYINT) >= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as tinyint) as bigint) >= cast(cast(null as string) as bigint)) AS (CAST(1 AS TINYINT) >= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -373,7 +685,7 @@ Project [(cast(1 as tinyint) >= cast(cast(null as string) as tinyint)) AS (CAST( -- !query SELECT cast(1 as tinyint) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as tinyint) < cast(cast(null as string) as tinyint)) AS (CAST(1 AS TINYINT) < CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as tinyint) as bigint) < cast(cast(null as string) as bigint)) AS (CAST(1 AS TINYINT) < CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -384,7 +696,7 @@ Project [(cast(1 as tinyint) < cast(cast(null as string) as tinyint)) AS (CAST(1 -- !query SELECT cast(1 as tinyint) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as tinyint) <= cast(cast(null as string) as tinyint)) AS (CAST(1 AS TINYINT) <= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as tinyint) as bigint) <= cast(cast(null as string) as bigint)) AS (CAST(1 AS TINYINT) <= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -395,7 +707,7 @@ Project [(cast(1 as tinyint) <= cast(cast(null as string) as tinyint)) AS (CAST( -- !query SELECT cast(1 as tinyint) <> cast(null as string) FROM t -- !query analysis -Project [NOT 
(cast(1 as tinyint) = cast(cast(null as string) as tinyint)) AS (NOT (CAST(1 AS TINYINT) = CAST(NULL AS STRING)))#x] +Project [NOT (cast(cast(1 as tinyint) as bigint) = cast(cast(null as string) as bigint)) AS (NOT (CAST(1 AS TINYINT) = CAST(NULL AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -406,7 +718,7 @@ Project [NOT (cast(1 as tinyint) = cast(cast(null as string) as tinyint)) AS (NO -- !query SELECT '1' = cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (1 = CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as tinyint) as bigint)) AS (1 = CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -417,7 +729,7 @@ Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (1 = CAST(1 AS TINYINT))#x -- !query SELECT '2' > cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(2 as tinyint) > cast(1 as tinyint)) AS (2 > CAST(1 AS TINYINT))#x] +Project [(cast(2 as bigint) > cast(cast(1 as tinyint) as bigint)) AS (2 > CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -428,7 +740,7 @@ Project [(cast(2 as tinyint) > cast(1 as tinyint)) AS (2 > CAST(1 AS TINYINT))#x -- !query SELECT '2' >= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(2 as tinyint) >= cast(1 as tinyint)) AS (2 >= CAST(1 AS TINYINT))#x] +Project [(cast(2 as bigint) >= cast(cast(1 as tinyint) as bigint)) AS (2 >= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -439,7 +751,7 @@ Project [(cast(2 as tinyint) >= cast(1 as tinyint)) AS (2 >= CAST(1 AS TINYINT)) -- !query SELECT '2' < cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(2 as tinyint) < cast(1 as tinyint)) AS (2 < CAST(1 AS TINYINT))#x] +Project [(cast(2 as bigint) < cast(cast(1 as tinyint) as bigint)) AS (2 < CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -450,7 +762,7 @@ Project [(cast(2 as tinyint) < cast(1 as tinyint)) AS (2 < CAST(1 AS TINYINT))#x -- !query SELECT '2' <= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(2 as tinyint) <= cast(1 as tinyint)) AS (2 <= CAST(1 AS TINYINT))#x] +Project [(cast(2 as bigint) <= cast(cast(1 as tinyint) as bigint)) AS (2 <= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -461,7 +773,7 @@ Project [(cast(2 as tinyint) <= cast(1 as tinyint)) AS (2 <= CAST(1 AS TINYINT)) -- !query SELECT '2' <> cast(1 as tinyint) FROM t -- !query analysis -Project [NOT (cast(2 as tinyint) = cast(1 as tinyint)) AS (NOT (2 = CAST(1 AS TINYINT)))#x] +Project [NOT (cast(2 as bigint) = cast(cast(1 as tinyint) as bigint)) AS (NOT (2 = CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -472,7 +784,7 @@ Project [NOT (cast(2 as tinyint) = cast(1 as tinyint)) AS (NOT (2 = CAST(1 AS TI -- !query SELECT cast(null as string) = cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(null as string) as tinyint) = cast(1 as tinyint)) AS (CAST(NULL AS STRING) = CAST(1 AS TINYINT))#x] +Project [(cast(cast(null as string) as bigint) = cast(cast(1 as tinyint) as bigint)) AS (CAST(NULL AS STRING) = CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -483,7 +795,7 @@ Project [(cast(cast(null as string) as tinyint) = cast(1 as tinyint)) AS (CAST(N 
-- !query SELECT cast(null as string) > cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(null as string) as tinyint) > cast(1 as tinyint)) AS (CAST(NULL AS STRING) > CAST(1 AS TINYINT))#x] +Project [(cast(cast(null as string) as bigint) > cast(cast(1 as tinyint) as bigint)) AS (CAST(NULL AS STRING) > CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -494,7 +806,7 @@ Project [(cast(cast(null as string) as tinyint) > cast(1 as tinyint)) AS (CAST(N -- !query SELECT cast(null as string) >= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(null as string) as tinyint) >= cast(1 as tinyint)) AS (CAST(NULL AS STRING) >= CAST(1 AS TINYINT))#x] +Project [(cast(cast(null as string) as bigint) >= cast(cast(1 as tinyint) as bigint)) AS (CAST(NULL AS STRING) >= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -505,7 +817,7 @@ Project [(cast(cast(null as string) as tinyint) >= cast(1 as tinyint)) AS (CAST( -- !query SELECT cast(null as string) < cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(null as string) as tinyint) < cast(1 as tinyint)) AS (CAST(NULL AS STRING) < CAST(1 AS TINYINT))#x] +Project [(cast(cast(null as string) as bigint) < cast(cast(1 as tinyint) as bigint)) AS (CAST(NULL AS STRING) < CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -516,7 +828,7 @@ Project [(cast(cast(null as string) as tinyint) < cast(1 as tinyint)) AS (CAST(N -- !query SELECT cast(null as string) <= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(null as string) as tinyint) <= cast(1 as tinyint)) AS (CAST(NULL AS STRING) <= CAST(1 AS TINYINT))#x] +Project [(cast(cast(null as string) as bigint) <= cast(cast(1 as tinyint) as bigint)) AS (CAST(NULL AS STRING) <= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -527,7 +839,7 @@ Project [(cast(cast(null as string) as tinyint) <= cast(1 as tinyint)) AS (CAST( -- !query SELECT cast(null as string) <> cast(1 as tinyint) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as tinyint) = cast(1 as tinyint)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS TINYINT)))#x] +Project [NOT (cast(cast(null as string) as bigint) = cast(cast(1 as tinyint) as bigint)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -538,7 +850,7 @@ Project [NOT (cast(cast(null as string) as tinyint) = cast(1 as tinyint)) AS (NO -- !query SELECT cast(1 as smallint) = '1' FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(1 as smallint)) AS (CAST(1 AS SMALLINT) = 1)#x] +Project [(cast(cast(1 as smallint) as bigint) = cast(1 as bigint)) AS (CAST(1 AS SMALLINT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -549,7 +861,7 @@ Project [(cast(1 as smallint) = cast(1 as smallint)) AS (CAST(1 AS SMALLINT) = 1 -- !query SELECT cast(1 as smallint) > '2' FROM t -- !query analysis -Project [(cast(1 as smallint) > cast(2 as smallint)) AS (CAST(1 AS SMALLINT) > 2)#x] +Project [(cast(cast(1 as smallint) as bigint) > cast(2 as bigint)) AS (CAST(1 AS SMALLINT) > 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -560,7 +872,7 @@ Project [(cast(1 as smallint) > cast(2 as smallint)) AS (CAST(1 AS SMALLINT) > 2 -- !query SELECT cast(1 as smallint) >= '2' FROM t -- 
!query analysis -Project [(cast(1 as smallint) >= cast(2 as smallint)) AS (CAST(1 AS SMALLINT) >= 2)#x] +Project [(cast(cast(1 as smallint) as bigint) >= cast(2 as bigint)) AS (CAST(1 AS SMALLINT) >= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -571,7 +883,7 @@ Project [(cast(1 as smallint) >= cast(2 as smallint)) AS (CAST(1 AS SMALLINT) >= -- !query SELECT cast(1 as smallint) < '2' FROM t -- !query analysis -Project [(cast(1 as smallint) < cast(2 as smallint)) AS (CAST(1 AS SMALLINT) < 2)#x] +Project [(cast(cast(1 as smallint) as bigint) < cast(2 as bigint)) AS (CAST(1 AS SMALLINT) < 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -582,7 +894,7 @@ Project [(cast(1 as smallint) < cast(2 as smallint)) AS (CAST(1 AS SMALLINT) < 2 -- !query SELECT cast(1 as smallint) <= '2' FROM t -- !query analysis -Project [(cast(1 as smallint) <= cast(2 as smallint)) AS (CAST(1 AS SMALLINT) <= 2)#x] +Project [(cast(cast(1 as smallint) as bigint) <= cast(2 as bigint)) AS (CAST(1 AS SMALLINT) <= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -593,7 +905,7 @@ Project [(cast(1 as smallint) <= cast(2 as smallint)) AS (CAST(1 AS SMALLINT) <= -- !query SELECT cast(1 as smallint) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as smallint) = cast(2 as smallint)) AS (NOT (CAST(1 AS SMALLINT) = 2))#x] +Project [NOT (cast(cast(1 as smallint) as bigint) = cast(2 as bigint)) AS (NOT (CAST(1 AS SMALLINT) = 2))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -604,7 +916,7 @@ Project [NOT (cast(1 as smallint) = cast(2 as smallint)) AS (NOT (CAST(1 AS SMAL -- !query SELECT cast(1 as smallint) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(cast(null as string) as smallint)) AS (CAST(1 AS SMALLINT) = CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as smallint) as bigint) = cast(cast(null as string) as bigint)) AS (CAST(1 AS SMALLINT) = CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -615,7 +927,7 @@ Project [(cast(1 as smallint) = cast(cast(null as string) as smallint)) AS (CAST -- !query SELECT cast(1 as smallint) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as smallint) > cast(cast(null as string) as smallint)) AS (CAST(1 AS SMALLINT) > CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as smallint) as bigint) > cast(cast(null as string) as bigint)) AS (CAST(1 AS SMALLINT) > CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -626,7 +938,7 @@ Project [(cast(1 as smallint) > cast(cast(null as string) as smallint)) AS (CAST -- !query SELECT cast(1 as smallint) >= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as smallint) >= cast(cast(null as string) as smallint)) AS (CAST(1 AS SMALLINT) >= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as smallint) as bigint) >= cast(cast(null as string) as bigint)) AS (CAST(1 AS SMALLINT) >= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -637,7 +949,7 @@ Project [(cast(1 as smallint) >= cast(cast(null as string) as smallint)) AS (CAS -- !query SELECT cast(1 as smallint) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as smallint) < cast(cast(null as string) as smallint)) AS (CAST(1 AS SMALLINT) < CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as smallint) as bigint) < 
cast(cast(null as string) as bigint)) AS (CAST(1 AS SMALLINT) < CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -648,7 +960,7 @@ Project [(cast(1 as smallint) < cast(cast(null as string) as smallint)) AS (CAST -- !query SELECT cast(1 as smallint) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as smallint) <= cast(cast(null as string) as smallint)) AS (CAST(1 AS SMALLINT) <= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as smallint) as bigint) <= cast(cast(null as string) as bigint)) AS (CAST(1 AS SMALLINT) <= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -659,7 +971,7 @@ Project [(cast(1 as smallint) <= cast(cast(null as string) as smallint)) AS (CAS -- !query SELECT cast(1 as smallint) <> cast(null as string) FROM t -- !query analysis -Project [NOT (cast(1 as smallint) = cast(cast(null as string) as smallint)) AS (NOT (CAST(1 AS SMALLINT) = CAST(NULL AS STRING)))#x] +Project [NOT (cast(cast(1 as smallint) as bigint) = cast(cast(null as string) as bigint)) AS (NOT (CAST(1 AS SMALLINT) = CAST(NULL AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -670,7 +982,7 @@ Project [NOT (cast(1 as smallint) = cast(cast(null as string) as smallint)) AS ( -- !query SELECT '1' = cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(1 as smallint)) AS (1 = CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as smallint) as bigint)) AS (1 = CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -681,7 +993,7 @@ Project [(cast(1 as smallint) = cast(1 as smallint)) AS (1 = CAST(1 AS SMALLINT) -- !query SELECT '2' > cast(1 as smallint) FROM t -- !query analysis -Project [(cast(2 as smallint) > cast(1 as smallint)) AS (2 > CAST(1 AS SMALLINT))#x] +Project [(cast(2 as bigint) > cast(cast(1 as smallint) as bigint)) AS (2 > CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -692,7 +1004,7 @@ Project [(cast(2 as smallint) > cast(1 as smallint)) AS (2 > CAST(1 AS SMALLINT) -- !query SELECT '2' >= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(2 as smallint) >= cast(1 as smallint)) AS (2 >= CAST(1 AS SMALLINT))#x] +Project [(cast(2 as bigint) >= cast(cast(1 as smallint) as bigint)) AS (2 >= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -703,7 +1015,7 @@ Project [(cast(2 as smallint) >= cast(1 as smallint)) AS (2 >= CAST(1 AS SMALLIN -- !query SELECT '2' < cast(1 as smallint) FROM t -- !query analysis -Project [(cast(2 as smallint) < cast(1 as smallint)) AS (2 < CAST(1 AS SMALLINT))#x] +Project [(cast(2 as bigint) < cast(cast(1 as smallint) as bigint)) AS (2 < CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -714,7 +1026,7 @@ Project [(cast(2 as smallint) < cast(1 as smallint)) AS (2 < CAST(1 AS SMALLINT) -- !query SELECT '2' <= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(2 as smallint) <= cast(1 as smallint)) AS (2 <= CAST(1 AS SMALLINT))#x] +Project [(cast(2 as bigint) <= cast(cast(1 as smallint) as bigint)) AS (2 <= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -725,7 +1037,7 @@ Project [(cast(2 as smallint) <= cast(1 as smallint)) AS (2 <= CAST(1 AS SMALLIN -- !query SELECT '2' <> cast(1 
as smallint) FROM t -- !query analysis -Project [NOT (cast(2 as smallint) = cast(1 as smallint)) AS (NOT (2 = CAST(1 AS SMALLINT)))#x] +Project [NOT (cast(2 as bigint) = cast(cast(1 as smallint) as bigint)) AS (NOT (2 = CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -736,7 +1048,7 @@ Project [NOT (cast(2 as smallint) = cast(1 as smallint)) AS (NOT (2 = CAST(1 AS -- !query SELECT cast(null as string) = cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(null as string) as smallint) = cast(1 as smallint)) AS (CAST(NULL AS STRING) = CAST(1 AS SMALLINT))#x] +Project [(cast(cast(null as string) as bigint) = cast(cast(1 as smallint) as bigint)) AS (CAST(NULL AS STRING) = CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -747,7 +1059,7 @@ Project [(cast(cast(null as string) as smallint) = cast(1 as smallint)) AS (CAST -- !query SELECT cast(null as string) > cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(null as string) as smallint) > cast(1 as smallint)) AS (CAST(NULL AS STRING) > CAST(1 AS SMALLINT))#x] +Project [(cast(cast(null as string) as bigint) > cast(cast(1 as smallint) as bigint)) AS (CAST(NULL AS STRING) > CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -758,7 +1070,7 @@ Project [(cast(cast(null as string) as smallint) > cast(1 as smallint)) AS (CAST -- !query SELECT cast(null as string) >= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(null as string) as smallint) >= cast(1 as smallint)) AS (CAST(NULL AS STRING) >= CAST(1 AS SMALLINT))#x] +Project [(cast(cast(null as string) as bigint) >= cast(cast(1 as smallint) as bigint)) AS (CAST(NULL AS STRING) >= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -769,7 +1081,7 @@ Project [(cast(cast(null as string) as smallint) >= cast(1 as smallint)) AS (CAS -- !query SELECT cast(null as string) < cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(null as string) as smallint) < cast(1 as smallint)) AS (CAST(NULL AS STRING) < CAST(1 AS SMALLINT))#x] +Project [(cast(cast(null as string) as bigint) < cast(cast(1 as smallint) as bigint)) AS (CAST(NULL AS STRING) < CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -780,7 +1092,7 @@ Project [(cast(cast(null as string) as smallint) < cast(1 as smallint)) AS (CAST -- !query SELECT cast(null as string) <= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(null as string) as smallint) <= cast(1 as smallint)) AS (CAST(NULL AS STRING) <= CAST(1 AS SMALLINT))#x] +Project [(cast(cast(null as string) as bigint) <= cast(cast(1 as smallint) as bigint)) AS (CAST(NULL AS STRING) <= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -791,7 +1103,7 @@ Project [(cast(cast(null as string) as smallint) <= cast(1 as smallint)) AS (CAS -- !query SELECT cast(null as string) <> cast(1 as smallint) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as smallint) = cast(1 as smallint)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS SMALLINT)))#x] +Project [NOT (cast(cast(null as string) as bigint) = cast(cast(1 as smallint) as bigint)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -802,7 +1114,7 @@ Project [NOT 
(cast(cast(null as string) as smallint) = cast(1 as smallint)) AS ( -- !query SELECT cast(1 as int) = '1' FROM t -- !query analysis -Project [(cast(1 as int) = cast(1 as int)) AS (CAST(1 AS INT) = 1)#x] +Project [(cast(cast(1 as int) as bigint) = cast(1 as bigint)) AS (CAST(1 AS INT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -813,7 +1125,7 @@ Project [(cast(1 as int) = cast(1 as int)) AS (CAST(1 AS INT) = 1)#x] -- !query SELECT cast(1 as int) > '2' FROM t -- !query analysis -Project [(cast(1 as int) > cast(2 as int)) AS (CAST(1 AS INT) > 2)#x] +Project [(cast(cast(1 as int) as bigint) > cast(2 as bigint)) AS (CAST(1 AS INT) > 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -824,7 +1136,7 @@ Project [(cast(1 as int) > cast(2 as int)) AS (CAST(1 AS INT) > 2)#x] -- !query SELECT cast(1 as int) >= '2' FROM t -- !query analysis -Project [(cast(1 as int) >= cast(2 as int)) AS (CAST(1 AS INT) >= 2)#x] +Project [(cast(cast(1 as int) as bigint) >= cast(2 as bigint)) AS (CAST(1 AS INT) >= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -835,7 +1147,7 @@ Project [(cast(1 as int) >= cast(2 as int)) AS (CAST(1 AS INT) >= 2)#x] -- !query SELECT cast(1 as int) < '2' FROM t -- !query analysis -Project [(cast(1 as int) < cast(2 as int)) AS (CAST(1 AS INT) < 2)#x] +Project [(cast(cast(1 as int) as bigint) < cast(2 as bigint)) AS (CAST(1 AS INT) < 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -846,7 +1158,7 @@ Project [(cast(1 as int) < cast(2 as int)) AS (CAST(1 AS INT) < 2)#x] -- !query SELECT cast(1 as int) <= '2' FROM t -- !query analysis -Project [(cast(1 as int) <= cast(2 as int)) AS (CAST(1 AS INT) <= 2)#x] +Project [(cast(cast(1 as int) as bigint) <= cast(2 as bigint)) AS (CAST(1 AS INT) <= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -857,7 +1169,7 @@ Project [(cast(1 as int) <= cast(2 as int)) AS (CAST(1 AS INT) <= 2)#x] -- !query SELECT cast(1 as int) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as int) = cast(2 as int)) AS (NOT (CAST(1 AS INT) = 2))#x] +Project [NOT (cast(cast(1 as int) as bigint) = cast(2 as bigint)) AS (NOT (CAST(1 AS INT) = 2))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -868,7 +1180,7 @@ Project [NOT (cast(1 as int) = cast(2 as int)) AS (NOT (CAST(1 AS INT) = 2))#x] -- !query SELECT cast(1 as int) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as int) = cast(cast(null as string) as int)) AS (CAST(1 AS INT) = CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as int) as bigint) = cast(cast(null as string) as bigint)) AS (CAST(1 AS INT) = CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -879,7 +1191,7 @@ Project [(cast(1 as int) = cast(cast(null as string) as int)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as int) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as int) > cast(cast(null as string) as int)) AS (CAST(1 AS INT) > CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as int) as bigint) > cast(cast(null as string) as bigint)) AS (CAST(1 AS INT) > CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -890,7 +1202,7 @@ Project [(cast(1 as int) > cast(cast(null as string) as int)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as int) >= cast(null as string) FROM t -- !query analysis 
-Project [(cast(1 as int) >= cast(cast(null as string) as int)) AS (CAST(1 AS INT) >= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as int) as bigint) >= cast(cast(null as string) as bigint)) AS (CAST(1 AS INT) >= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -901,7 +1213,7 @@ Project [(cast(1 as int) >= cast(cast(null as string) as int)) AS (CAST(1 AS INT -- !query SELECT cast(1 as int) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as int) < cast(cast(null as string) as int)) AS (CAST(1 AS INT) < CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as int) as bigint) < cast(cast(null as string) as bigint)) AS (CAST(1 AS INT) < CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -912,7 +1224,7 @@ Project [(cast(1 as int) < cast(cast(null as string) as int)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as int) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as int) <= cast(cast(null as string) as int)) AS (CAST(1 AS INT) <= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as int) as bigint) <= cast(cast(null as string) as bigint)) AS (CAST(1 AS INT) <= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -923,7 +1235,7 @@ Project [(cast(1 as int) <= cast(cast(null as string) as int)) AS (CAST(1 AS INT -- !query SELECT cast(1 as int) <> cast(null as string) FROM t -- !query analysis -Project [NOT (cast(1 as int) = cast(cast(null as string) as int)) AS (NOT (CAST(1 AS INT) = CAST(NULL AS STRING)))#x] +Project [NOT (cast(cast(1 as int) as bigint) = cast(cast(null as string) as bigint)) AS (NOT (CAST(1 AS INT) = CAST(NULL AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -934,7 +1246,7 @@ Project [NOT (cast(1 as int) = cast(cast(null as string) as int)) AS (NOT (CAST( -- !query SELECT '1' = cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) = cast(1 as int)) AS (1 = CAST(1 AS INT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as int) as bigint)) AS (1 = CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -945,7 +1257,7 @@ Project [(cast(1 as int) = cast(1 as int)) AS (1 = CAST(1 AS INT))#x] -- !query SELECT '2' > cast(1 as int) FROM t -- !query analysis -Project [(cast(2 as int) > cast(1 as int)) AS (2 > CAST(1 AS INT))#x] +Project [(cast(2 as bigint) > cast(cast(1 as int) as bigint)) AS (2 > CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -956,7 +1268,7 @@ Project [(cast(2 as int) > cast(1 as int)) AS (2 > CAST(1 AS INT))#x] -- !query SELECT '2' >= cast(1 as int) FROM t -- !query analysis -Project [(cast(2 as int) >= cast(1 as int)) AS (2 >= CAST(1 AS INT))#x] +Project [(cast(2 as bigint) >= cast(cast(1 as int) as bigint)) AS (2 >= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -967,7 +1279,7 @@ Project [(cast(2 as int) >= cast(1 as int)) AS (2 >= CAST(1 AS INT))#x] -- !query SELECT '2' < cast(1 as int) FROM t -- !query analysis -Project [(cast(2 as int) < cast(1 as int)) AS (2 < CAST(1 AS INT))#x] +Project [(cast(2 as bigint) < cast(cast(1 as int) as bigint)) AS (2 < CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -978,7 +1290,7 @@ Project [(cast(2 as int) < cast(1 as int)) AS (2 < CAST(1 AS INT))#x] -- !query SELECT '2' <> cast(1 as int) 
FROM t -- !query analysis -Project [NOT (cast(2 as int) = cast(1 as int)) AS (NOT (2 = CAST(1 AS INT)))#x] +Project [NOT (cast(2 as bigint) = cast(cast(1 as int) as bigint)) AS (NOT (2 = CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -989,7 +1301,7 @@ Project [NOT (cast(2 as int) = cast(1 as int)) AS (NOT (2 = CAST(1 AS INT)))#x] -- !query SELECT '2' <= cast(1 as int) FROM t -- !query analysis -Project [(cast(2 as int) <= cast(1 as int)) AS (2 <= CAST(1 AS INT))#x] +Project [(cast(2 as bigint) <= cast(cast(1 as int) as bigint)) AS (2 <= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1000,7 +1312,7 @@ Project [(cast(2 as int) <= cast(1 as int)) AS (2 <= CAST(1 AS INT))#x] -- !query SELECT cast(null as string) = cast(1 as int) FROM t -- !query analysis -Project [(cast(cast(null as string) as int) = cast(1 as int)) AS (CAST(NULL AS STRING) = CAST(1 AS INT))#x] +Project [(cast(cast(null as string) as bigint) = cast(cast(1 as int) as bigint)) AS (CAST(NULL AS STRING) = CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1011,7 +1323,7 @@ Project [(cast(cast(null as string) as int) = cast(1 as int)) AS (CAST(NULL AS S -- !query SELECT cast(null as string) > cast(1 as int) FROM t -- !query analysis -Project [(cast(cast(null as string) as int) > cast(1 as int)) AS (CAST(NULL AS STRING) > CAST(1 AS INT))#x] +Project [(cast(cast(null as string) as bigint) > cast(cast(1 as int) as bigint)) AS (CAST(NULL AS STRING) > CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1022,7 +1334,7 @@ Project [(cast(cast(null as string) as int) > cast(1 as int)) AS (CAST(NULL AS S -- !query SELECT cast(null as string) >= cast(1 as int) FROM t -- !query analysis -Project [(cast(cast(null as string) as int) >= cast(1 as int)) AS (CAST(NULL AS STRING) >= CAST(1 AS INT))#x] +Project [(cast(cast(null as string) as bigint) >= cast(cast(1 as int) as bigint)) AS (CAST(NULL AS STRING) >= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1033,7 +1345,7 @@ Project [(cast(cast(null as string) as int) >= cast(1 as int)) AS (CAST(NULL AS -- !query SELECT cast(null as string) < cast(1 as int) FROM t -- !query analysis -Project [(cast(cast(null as string) as int) < cast(1 as int)) AS (CAST(NULL AS STRING) < CAST(1 AS INT))#x] +Project [(cast(cast(null as string) as bigint) < cast(cast(1 as int) as bigint)) AS (CAST(NULL AS STRING) < CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1044,7 +1356,7 @@ Project [(cast(cast(null as string) as int) < cast(1 as int)) AS (CAST(NULL AS S -- !query SELECT cast(null as string) <> cast(1 as int) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as int) = cast(1 as int)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS INT)))#x] +Project [NOT (cast(cast(null as string) as bigint) = cast(cast(1 as int) as bigint)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1055,7 +1367,7 @@ Project [NOT (cast(cast(null as string) as int) = cast(1 as int)) AS (NOT (CAST( -- !query SELECT cast(null as string) <= cast(1 as int) FROM t -- !query analysis -Project [(cast(cast(null as string) as int) <= cast(1 as int)) AS (CAST(NULL AS STRING) <= CAST(1 AS INT))#x] +Project [(cast(cast(null as string) as bigint) <= 
cast(cast(1 as int) as bigint)) AS (CAST(NULL AS STRING) <= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1330,7 +1642,7 @@ Project [NOT (cast(cast(null as string) as bigint) = cast(1 as bigint)) AS (NOT -- !query SELECT cast(1 as decimal(10, 0)) = '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) = cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1341,7 +1653,7 @@ Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (CAST(1 AS DEC -- !query SELECT cast(1 as decimal(10, 0)) > '2' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) > cast(2 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) > 2)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) > cast(2 as double)) AS (CAST(1 AS DECIMAL(10,0)) > 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1352,7 +1664,7 @@ Project [(cast(1 as decimal(10,0)) > cast(2 as decimal(10,0))) AS (CAST(1 AS DEC -- !query SELECT cast(1 as decimal(10, 0)) >= '2' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) >= cast(2 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) >= 2)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) >= cast(2 as double)) AS (CAST(1 AS DECIMAL(10,0)) >= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1363,7 +1675,7 @@ Project [(cast(1 as decimal(10,0)) >= cast(2 as decimal(10,0))) AS (CAST(1 AS DE -- !query SELECT cast(1 as decimal(10, 0)) < '2' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) < cast(2 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) < 2)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) < cast(2 as double)) AS (CAST(1 AS DECIMAL(10,0)) < 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1374,7 +1686,7 @@ Project [(cast(1 as decimal(10,0)) < cast(2 as decimal(10,0))) AS (CAST(1 AS DEC -- !query SELECT cast(1 as decimal(10, 0)) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(2 as decimal(10,0))) AS (NOT (CAST(1 AS DECIMAL(10,0)) = 2))#x] +Project [NOT (cast(cast(1 as decimal(10,0)) as double) = cast(2 as double)) AS (NOT (CAST(1 AS DECIMAL(10,0)) = 2))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1385,7 +1697,7 @@ Project [NOT (cast(1 as decimal(10,0)) = cast(2 as decimal(10,0))) AS (NOT (CAST -- !query SELECT cast(1 as decimal(10, 0)) <= '2' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <= cast(2 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <= 2)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <= cast(2 as double)) AS (CAST(1 AS DECIMAL(10,0)) <= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1396,7 +1708,7 @@ Project [(cast(1 as decimal(10,0)) <= cast(2 as decimal(10,0))) AS (CAST(1 AS DE -- !query SELECT cast(1 as decimal(10, 0)) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(cast(null as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) = cast(cast(null as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) = CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1407,7 
+1719,7 @@ Project [(cast(1 as decimal(10,0)) = cast(cast(null as string) as decimal(10,0)) -- !query SELECT cast(1 as decimal(10, 0)) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) > cast(cast(null as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) > CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) > cast(cast(null as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) > CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1418,7 +1730,7 @@ Project [(cast(1 as decimal(10,0)) > cast(cast(null as string) as decimal(10,0)) -- !query SELECT cast(1 as decimal(10, 0)) >= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) >= cast(cast(null as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) >= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) >= cast(cast(null as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) >= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1429,7 +1741,7 @@ Project [(cast(1 as decimal(10,0)) >= cast(cast(null as string) as decimal(10,0) -- !query SELECT cast(1 as decimal(10, 0)) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) < cast(cast(null as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) < CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) < cast(cast(null as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) < CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1440,7 +1752,7 @@ Project [(cast(1 as decimal(10,0)) < cast(cast(null as string) as decimal(10,0)) -- !query SELECT cast(1 as decimal(10, 0)) <> cast(null as string) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(cast(null as string) as decimal(10,0))) AS (NOT (CAST(1 AS DECIMAL(10,0)) = CAST(NULL AS STRING)))#x] +Project [NOT (cast(cast(1 as decimal(10,0)) as double) = cast(cast(null as string) as double)) AS (NOT (CAST(1 AS DECIMAL(10,0)) = CAST(NULL AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1451,7 +1763,7 @@ Project [NOT (cast(1 as decimal(10,0)) = cast(cast(null as string) as decimal(10 -- !query SELECT cast(1 as decimal(10, 0)) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <= cast(cast(null as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <= cast(cast(null as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) <= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1462,7 +1774,7 @@ Project [(cast(1 as decimal(10,0)) <= cast(cast(null as string) as decimal(10,0) -- !query SELECT '1' = cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (1 = CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) = cast(cast(1 as decimal(10,0)) as double)) AS (1 = CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1473,7 +1785,7 @@ Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (1 = CAST(1 AS -- !query SELECT '2' > cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(2 as decimal(10,0)) > cast(1 as decimal(10,0))) AS (2 > CAST(1 AS 
DECIMAL(10,0)))#x] +Project [(cast(2 as double) > cast(cast(1 as decimal(10,0)) as double)) AS (2 > CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1484,7 +1796,7 @@ Project [(cast(2 as decimal(10,0)) > cast(1 as decimal(10,0))) AS (2 > CAST(1 AS -- !query SELECT '2' >= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(2 as decimal(10,0)) >= cast(1 as decimal(10,0))) AS (2 >= CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(2 as double) >= cast(cast(1 as decimal(10,0)) as double)) AS (2 >= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1495,7 +1807,7 @@ Project [(cast(2 as decimal(10,0)) >= cast(1 as decimal(10,0))) AS (2 >= CAST(1 -- !query SELECT '2' < cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(2 as decimal(10,0)) < cast(1 as decimal(10,0))) AS (2 < CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(2 as double) < cast(cast(1 as decimal(10,0)) as double)) AS (2 < CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1506,7 +1818,7 @@ Project [(cast(2 as decimal(10,0)) < cast(1 as decimal(10,0))) AS (2 < CAST(1 AS -- !query SELECT '2' <= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(2 as decimal(10,0)) <= cast(1 as decimal(10,0))) AS (2 <= CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(2 as double) <= cast(cast(1 as decimal(10,0)) as double)) AS (2 <= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1517,7 +1829,7 @@ Project [(cast(2 as decimal(10,0)) <= cast(1 as decimal(10,0))) AS (2 <= CAST(1 -- !query SELECT '2' <> cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [NOT (cast(2 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (NOT (2 = CAST(1 AS DECIMAL(10,0))))#x] +Project [NOT (cast(2 as double) = cast(cast(1 as decimal(10,0)) as double)) AS (NOT (2 = CAST(1 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1528,7 +1840,7 @@ Project [NOT (cast(2 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (NOT (2 = -- !query SELECT cast(null as string) = cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(cast(null as string) as decimal(10,0)) = cast(1 as decimal(10,0))) AS (CAST(NULL AS STRING) = CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(cast(null as string) as double) = cast(cast(1 as decimal(10,0)) as double)) AS (CAST(NULL AS STRING) = CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1539,7 +1851,7 @@ Project [(cast(cast(null as string) as decimal(10,0)) = cast(1 as decimal(10,0)) -- !query SELECT cast(null as string) > cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(cast(null as string) as decimal(10,0)) > cast(1 as decimal(10,0))) AS (CAST(NULL AS STRING) > CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(cast(null as string) as double) > cast(cast(1 as decimal(10,0)) as double)) AS (CAST(NULL AS STRING) > CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1550,7 +1862,7 @@ Project [(cast(cast(null as string) as decimal(10,0)) > cast(1 as decimal(10,0)) -- !query SELECT cast(null as string) >= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(cast(null as string) as decimal(10,0)) >= cast(1 as decimal(10,0))) AS (CAST(NULL AS STRING) >= CAST(1 AS DECIMAL(10,0)))#x] 
+Project [(cast(cast(null as string) as double) >= cast(cast(1 as decimal(10,0)) as double)) AS (CAST(NULL AS STRING) >= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1561,7 +1873,7 @@ Project [(cast(cast(null as string) as decimal(10,0)) >= cast(1 as decimal(10,0) -- !query SELECT cast(null as string) < cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(cast(null as string) as decimal(10,0)) < cast(1 as decimal(10,0))) AS (CAST(NULL AS STRING) < CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(cast(null as string) as double) < cast(cast(1 as decimal(10,0)) as double)) AS (CAST(NULL AS STRING) < CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1572,7 +1884,7 @@ Project [(cast(cast(null as string) as decimal(10,0)) < cast(1 as decimal(10,0)) -- !query SELECT cast(null as string) <= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(cast(null as string) as decimal(10,0)) <= cast(1 as decimal(10,0))) AS (CAST(NULL AS STRING) <= CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(cast(null as string) as double) <= cast(cast(1 as decimal(10,0)) as double)) AS (CAST(NULL AS STRING) <= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1583,7 +1895,7 @@ Project [(cast(cast(null as string) as decimal(10,0)) <= cast(1 as decimal(10,0) -- !query SELECT cast(null as string) <> cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as decimal(10,0)) = cast(1 as decimal(10,0))) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS DECIMAL(10,0))))#x] +Project [NOT (cast(cast(null as string) as double) = cast(cast(1 as decimal(10,0)) as double)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1858,7 +2170,7 @@ Project [NOT (cast(cast(null as string) as double) = cast(1 as double)) AS (NOT -- !query SELECT cast(1 as float) = '1' FROM t -- !query analysis -Project [(cast(1 as float) = cast(1 as float)) AS (CAST(1 AS FLOAT) = 1)#x] +Project [(cast(cast(1 as float) as double) = cast(1 as double)) AS (CAST(1 AS FLOAT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1869,7 +2181,7 @@ Project [(cast(1 as float) = cast(1 as float)) AS (CAST(1 AS FLOAT) = 1)#x] -- !query SELECT cast(1 as float) > '2' FROM t -- !query analysis -Project [(cast(1 as float) > cast(2 as float)) AS (CAST(1 AS FLOAT) > 2)#x] +Project [(cast(cast(1 as float) as double) > cast(2 as double)) AS (CAST(1 AS FLOAT) > 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1880,7 +2192,7 @@ Project [(cast(1 as float) > cast(2 as float)) AS (CAST(1 AS FLOAT) > 2)#x] -- !query SELECT cast(1 as float) >= '2' FROM t -- !query analysis -Project [(cast(1 as float) >= cast(2 as float)) AS (CAST(1 AS FLOAT) >= 2)#x] +Project [(cast(cast(1 as float) as double) >= cast(2 as double)) AS (CAST(1 AS FLOAT) >= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1891,7 +2203,7 @@ Project [(cast(1 as float) >= cast(2 as float)) AS (CAST(1 AS FLOAT) >= 2)#x] -- !query SELECT cast(1 as float) < '2' FROM t -- !query analysis -Project [(cast(1 as float) < cast(2 as float)) AS (CAST(1 AS FLOAT) < 2)#x] +Project [(cast(cast(1 as float) as double) < cast(2 as double)) AS (CAST(1 AS FLOAT) < 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- 
Project [cast(1#x as int) AS 1#x] @@ -1902,7 +2214,7 @@ Project [(cast(1 as float) < cast(2 as float)) AS (CAST(1 AS FLOAT) < 2)#x] -- !query SELECT cast(1 as float) <= '2' FROM t -- !query analysis -Project [(cast(1 as float) <= cast(2 as float)) AS (CAST(1 AS FLOAT) <= 2)#x] +Project [(cast(cast(1 as float) as double) <= cast(2 as double)) AS (CAST(1 AS FLOAT) <= 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1913,7 +2225,7 @@ Project [(cast(1 as float) <= cast(2 as float)) AS (CAST(1 AS FLOAT) <= 2)#x] -- !query SELECT cast(1 as float) <> '2' FROM t -- !query analysis -Project [NOT (cast(1 as float) = cast(2 as float)) AS (NOT (CAST(1 AS FLOAT) = 2))#x] +Project [NOT (cast(cast(1 as float) as double) = cast(2 as double)) AS (NOT (CAST(1 AS FLOAT) = 2))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1924,7 +2236,7 @@ Project [NOT (cast(1 as float) = cast(2 as float)) AS (NOT (CAST(1 AS FLOAT) = 2 -- !query SELECT cast(1 as float) = cast(null as string) FROM t -- !query analysis -Project [(cast(1 as float) = cast(cast(null as string) as float)) AS (CAST(1 AS FLOAT) = CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as float) as double) = cast(cast(null as string) as double)) AS (CAST(1 AS FLOAT) = CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1935,7 +2247,7 @@ Project [(cast(1 as float) = cast(cast(null as string) as float)) AS (CAST(1 AS -- !query SELECT cast(1 as float) > cast(null as string) FROM t -- !query analysis -Project [(cast(1 as float) > cast(cast(null as string) as float)) AS (CAST(1 AS FLOAT) > CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as float) as double) > cast(cast(null as string) as double)) AS (CAST(1 AS FLOAT) > CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1946,7 +2258,7 @@ Project [(cast(1 as float) > cast(cast(null as string) as float)) AS (CAST(1 AS -- !query SELECT cast(1 as float) >= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as float) >= cast(cast(null as string) as float)) AS (CAST(1 AS FLOAT) >= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as float) as double) >= cast(cast(null as string) as double)) AS (CAST(1 AS FLOAT) >= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1957,7 +2269,7 @@ Project [(cast(1 as float) >= cast(cast(null as string) as float)) AS (CAST(1 AS -- !query SELECT cast(1 as float) < cast(null as string) FROM t -- !query analysis -Project [(cast(1 as float) < cast(cast(null as string) as float)) AS (CAST(1 AS FLOAT) < CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as float) as double) < cast(cast(null as string) as double)) AS (CAST(1 AS FLOAT) < CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1968,7 +2280,7 @@ Project [(cast(1 as float) < cast(cast(null as string) as float)) AS (CAST(1 AS -- !query SELECT cast(1 as float) <= cast(null as string) FROM t -- !query analysis -Project [(cast(1 as float) <= cast(cast(null as string) as float)) AS (CAST(1 AS FLOAT) <= CAST(NULL AS STRING))#x] +Project [(cast(cast(1 as float) as double) <= cast(cast(null as string) as double)) AS (CAST(1 AS FLOAT) <= CAST(NULL AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1979,7 +2291,7 @@ Project [(cast(1 as float) <= cast(cast(null as string) as float)) AS (CAST(1 AS -- 
!query SELECT cast(1 as float) <> cast(null as string) FROM t -- !query analysis -Project [NOT (cast(1 as float) = cast(cast(null as string) as float)) AS (NOT (CAST(1 AS FLOAT) = CAST(NULL AS STRING)))#x] +Project [NOT (cast(cast(1 as float) as double) = cast(cast(null as string) as double)) AS (NOT (CAST(1 AS FLOAT) = CAST(NULL AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1990,7 +2302,7 @@ Project [NOT (cast(1 as float) = cast(cast(null as string) as float)) AS (NOT (C -- !query SELECT '1' = cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) = cast(1 as float)) AS (1 = CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) = cast(cast(1 as float) as double)) AS (1 = CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2001,7 +2313,7 @@ Project [(cast(1 as float) = cast(1 as float)) AS (1 = CAST(1 AS FLOAT))#x] -- !query SELECT '2' > cast(1 as float) FROM t -- !query analysis -Project [(cast(2 as float) > cast(1 as float)) AS (2 > CAST(1 AS FLOAT))#x] +Project [(cast(2 as double) > cast(cast(1 as float) as double)) AS (2 > CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2012,7 +2324,7 @@ Project [(cast(2 as float) > cast(1 as float)) AS (2 > CAST(1 AS FLOAT))#x] -- !query SELECT '2' >= cast(1 as float) FROM t -- !query analysis -Project [(cast(2 as float) >= cast(1 as float)) AS (2 >= CAST(1 AS FLOAT))#x] +Project [(cast(2 as double) >= cast(cast(1 as float) as double)) AS (2 >= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2023,7 +2335,7 @@ Project [(cast(2 as float) >= cast(1 as float)) AS (2 >= CAST(1 AS FLOAT))#x] -- !query SELECT '2' < cast(1 as float) FROM t -- !query analysis -Project [(cast(2 as float) < cast(1 as float)) AS (2 < CAST(1 AS FLOAT))#x] +Project [(cast(2 as double) < cast(cast(1 as float) as double)) AS (2 < CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2034,7 +2346,7 @@ Project [(cast(2 as float) < cast(1 as float)) AS (2 < CAST(1 AS FLOAT))#x] -- !query SELECT '2' <= cast(1 as float) FROM t -- !query analysis -Project [(cast(2 as float) <= cast(1 as float)) AS (2 <= CAST(1 AS FLOAT))#x] +Project [(cast(2 as double) <= cast(cast(1 as float) as double)) AS (2 <= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2045,7 +2357,7 @@ Project [(cast(2 as float) <= cast(1 as float)) AS (2 <= CAST(1 AS FLOAT))#x] -- !query SELECT '2' <> cast(1 as float) FROM t -- !query analysis -Project [NOT (cast(2 as float) = cast(1 as float)) AS (NOT (2 = CAST(1 AS FLOAT)))#x] +Project [NOT (cast(2 as double) = cast(cast(1 as float) as double)) AS (NOT (2 = CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2056,7 +2368,7 @@ Project [NOT (cast(2 as float) = cast(1 as float)) AS (NOT (2 = CAST(1 AS FLOAT) -- !query SELECT cast(null as string) = cast(1 as float) FROM t -- !query analysis -Project [(cast(cast(null as string) as float) = cast(1 as float)) AS (CAST(NULL AS STRING) = CAST(1 AS FLOAT))#x] +Project [(cast(cast(null as string) as double) = cast(cast(1 as float) as double)) AS (CAST(NULL AS STRING) = CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2067,7 +2379,7 @@ Project [(cast(cast(null as string) as float) = cast(1 as float)) AS (CAST(NULL -- 
!query SELECT cast(null as string) > cast(1 as float) FROM t -- !query analysis -Project [(cast(cast(null as string) as float) > cast(1 as float)) AS (CAST(NULL AS STRING) > CAST(1 AS FLOAT))#x] +Project [(cast(cast(null as string) as double) > cast(cast(1 as float) as double)) AS (CAST(NULL AS STRING) > CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2078,7 +2390,7 @@ Project [(cast(cast(null as string) as float) > cast(1 as float)) AS (CAST(NULL -- !query SELECT cast(null as string) >= cast(1 as float) FROM t -- !query analysis -Project [(cast(cast(null as string) as float) >= cast(1 as float)) AS (CAST(NULL AS STRING) >= CAST(1 AS FLOAT))#x] +Project [(cast(cast(null as string) as double) >= cast(cast(1 as float) as double)) AS (CAST(NULL AS STRING) >= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2089,7 +2401,7 @@ Project [(cast(cast(null as string) as float) >= cast(1 as float)) AS (CAST(NULL -- !query SELECT cast(null as string) < cast(1 as float) FROM t -- !query analysis -Project [(cast(cast(null as string) as float) < cast(1 as float)) AS (CAST(NULL AS STRING) < CAST(1 AS FLOAT))#x] +Project [(cast(cast(null as string) as double) < cast(cast(1 as float) as double)) AS (CAST(NULL AS STRING) < CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2100,7 +2412,7 @@ Project [(cast(cast(null as string) as float) < cast(1 as float)) AS (CAST(NULL -- !query SELECT cast(null as string) <= cast(1 as float) FROM t -- !query analysis -Project [(cast(cast(null as string) as float) <= cast(1 as float)) AS (CAST(NULL AS STRING) <= CAST(1 AS FLOAT))#x] +Project [(cast(cast(null as string) as double) <= cast(cast(1 as float) as double)) AS (CAST(NULL AS STRING) <= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2111,7 +2423,7 @@ Project [(cast(cast(null as string) as float) <= cast(1 as float)) AS (CAST(NULL -- !query SELECT cast(null as string) <> cast(1 as float) FROM t -- !query analysis -Project [NOT (cast(cast(null as string) as float) = cast(1 as float)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS FLOAT)))#x] +Project [NOT (cast(cast(null as string) as double) = cast(cast(1 as float) as double)) AS (NOT (CAST(NULL AS STRING) = CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/booleanEquality.sql.out index de6c0b72c1c79..4fe85374f4b1b 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/booleanEquality.sql.out @@ -10,78 +10,155 @@ CreateViewCommand `t`, SELECT 1, false, false, LocalTempView, UNSUPPORTED, true -- !query SELECT true = cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(true as tinyint) = cast(1 as tinyint)) AS (true = CAST(1 AS TINYINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + 
"sqlExpr" : "\"(true = CAST(1 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "true = cast(1 as tinyint)" + } ] +} -- !query SELECT true = cast(1 as smallint) FROM t -- !query analysis -Project [(cast(true as smallint) = cast(1 as smallint)) AS (true = CAST(1 AS SMALLINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(true = CAST(1 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true = cast(1 as smallint)" + } ] +} -- !query SELECT true = cast(1 as int) FROM t -- !query analysis -Project [(cast(true as int) = cast(1 as int)) AS (true = CAST(1 AS INT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(true = CAST(1 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "true = cast(1 as int)" + } ] +} -- !query SELECT true = cast(1 as bigint) FROM t -- !query analysis -Project [(cast(true as bigint) = cast(1 as bigint)) AS (true = CAST(1 AS BIGINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(true = CAST(1 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "true = cast(1 as bigint)" + } ] +} -- !query SELECT true = cast(1 as float) FROM t -- !query analysis -Project [(cast(true as float) = cast(1 as float)) AS (true = CAST(1 AS FLOAT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(true = CAST(1 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "true = cast(1 as float)" + } ] +} -- !query SELECT true = cast(1 as double) FROM t -- !query analysis -Project [(cast(true as double) = cast(1 as double)) AS (true = CAST(1 AS DOUBLE))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(true 
= CAST(1 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "true = cast(1 as double)" + } ] +} -- !query SELECT true = cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(true as decimal(10,0)) = cast(1 as decimal(10,0))) AS (true = CAST(1 AS DECIMAL(10,0)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(true = CAST(1 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "true = cast(1 as decimal(10, 0))" + } ] +} -- !query @@ -175,126 +252,71 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT true <=> cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(true as tinyint) <=> cast(1 as tinyint)) AS (true <=> CAST(1 AS TINYINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "true <=> cast(1 as tinyint)" + } ] +} -- !query SELECT true <=> cast(1 as smallint) FROM t -- !query analysis -Project [(cast(true as smallint) <=> cast(1 as smallint)) AS (true <=> CAST(1 AS SMALLINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast(1 as int) FROM t --- !query analysis -Project [(cast(true as int) <=> cast(1 as int)) AS (true <=> CAST(1 AS INT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast(1 as bigint) FROM t --- !query analysis -Project [(cast(true as bigint) <=> cast(1 as bigint)) AS (true <=> CAST(1 AS BIGINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast(1 as float) FROM t --- !query analysis -Project [(cast(true as float) <=> cast(1 as float)) AS (true <=> CAST(1 AS FLOAT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast(1 as double) FROM t --- !query analysis -Project [(cast(true as double) <=> cast(1 as double)) AS (true <=> CAST(1 AS DOUBLE))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast(1 as decimal(10, 0)) FROM t --- !query analysis -Project [(cast(true as decimal(10,0)) <=> cast(1 as decimal(10,0))) AS (true <=> CAST(1 AS DECIMAL(10,0)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT 
true <=> cast(1 as string) FROM t --- !query analysis -Project [(true <=> cast(cast(1 as string) as boolean)) AS (true <=> CAST(1 AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT true <=> cast('1' as binary) FROM t --- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { "left" : "\"BOOLEAN\"", - "right" : "\"BINARY\"", - "sqlExpr" : "\"(true <=> CAST(1 AS BINARY))\"" + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS SMALLINT))\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, "stopIndex" : 35, - "fragment" : "true <=> cast('1' as binary)" + "fragment" : "true <=> cast(1 as smallint)" } ] } -- !query -SELECT true <=> cast(1 as boolean) FROM t +SELECT true <=> cast(1 as int) FROM t -- !query analysis -Project [(true <=> cast(1 as boolean)) AS (true <=> CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "true <=> cast(1 as int)" + } ] +} -- !query -SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t +SELECT true <=> cast(1 as bigint) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { @@ -302,21 +324,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "left" : "\"BOOLEAN\"", - "right" : "\"TIMESTAMP\"", - "sqlExpr" : "\"(true <=> CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS BIGINT))\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 58, - "fragment" : "true <=> cast('2017-12-11 09:30:00.0' as timestamp)" + "stopIndex" : 33, + "fragment" : "true <=> cast(1 as bigint)" } ] } -- !query -SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t +SELECT true <=> cast(1 as float) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { @@ -324,23 +346,67 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "left" : "\"BOOLEAN\"", - "right" : "\"DATE\"", - "sqlExpr" : "\"(true <=> CAST(2017-12-11 09:30:00 AS DATE))\"" + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS FLOAT))\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 51, - "fragment" : "true <=> cast('2017-12-11 09:30:00' as date)" + "stopIndex" : 32, + "fragment" : "true <=> cast(1 as float)" } ] } -- !query -SELECT cast(1 as tinyint) = true FROM t +SELECT true <=> cast(1 as double) FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(true as tinyint)) AS (CAST(1 AS TINYINT) = true)#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(true 
<=> CAST(1 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true <=> cast(1 as double)" + } ] +} + + +-- !query +SELECT true <=> cast(1 as decimal(10, 0)) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(true <=> CAST(1 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 41, + "fragment" : "true <=> cast(1 as decimal(10, 0))" + } ] +} + + +-- !query +SELECT true <=> cast(1 as string) FROM t +-- !query analysis +Project [(true <=> cast(cast(1 as string) as boolean)) AS (true <=> CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -349,9 +415,31 @@ Project [(cast(1 as tinyint) = cast(true as tinyint)) AS (CAST(1 AS TINYINT) = t -- !query -SELECT cast(1 as smallint) = true FROM t +SELECT true <=> cast('1' as binary) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BINARY\"", + "sqlExpr" : "\"(true <=> CAST(1 AS BINARY))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "true <=> cast('1' as binary)" + } ] +} + + +-- !query +SELECT true <=> cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(true as smallint)) AS (CAST(1 AS SMALLINT) = true)#x] +Project [(true <=> cast(1 as boolean)) AS (true <=> CAST(1 AS BOOLEAN))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -359,59 +447,202 @@ Project [(cast(1 as smallint) = cast(true as smallint)) AS (CAST(1 AS SMALLINT) +- OneRowRelation +-- !query +SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TIMESTAMP\"", + "sqlExpr" : "\"(true <=> CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "true <=> cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} + + +-- !query +SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DATE\"", + "sqlExpr" : "\"(true <=> CAST(2017-12-11 09:30:00 AS DATE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "true <=> cast('2017-12-11 09:30:00' as date)" + } ] +} + + +-- !query +SELECT cast(1 as tinyint) = true FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS TINYINT) = 
true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(1 as tinyint) = true" + } ] +} + + +-- !query +SELECT cast(1 as smallint) = true FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS SMALLINT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as smallint) = true" + } ] +} + + -- !query SELECT cast(1 as int) = true FROM t -- !query analysis -Project [(cast(1 as int) = cast(true as int)) AS (CAST(1 AS INT) = true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS INT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "cast(1 as int) = true" + } ] +} -- !query SELECT cast(1 as bigint) = true FROM t -- !query analysis -Project [(cast(1 as bigint) = cast(true as bigint)) AS (CAST(1 AS BIGINT) = true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS BIGINT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(1 as bigint) = true" + } ] +} -- !query SELECT cast(1 as float) = true FROM t -- !query analysis -Project [(cast(1 as float) = cast(true as float)) AS (CAST(1 AS FLOAT) = true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS FLOAT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "cast(1 as float) = true" + } ] +} -- !query SELECT cast(1 as double) = true FROM t -- !query analysis -Project [(cast(1 as double) = cast(true as double)) AS (CAST(1 AS DOUBLE) = true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DOUBLE) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(1 as double) = true" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) = true FROM t -- 
!query analysis -Project [(cast(1 as decimal(10,0)) = cast(true as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "cast(1 as decimal(10, 0)) = true" + } ] +} -- !query @@ -505,78 +736,155 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) <=> true FROM t -- !query analysis -Project [(cast(1 as tinyint) <=> cast(true as tinyint)) AS (CAST(1 AS TINYINT) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS TINYINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(1 as tinyint) <=> true" + } ] +} -- !query SELECT cast(1 as smallint) <=> true FROM t -- !query analysis -Project [(cast(1 as smallint) <=> cast(true as smallint)) AS (CAST(1 AS SMALLINT) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS SMALLINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast(1 as smallint) <=> true" + } ] +} -- !query SELECT cast(1 as int) <=> true FROM t -- !query analysis -Project [(cast(1 as int) <=> cast(true as int)) AS (CAST(1 AS INT) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS INT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "cast(1 as int) <=> true" + } ] +} -- !query SELECT cast(1 as bigint) <=> true FROM t -- !query analysis -Project [(cast(1 as bigint) <=> cast(true as bigint)) AS (CAST(1 AS BIGINT) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS BIGINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + 
"objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as bigint) <=> true" + } ] +} -- !query SELECT cast(1 as float) <=> true FROM t -- !query analysis -Project [(cast(1 as float) <=> cast(true as float)) AS (CAST(1 AS FLOAT) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS FLOAT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(1 as float) <=> true" + } ] +} -- !query SELECT cast(1 as double) <=> true FROM t -- !query analysis -Project [(cast(1 as double) <=> cast(true as double)) AS (CAST(1 AS DOUBLE) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DOUBLE) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as double) <=> true" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <=> true FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <=> cast(true as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <=> true)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 41, + "fragment" : "cast(1 as decimal(10, 0)) <=> true" + } ] +} -- !query @@ -670,78 +978,155 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT false = cast(0 as tinyint) FROM t -- !query analysis -Project [(cast(false as tinyint) = cast(0 as tinyint)) AS (false = CAST(0 AS TINYINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(false = CAST(0 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "false = cast(0 as tinyint)" + } ] +} -- !query SELECT false = cast(0 as smallint) FROM t -- !query analysis -Project [(cast(false as smallint) = cast(0 as smallint)) AS (false = CAST(0 AS SMALLINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : 
"42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(false = CAST(0 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false = cast(0 as smallint)" + } ] +} -- !query SELECT false = cast(0 as int) FROM t -- !query analysis -Project [(cast(false as int) = cast(0 as int)) AS (false = CAST(0 AS INT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(false = CAST(0 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "false = cast(0 as int)" + } ] +} -- !query SELECT false = cast(0 as bigint) FROM t -- !query analysis -Project [(cast(false as bigint) = cast(0 as bigint)) AS (false = CAST(0 AS BIGINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(false = CAST(0 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "false = cast(0 as bigint)" + } ] +} -- !query SELECT false = cast(0 as float) FROM t -- !query analysis -Project [(cast(false as float) = cast(0 as float)) AS (false = CAST(0 AS FLOAT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(false = CAST(0 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "false = cast(0 as float)" + } ] +} -- !query SELECT false = cast(0 as double) FROM t -- !query analysis -Project [(cast(false as double) = cast(0 as double)) AS (false = CAST(0 AS DOUBLE))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(false = CAST(0 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "false = cast(0 as double)" + } ] +} -- !query SELECT false = cast(0 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(false as decimal(10,0)) = cast(0 as decimal(10,0))) AS (false = CAST(0 AS DECIMAL(10,0)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : 
"DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(false = CAST(0 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "false = cast(0 as decimal(10, 0))" + } ] +} -- !query @@ -835,73 +1220,161 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT false <=> cast(0 as tinyint) FROM t -- !query analysis -Project [(cast(false as tinyint) <=> cast(0 as tinyint)) AS (false <=> CAST(0 AS TINYINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "false <=> cast(0 as tinyint)" + } ] +} -- !query SELECT false <=> cast(0 as smallint) FROM t -- !query analysis -Project [(cast(false as smallint) <=> cast(0 as smallint)) AS (false <=> CAST(0 AS SMALLINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "false <=> cast(0 as smallint)" + } ] +} -- !query SELECT false <=> cast(0 as int) FROM t -- !query analysis -Project [(cast(false as int) <=> cast(0 as int)) AS (false <=> CAST(0 AS INT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "false <=> cast(0 as int)" + } ] +} -- !query SELECT false <=> cast(0 as bigint) FROM t -- !query analysis -Project [(cast(false as bigint) <=> cast(0 as bigint)) AS (false <=> CAST(0 AS BIGINT))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false <=> cast(0 as bigint)" + } ] +} -- !query SELECT false <=> cast(0 as float) FROM t -- !query analysis -Project [(cast(false as float) <=> cast(0 as float)) AS (false <=> CAST(0 AS FLOAT))#x] -+- SubqueryAlias t - +- View 
(`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "false <=> cast(0 as float)" + } ] +} -- !query SELECT false <=> cast(0 as double) FROM t -- !query analysis -Project [(cast(false as double) <=> cast(0 as double)) AS (false <=> CAST(0 AS DOUBLE))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(false <=> CAST(0 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false <=> cast(0 as double)" + } ] +} -- !query SELECT false <=> cast(0 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(false as decimal(10,0)) <=> cast(0 as decimal(10,0))) AS (false <=> CAST(0 AS DECIMAL(10,0)))#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(false <=> CAST(0 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "false <=> cast(0 as decimal(10, 0))" + } ] +} + + +-- !query +SELECT false <=> cast(0 as string) FROM t +-- !query analysis +Project [(false <=> cast(cast(0 as string) as boolean)) AS (false <=> CAST(0 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -910,9 +1383,31 @@ Project [(cast(false as decimal(10,0)) <=> cast(0 as decimal(10,0))) AS (false < -- !query -SELECT false <=> cast(0 as string) FROM t +SELECT false <=> cast('0' as binary) FROM t -- !query analysis -Project [(false <=> cast(cast(0 as string) as boolean)) AS (false <=> CAST(0 AS STRING))#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BINARY\"", + "sqlExpr" : "\"(false <=> CAST(0 AS BINARY))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "false <=> cast('0' as binary)" + } ] +} + + +-- !query +SELECT false <=> cast(0 as boolean) FROM t +-- !query analysis +Project [(false <=> cast(0 as boolean)) AS (false <=> CAST(0 AS BOOLEAN))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -921,157 +1416,201 @@ Project [(false <=> cast(cast(0 as string) as boolean)) AS (false <=> CAST(0 AS -- !query -SELECT false <=> cast('0' as binary) FROM t +SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + 
"left" : "\"BOOLEAN\"", + "right" : "\"TIMESTAMP\"", + "sqlExpr" : "\"(false <=> CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "false <=> cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} + + +-- !query +SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DATE\"", + "sqlExpr" : "\"(false <=> CAST(2017-12-11 09:30:00 AS DATE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 52, + "fragment" : "false <=> cast('2017-12-11 09:30:00' as date)" + } ] +} + + +-- !query +SELECT cast(0 as tinyint) = false FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS TINYINT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(0 as tinyint) = false" + } ] +} + + +-- !query +SELECT cast(0 as smallint) = false FROM t +-- !query analysis +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS SMALLINT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as smallint) = false" + } ] +} + + +-- !query +SELECT cast(0 as int) = false FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"BINARY\"", - "sqlExpr" : "\"(false <=> CAST(0 AS BINARY))\"" + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS INT) = false)\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 36, - "fragment" : "false <=> cast('0' as binary)" + "stopIndex" : 29, + "fragment" : "cast(0 as int) = false" } ] } -- !query -SELECT false <=> cast(0 as boolean) FROM t --- !query analysis -Project [(false <=> cast(0 as boolean)) AS (false <=> CAST(0 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t +SELECT cast(0 as bigint) = false FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"TIMESTAMP\"", - "sqlExpr" : "\"(false <=> CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS BIGINT) = false)\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 59, - "fragment" : "false <=> cast('2017-12-11 09:30:00.0' as timestamp)" + "stopIndex" : 32, + 
"fragment" : "cast(0 as bigint) = false" } ] } -- !query -SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t +SELECT cast(0 as float) = false FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DATE\"", - "sqlExpr" : "\"(false <=> CAST(2017-12-11 09:30:00 AS DATE))\"" + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS FLOAT) = false)\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 52, - "fragment" : "false <=> cast('2017-12-11 09:30:00' as date)" + "stopIndex" : 31, + "fragment" : "cast(0 as float) = false" } ] } --- !query -SELECT cast(0 as tinyint) = false FROM t --- !query analysis -Project [(cast(0 as tinyint) = cast(false as tinyint)) AS (CAST(0 AS TINYINT) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT cast(0 as smallint) = false FROM t --- !query analysis -Project [(cast(0 as smallint) = cast(false as smallint)) AS (CAST(0 AS SMALLINT) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT cast(0 as int) = false FROM t --- !query analysis -Project [(cast(0 as int) = cast(false as int)) AS (CAST(0 AS INT) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT cast(0 as bigint) = false FROM t --- !query analysis -Project [(cast(0 as bigint) = cast(false as bigint)) AS (CAST(0 AS BIGINT) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - --- !query -SELECT cast(0 as float) = false FROM t --- !query analysis -Project [(cast(0 as float) = cast(false as float)) AS (CAST(0 AS FLOAT) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation - - -- !query SELECT cast(0 as double) = false FROM t -- !query analysis -Project [(cast(0 as double) = cast(false as double)) AS (CAST(0 AS DOUBLE) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DOUBLE) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(0 as double) = false" + } ] +} -- !query SELECT cast(0 as decimal(10, 0)) = false FROM t -- !query analysis -Project [(cast(0 as decimal(10,0)) = cast(false as decimal(10,0))) AS (CAST(0 AS DECIMAL(10,0)) = false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DECIMAL(10,0)) = false)\"" + }, + 
"queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "cast(0 as decimal(10, 0)) = false" + } ] +} -- !query @@ -1165,78 +1704,155 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(0 as tinyint) <=> false FROM t -- !query analysis -Project [(cast(0 as tinyint) <=> cast(false as tinyint)) AS (CAST(0 AS TINYINT) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS TINYINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast(0 as tinyint) <=> false" + } ] +} -- !query SELECT cast(0 as smallint) <=> false FROM t -- !query analysis -Project [(cast(0 as smallint) <=> cast(false as smallint)) AS (CAST(0 AS SMALLINT) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS SMALLINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "cast(0 as smallint) <=> false" + } ] +} -- !query SELECT cast(0 as int) <=> false FROM t -- !query analysis -Project [(cast(0 as int) <=> cast(false as int)) AS (CAST(0 AS INT) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS INT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(0 as int) <=> false" + } ] +} -- !query SELECT cast(0 as bigint) <=> false FROM t -- !query analysis -Project [(cast(0 as bigint) <=> cast(false as bigint)) AS (CAST(0 AS BIGINT) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS BIGINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as bigint) <=> false" + } ] +} -- !query SELECT cast(0 as float) <=> false FROM t -- !query analysis -Project [(cast(0 as float) <=> cast(false as float)) AS (CAST(0 AS FLOAT) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", 
+ "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS FLOAT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(0 as float) <=> false" + } ] +} -- !query SELECT cast(0 as double) <=> false FROM t -- !query analysis -Project [(cast(0 as double) <=> cast(false as double)) AS (CAST(0 AS DOUBLE) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DOUBLE) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as double) <=> false" + } ] +} -- !query SELECT cast(0 as decimal(10, 0)) <=> false FROM t -- !query analysis -Project [(cast(0 as decimal(10,0)) <=> cast(false as decimal(10,0))) AS (CAST(0 AS DECIMAL(10,0)) <=> false)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DECIMAL(10,0)) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "cast(0 as decimal(10, 0)) <=> false" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/caseWhenCoercion.sql.out index 4124fc57996cd..45ce3bdd96d90 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/caseWhenCoercion.sql.out @@ -54,7 +54,7 @@ Project [CASE WHEN true THEN cast(cast(1 as tinyint) as bigint) ELSE cast(2 as b -- !query SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as float) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as tinyint) as float) ELSE cast(2 as float) END AS CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS FLOAT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as tinyint) as double) ELSE cast(cast(2 as float) as double) END AS CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS FLOAT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -87,7 +87,7 @@ Project [CASE WHEN true THEN cast(cast(1 as tinyint) as decimal(10,0)) ELSE cast -- !query SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as tinyint) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(cast(1 as tinyint) as bigint) ELSE cast(cast(2 as string) as bigint) END AS CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS STRING) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -230,7 
+230,7 @@ Project [CASE WHEN true THEN cast(cast(1 as smallint) as bigint) ELSE cast(2 as -- !query SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as float) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as smallint) as float) ELSE cast(2 as float) END AS CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS FLOAT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as smallint) as double) ELSE cast(cast(2 as float) as double) END AS CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS FLOAT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -263,7 +263,7 @@ Project [CASE WHEN true THEN cast(cast(1 as smallint) as decimal(10,0)) ELSE cas -- !query SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as smallint) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(cast(1 as smallint) as bigint) ELSE cast(cast(2 as string) as bigint) END AS CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS STRING) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -406,7 +406,7 @@ Project [CASE WHEN true THEN cast(cast(1 as int) as bigint) ELSE cast(2 as bigin -- !query SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as float) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as int) as float) ELSE cast(2 as float) END AS CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS FLOAT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as int) as double) ELSE cast(cast(2 as float) as double) END AS CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS FLOAT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -439,7 +439,7 @@ Project [CASE WHEN true THEN cast(cast(1 as int) as decimal(10,0)) ELSE cast(2 a -- !query SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as int) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(cast(1 as int) as bigint) ELSE cast(cast(2 as string) as bigint) END AS CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS STRING) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -582,7 +582,7 @@ Project [CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as bigint) END AS CAS -- !query SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as float) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as bigint) as float) ELSE cast(2 as float) END AS CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS FLOAT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as bigint) as double) ELSE cast(cast(2 as float) as double) END AS CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS FLOAT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -615,7 +615,7 @@ Project [CASE WHEN true THEN cast(cast(1 as bigint) as decimal(20,0)) ELSE cast( -- !query SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as bigint) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(1 as bigint) ELSE cast(cast(2 as string) as bigint) END 
AS CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS STRING) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -714,7 +714,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as tinyint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as tinyint) as float) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS TINYINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 as tinyint) as double) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS TINYINT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -725,7 +725,7 @@ Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as tinyint) as fl -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as smallint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as smallint) as float) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS SMALLINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 as smallint) as double) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS SMALLINT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -736,7 +736,7 @@ Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as smallint) as f -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as int) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as int) as float) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS INT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 as int) as double) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS INT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -747,7 +747,7 @@ Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as int) as float) -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as bigint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as float) ELSE cast(cast(2 as bigint) as float) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BIGINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 as bigint) as double) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BIGINT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -791,7 +791,7 @@ Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 a -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as float) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(cast(1 as float) as double) ELSE cast(cast(2 as string) as double) END AS CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS STRING) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -967,7 +967,7 @@ Project [CASE WHEN true THEN cast(1 as double) ELSE cast(cast(2 as decimal(10,0) -- !query SELECT CASE WHEN true THEN cast(1 as double) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as double) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 
AS DOUBLE) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(1 as double) ELSE cast(cast(2 as string) as double) END AS CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS STRING) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1143,7 +1143,7 @@ Project [CASE WHEN true THEN cast(1 as decimal(10,0)) ELSE cast(2 as decimal(10, -- !query SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(1 as decimal(10,0)) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(cast(1 as decimal(10,0)) as double) ELSE cast(cast(2 as string) as double) END AS CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS STRING) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1242,7 +1242,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as tinyint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as tinyint) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS TINYINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as bigint) ELSE cast(cast(2 as tinyint) as bigint) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS TINYINT) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1253,7 +1253,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as tinyint) as s -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as smallint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as smallint) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS SMALLINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as bigint) ELSE cast(cast(2 as smallint) as bigint) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS SMALLINT) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1264,7 +1264,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as smallint) as -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as int) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as int) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS INT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as bigint) ELSE cast(cast(2 as int) as bigint) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS INT) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1275,7 +1275,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as int) as strin -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as bigint) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as bigint) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BIGINT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as bigint) ELSE cast(2 as bigint) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BIGINT) END#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1286,7 +1286,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as bigint) as st -- !query SELECT CASE WHEN true 
THEN cast(1 as string) ELSE cast(2 as float) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as float) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS FLOAT) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as double) ELSE cast(cast(2 as float) as double) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS FLOAT) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1297,7 +1297,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as float) as str -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as double) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as double) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS DOUBLE) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as double) ELSE cast(2 as double) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS DOUBLE) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1308,7 +1308,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as double) as st -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as decimal(10, 0)) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2 as decimal(10,0)) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS DECIMAL(10,0)) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as double) ELSE cast(cast(2 as decimal(10,0)) as double) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS DECIMAL(10,0)) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1330,51 +1330,29 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(2 as string) END AS CAS -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BINARY) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 73, - "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END" - } ] -} +Project [CASE WHEN true THEN cast(cast(1 as string) as binary) ELSE cast(2 as binary) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BINARY) END#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 72, - "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END" - } ] -} +Project [CASE WHEN true THEN cast(cast(1 as string) as boolean) ELSE cast(2 as boolean) END AS CASE WHEN true THEN 
CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00.0' as timestamp) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2017-12-11 09:30:00.0 as timestamp) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as timestamp) ELSE cast(2017-12-11 09:30:00.0 as timestamp) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1385,7 +1363,7 @@ Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2017-12-11 09:30:0 -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00' as date) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(1 as string) ELSE cast(cast(2017-12-11 09:30:00 as date) as string) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2017-12-11 09:30:00 AS DATE) END#x] +Project [CASE WHEN true THEN cast(cast(1 as string) as date) ELSE cast(2017-12-11 09:30:00 as date) END AS CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2017-12-11 09:30:00 AS DATE) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1550,23 +1528,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS BINARY) ELSE CAST(2 AS STRING) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 73, - "fragment" : "CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END" - } ] -} +Project [CASE WHEN true THEN cast(1 as binary) ELSE cast(cast(2 as string) as binary) END AS CASE WHEN true THEN CAST(1 AS BINARY) ELSE CAST(2 AS STRING) END#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1803,23 +1770,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 72, - "fragment" : "CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END" - } ] -} +Project [CASE WHEN true THEN cast(1 as boolean) ELSE cast(cast(2 as string) as boolean) END AS CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2056,7 +2012,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(2017-12-12 09:30:00.0 as timestamp) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(2017-12-12 09:30:00.0 as timestamp) ELSE cast(cast(2 as string) as timestamp) END AS CASE WHEN true THEN CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) ELSE CAST(2 AS STRING) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2287,7 +2243,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as string) END FROM t -- !query analysis -Project [CASE WHEN true THEN cast(cast(2017-12-12 09:30:00 as date) as string) ELSE cast(2 as string) END AS CASE WHEN true THEN CAST(2017-12-12 09:30:00 AS DATE) ELSE CAST(2 AS STRING) END#x] +Project [CASE WHEN true THEN cast(2017-12-12 09:30:00 as date) ELSE cast(cast(2 as string) as date) END AS CASE WHEN true THEN CAST(2017-12-12 09:30:00 AS DATE) ELSE CAST(2 AS STRING) END#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/concat.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/concat.sql.out index 62e3a87473263..6f3bc9ccb66f3 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/concat.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/concat.sql.out @@ -271,7 +271,7 @@ SELECT (string_array1 || int_array2) sti_array FROM various_arrays -- !query analysis -Project [concat(cast(tinyint_array1#x as array<smallint>), smallint_array2#x) AS ts_array#x, concat(cast(smallint_array1#x as array<int>), int_array2#x) AS si_array#x, concat(cast(int_array1#x as array<bigint>), bigint_array2#x) AS ib_array#x, concat(cast(bigint_array1#x as array<decimal(20,0)>), cast(decimal_array2#x as array<decimal(20,0)>)) AS bd_array#x, concat(cast(decimal_array1#x as array<double>), double_array2#x) AS dd_array#x, concat(double_array1#x, cast(float_array2#x as array<double>)) AS df_array#x, concat(string_array1#x, cast(data_array2#x as array<string>)) AS std_array#x, concat(cast(timestamp_array1#x as array<string>), string_array2#x) AS tst_array#x, concat(string_array1#x, cast(int_array2#x as array<string>)) AS sti_array#x] +Project [concat(cast(tinyint_array1#x as array<smallint>), smallint_array2#x) AS ts_array#x, concat(cast(smallint_array1#x as array<int>), int_array2#x) AS si_array#x, concat(cast(int_array1#x as array<bigint>), bigint_array2#x) AS ib_array#x, concat(cast(bigint_array1#x as array<decimal(20,0)>), cast(decimal_array2#x as array<decimal(20,0)>)) AS bd_array#x, concat(cast(decimal_array1#x as array<double>), double_array2#x) AS dd_array#x, concat(double_array1#x, cast(float_array2#x as array<double>)) AS df_array#x, concat(cast(string_array1#x as array<date>), data_array2#x) AS std_array#x, concat(timestamp_array1#x, cast(string_array2#x as array<timestamp>)) AS tst_array#x, concat(cast(string_array1#x as array<bigint>), cast(int_array2#x as array<bigint>)) AS sti_array#x] +- SubqueryAlias various_arrays +- View (`various_arrays`, [boolean_array1#x, boolean_array2#x, tinyint_array1#x, tinyint_array2#x, smallint_array1#x, smallint_array2#x, int_array1#x, int_array2#x, bigint_array1#x, bigint_array2#x, decimal_array1#x, decimal_array2#x, double_array1#x, double_array2#x, float_array1#x,
float_array2#x, date_array1#x, data_array2#x, timestamp_array1#x, timestamp_array2#x, string_array1#x, string_array2#x, array_array1#x, array_array2#x, ... 4 more fields]) +- Project [cast(boolean_array1#x as array<boolean>) AS boolean_array1#x, cast(boolean_array2#x as array<boolean>) AS boolean_array2#x, cast(tinyint_array1#x as array<tinyint>) AS tinyint_array1#x, cast(tinyint_array2#x as array<tinyint>) AS tinyint_array2#x, cast(smallint_array1#x as array<smallint>) AS smallint_array1#x, cast(smallint_array2#x as array<smallint>) AS smallint_array2#x, cast(int_array1#x as array<int>) AS int_array1#x, cast(int_array2#x as array<int>) AS int_array2#x, cast(bigint_array1#x as array<bigint>) AS bigint_array1#x, cast(bigint_array2#x as array<bigint>) AS bigint_array2#x, cast(decimal_array1#x as array<decimal(19,0)>) AS decimal_array1#x, cast(decimal_array2#x as array<decimal(19,0)>) AS decimal_array2#x, cast(double_array1#x as array<double>) AS double_array1#x, cast(double_array2#x as array<double>) AS double_array2#x, cast(float_array1#x as array<float>) AS float_array1#x, cast(float_array2#x as array<float>) AS float_array2#x, cast(date_array1#x as array<date>) AS date_array1#x, cast(data_array2#x as array<date>) AS data_array2#x, cast(timestamp_array1#x as array<timestamp>) AS timestamp_array1#x, cast(timestamp_array2#x as array<timestamp>) AS timestamp_array2#x, cast(string_array1#x as array<string>) AS string_array1#x, cast(string_array2#x as array<string>) AS string_array2#x, cast(array_array1#x as array<array<string>>) AS array_array1#x, cast(array_array2#x as array<array<string>>) AS array_array2#x, ... 4 more fields] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out index 093297f03edb7..4458e15e53cf7 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out @@ -8594,7 +8594,7 @@ Project [(cast(1 as decimal(20,0)) = cast(cast(1 as decimal(10,0)) as decimal(20 -- !query SELECT cast(1 as decimal(3, 0)) = cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) = cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) = cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -8605,7 +8605,7 @@ Project [(cast(1 as decimal(3,0)) = cast(cast(1 as string) as decimal(3,0))) AS -- !query SELECT cast(1 as decimal(5, 0)) = cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) = cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) = cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -8616,7 +8616,7 @@ Project [(cast(1 as decimal(5,0)) = cast(cast(1 as string) as decimal(5,0))) AS -- !query SELECT cast(1 as decimal(10, 0)) = cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) = cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -8627,7 +8627,7 @@ Project
[(cast(1 as decimal(10,0)) = cast(cast(1 as string) as decimal(10,0))) A -- !query SELECT cast(1 as decimal(20, 0)) = cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) = cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) = cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -8726,45 +8726,89 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) = cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) = cast(cast(1 as boolean) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) = cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) = cast(cast(1 as boolean) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) = cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(cast(1 as boolean) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) = cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) = cast(cast(1 as boolean) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : 
"42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) = cast(1 as boolean)" + } ] +} -- !query @@ -9826,7 +9870,7 @@ Project [(cast(1 as decimal(20,0)) <=> cast(cast(1 as decimal(10,0)) as decimal( -- !query SELECT cast(1 as decimal(3, 0)) <=> cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) <=> cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) <=> cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -9837,7 +9881,7 @@ Project [(cast(1 as decimal(3,0)) <=> cast(cast(1 as string) as decimal(3,0))) A -- !query SELECT cast(1 as decimal(5, 0)) <=> cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) <=> cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) <=> cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -9848,7 +9892,7 @@ Project [(cast(1 as decimal(5,0)) <=> cast(cast(1 as string) as decimal(5,0))) A -- !query SELECT cast(1 as decimal(10, 0)) <=> cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <=> cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <=> cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -9859,7 +9903,7 @@ Project [(cast(1 as decimal(10,0)) <=> cast(cast(1 as string) as decimal(10,0))) -- !query SELECT cast(1 as decimal(20, 0)) <=> cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) <=> cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) <=> cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -9958,45 +10002,89 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) <=> cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) <=> cast(cast(1 as boolean) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(3, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) <=> cast(1 as boolean) FROM t -- !query 
analysis -Project [(cast(1 as decimal(5,0)) <=> cast(cast(1 as boolean) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(5, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <=> cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <=> cast(cast(1 as boolean) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(10, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) <=> cast(1 as boolean) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) <=> cast(cast(1 as boolean) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS BOOLEAN))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(20, 0)) <=> cast(1 as boolean)" + } ] +} -- !query @@ -11058,7 +11146,7 @@ Project [(cast(1 as decimal(20,0)) < cast(cast(1 as decimal(10,0)) as decimal(20 -- !query SELECT cast(1 as decimal(3, 0)) < cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) < cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) < CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) < cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) < CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -11069,7 +11157,7 @@ Project [(cast(1 as decimal(3,0)) < cast(cast(1 as string) as decimal(3,0))) AS -- !query SELECT cast(1 as decimal(5, 0)) < cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) < cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) < CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) < cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) < CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -11080,7 +11168,7 @@ Project 
[(cast(1 as decimal(5,0)) < cast(cast(1 as string) as decimal(5,0))) AS -- !query SELECT cast(1 as decimal(10, 0)) < cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) < cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) < CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) < cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) < CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -11091,7 +11179,7 @@ Project [(cast(1 as decimal(10,0)) < cast(cast(1 as string) as decimal(10,0))) A -- !query SELECT cast(1 as decimal(20, 0)) < cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) < cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) < CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) < cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) < CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -12334,7 +12422,7 @@ Project [(cast(1 as decimal(20,0)) <= cast(cast(1 as decimal(10,0)) as decimal(2 -- !query SELECT cast(1 as decimal(3, 0)) <= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) <= cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) <= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -12345,7 +12433,7 @@ Project [(cast(1 as decimal(3,0)) <= cast(cast(1 as string) as decimal(3,0))) AS -- !query SELECT cast(1 as decimal(5, 0)) <= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) <= cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) <= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -12356,7 +12444,7 @@ Project [(cast(1 as decimal(5,0)) <= cast(cast(1 as string) as decimal(5,0))) AS -- !query SELECT cast(1 as decimal(10, 0)) <= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <= cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -12367,7 +12455,7 @@ Project [(cast(1 as decimal(10,0)) <= cast(cast(1 as string) as decimal(10,0))) -- !query SELECT cast(1 as decimal(20, 0)) <= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) <= cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) <= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -13610,7 +13698,7 @@ Project [(cast(1 as decimal(20,0)) > cast(cast(1 as decimal(10,0)) as decimal(20 -- !query SELECT cast(1 as decimal(3, 0)) > cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) > cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) > 
CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) > cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) > CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -13621,7 +13709,7 @@ Project [(cast(1 as decimal(3,0)) > cast(cast(1 as string) as decimal(3,0))) AS -- !query SELECT cast(1 as decimal(5, 0)) > cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) > cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) > CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) > cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) > CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -13632,7 +13720,7 @@ Project [(cast(1 as decimal(5,0)) > cast(cast(1 as string) as decimal(5,0))) AS -- !query SELECT cast(1 as decimal(10, 0)) > cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) > cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) > CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) > cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) > CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -13643,7 +13731,7 @@ Project [(cast(1 as decimal(10,0)) > cast(cast(1 as string) as decimal(10,0))) A -- !query SELECT cast(1 as decimal(20, 0)) > cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) > cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) > CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) > cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) > CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -14886,7 +14974,7 @@ Project [(cast(1 as decimal(20,0)) >= cast(cast(1 as decimal(10,0)) as decimal(2 -- !query SELECT cast(1 as decimal(3, 0)) >= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(3,0)) >= cast(cast(1 as string) as decimal(3,0))) AS (CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(3,0)) as double) >= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -14897,7 +14985,7 @@ Project [(cast(1 as decimal(3,0)) >= cast(cast(1 as string) as decimal(3,0))) AS -- !query SELECT cast(1 as decimal(5, 0)) >= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(5,0)) >= cast(cast(1 as string) as decimal(5,0))) AS (CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(5,0)) as double) >= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -14908,7 +14996,7 @@ Project [(cast(1 as decimal(5,0)) >= cast(cast(1 as string) as decimal(5,0))) AS -- !query SELECT cast(1 as decimal(10, 0)) >= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) >= cast(cast(1 as string) as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(10,0)) as double) >= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -14919,7 +15007,7 @@ Project [(cast(1 as 
decimal(10,0)) >= cast(cast(1 as string) as decimal(10,0))) -- !query SELECT cast(1 as decimal(20, 0)) >= cast(1 as string) FROM t -- !query analysis -Project [(cast(1 as decimal(20,0)) >= cast(cast(1 as string) as decimal(20,0))) AS (CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS STRING))#x] +Project [(cast(cast(1 as decimal(20,0)) as double) >= cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -16162,7 +16250,7 @@ Project [NOT (cast(1 as decimal(20,0)) = cast(cast(1 as decimal(10,0)) as decima -- !query SELECT cast(1 as decimal(3, 0)) <> cast(1 as string) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(3,0)) = cast(cast(1 as string) as decimal(3,0))) AS (NOT (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS STRING)))#x] +Project [NOT (cast(cast(1 as decimal(3,0)) as double) = cast(cast(1 as string) as double)) AS (NOT (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -16173,7 +16261,7 @@ Project [NOT (cast(1 as decimal(3,0)) = cast(cast(1 as string) as decimal(3,0))) -- !query SELECT cast(1 as decimal(5, 0)) <> cast(1 as string) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(5,0)) = cast(cast(1 as string) as decimal(5,0))) AS (NOT (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS STRING)))#x] +Project [NOT (cast(cast(1 as decimal(5,0)) as double) = cast(cast(1 as string) as double)) AS (NOT (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -16184,7 +16272,7 @@ Project [NOT (cast(1 as decimal(5,0)) = cast(cast(1 as string) as decimal(5,0))) -- !query SELECT cast(1 as decimal(10, 0)) <> cast(1 as string) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(cast(1 as string) as decimal(10,0))) AS (NOT (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS STRING)))#x] +Project [NOT (cast(cast(1 as decimal(10,0)) as double) = cast(cast(1 as string) as double)) AS (NOT (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -16195,7 +16283,7 @@ Project [NOT (cast(1 as decimal(10,0)) = cast(cast(1 as string) as decimal(10,0) -- !query SELECT cast(1 as decimal(20, 0)) <> cast(1 as string) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(20,0)) = cast(cast(1 as string) as decimal(20,0))) AS (NOT (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS STRING)))#x] +Project [NOT (cast(cast(1 as decimal(20,0)) as double) = cast(cast(1 as string) as double)) AS (NOT (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -16294,45 +16382,89 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) <> cast(1 as boolean) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(3,0)) = cast(cast(1 as boolean) as decimal(3,0))) AS (NOT (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + 
"objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) <> cast(1 as boolean) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(5,0)) = cast(cast(1 as boolean) as decimal(5,0))) AS (NOT (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <> cast(1 as boolean) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(cast(1 as boolean) as decimal(10,0))) AS (NOT (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) <> cast(1 as boolean) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(20,0)) = cast(cast(1 as boolean) as decimal(20,0))) AS (NOT (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN)))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) <> cast(1 as boolean)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/division.sql.out index 22b870bc0b420..f168b3221150e 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/division.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/division.sql.out @@ -87,7 +87,7 @@ Project [(cast(cast(1 as tinyint) as decimal(3,0)) / cast(1 as decimal(10,0))) A -- !query SELECT cast(1 as tinyint) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) / cast(cast(cast(1 as string) as double) as double)) AS (CAST(1 AS TINYINT) / CAST(1 AS STRING))#x] +Project [(cast(cast(cast(1 as tinyint) as bigint) as double) / cast(cast(cast(1 as string) as bigint) as double)) AS (CAST(1 AS 
TINYINT) / CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -263,7 +263,7 @@ Project [(cast(cast(1 as smallint) as decimal(5,0)) / cast(1 as decimal(10,0))) -- !query SELECT cast(1 as smallint) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) / cast(cast(cast(1 as string) as double) as double)) AS (CAST(1 AS SMALLINT) / CAST(1 AS STRING))#x] +Project [(cast(cast(cast(1 as smallint) as bigint) as double) / cast(cast(cast(1 as string) as bigint) as double)) AS (CAST(1 AS SMALLINT) / CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -439,7 +439,7 @@ Project [(cast(cast(1 as int) as decimal(10,0)) / cast(1 as decimal(10,0))) AS ( -- !query SELECT cast(1 as int) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) / cast(cast(cast(1 as string) as double) as double)) AS (CAST(1 AS INT) / CAST(1 AS STRING))#x] +Project [(cast(cast(cast(1 as int) as bigint) as double) / cast(cast(cast(1 as string) as bigint) as double)) AS (CAST(1 AS INT) / CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -615,7 +615,7 @@ Project [(cast(cast(1 as bigint) as decimal(20,0)) / cast(1 as decimal(10,0))) A -- !query SELECT cast(1 as bigint) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) / cast(cast(cast(1 as string) as double) as double)) AS (CAST(1 AS BIGINT) / CAST(1 AS STRING))#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(cast(1 as string) as bigint) as double)) AS (CAST(1 AS BIGINT) / CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -791,7 +791,7 @@ Project [(cast(cast(1 as float) as double) / cast(cast(cast(1 as decimal(10,0)) -- !query SELECT cast(1 as float) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as float) as double) / cast(cast(cast(1 as string) as double) as double)) AS (CAST(1 AS FLOAT) / CAST(1 AS STRING))#x] +Project [(cast(cast(1 as float) as double) / cast(cast(1 as string) as double)) AS (CAST(1 AS FLOAT) / CAST(1 AS STRING))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1242,7 +1242,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as string) / cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(cast(1 as string) as double) / cast(cast(1 as tinyint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS TINYINT))#x] +Project [(cast(cast(cast(1 as string) as bigint) as double) / cast(cast(cast(1 as tinyint) as bigint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1253,7 +1253,7 @@ Project [(cast(cast(1 as string) as double) / cast(cast(1 as tinyint) as double) -- !query SELECT cast(1 as string) / cast(1 as smallint) FROM t -- !query analysis -Project [(cast(cast(1 as string) as double) / cast(cast(1 as smallint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS SMALLINT))#x] +Project [(cast(cast(cast(1 as string) as bigint) as double) / cast(cast(cast(1 as smallint) as bigint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1264,7 +1264,7 @@ Project [(cast(cast(1 as string) as double) / cast(cast(1 as smallint) as double -- !query SELECT cast(1 as string) / cast(1 as int) FROM t -- 
!query analysis -Project [(cast(cast(1 as string) as double) / cast(cast(1 as int) as double)) AS (CAST(1 AS STRING) / CAST(1 AS INT))#x] +Project [(cast(cast(cast(1 as string) as bigint) as double) / cast(cast(cast(1 as int) as bigint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1275,7 +1275,7 @@ Project [(cast(cast(1 as string) as double) / cast(cast(1 as int) as double)) AS -- !query SELECT cast(1 as string) / cast(1 as bigint) FROM t -- !query analysis -Project [(cast(cast(1 as string) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS BIGINT))#x] +Project [(cast(cast(cast(1 as string) as bigint) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS STRING) / CAST(1 AS BIGINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1319,12 +1319,23 @@ Project [(cast(cast(1 as string) as double) / cast(cast(1 as decimal(10,0)) as d -- !query SELECT cast(1 as string) / cast(1 as string) FROM t -- !query analysis -Project [(cast(cast(1 as string) as double) / cast(cast(1 as string) as double)) AS (CAST(1 AS STRING) / CAST(1 AS STRING))#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", + "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS STRING))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 44, + "fragment" : "cast(1 as string) / cast(1 as string)" + } ] +} -- !query @@ -1332,11 +1343,11 @@ SELECT cast(1 as string) / cast('1' as binary) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -1354,11 +1365,11 @@ SELECT cast(1 as string) / cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -1376,11 +1387,11 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -1398,11 +1409,11 @@ SELECT cast(1 as string) / 
cast('2017-12-11 09:30:00' as date) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -1574,11 +1585,11 @@ SELECT cast('1' as binary) / cast(1 as string) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -1838,11 +1849,11 @@ SELECT cast(1 as boolean) / cast(1 as string) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -2102,11 +2113,11 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -2366,11 +2377,11 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) / CAST(1 AS STRING))\"" }, "queryContext" : [ { diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/elt.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/elt.sql.out index f4902012f0f96..f5b0740f2b462 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/elt.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/elt.sql.out @@ -11,7 +11,7 @@ FROM ( FROM range(10) ) -- !query analysis -Project [elt(2, col1#x, cast(col2#xL as string), col3#x, cast(col4#x as string), cast(col5#x as string), false) AS col#x] +Project [elt(2, col1#x, cast(col2#xL as string), col3#x, cast(col4#x as string), cast(col5#x as string), true) AS col#x] +- SubqueryAlias __auto_generated_subquery_name +- Project [prefix_ AS col1#x, id#xL AS col2#xL, 
cast((id#xL + cast(1 as bigint)) as string) AS col3#x, encode(cast((id#xL + cast(2 as bigint)) as string), utf-8) AS col4#x, cast(id#xL as double) AS col5#x] +- Range (0, 10, step=1) @@ -28,7 +28,7 @@ FROM ( FROM range(10) ) -- !query analysis -Project [elt(3, col1#x, col2#x, cast(col3#x as string), cast(col4#x as string), false) AS col#x] +Project [elt(3, col1#x, col2#x, cast(col3#x as string), cast(col4#x as string), true) AS col#x] +- SubqueryAlias __auto_generated_subquery_name +- Project [cast(id#xL as string) AS col1#x, cast((id#xL + cast(1 as bigint)) as string) AS col2#x, encode(cast((id#xL + cast(2 as bigint)) as string), utf-8) AS col3#x, encode(cast((id#xL + cast(3 as bigint)) as string), utf-8) AS col4#x] +- Range (0, 10, step=1) @@ -49,7 +49,7 @@ FROM ( FROM range(10) ) -- !query analysis -Project [elt(1, cast(col1#x as string), cast(col2#x as string), false) AS col#x] +Project [elt(1, cast(col1#x as string), cast(col2#x as string), true) AS col#x] +- SubqueryAlias __auto_generated_subquery_name +- Project [encode(cast(id#xL as string), utf-8) AS col1#x, encode(cast((id#xL + cast(1 as bigint)) as string), utf-8) AS col2#x] +- Range (0, 10, step=1) @@ -70,7 +70,7 @@ FROM ( FROM range(10) ) -- !query analysis -Project [elt(2, col1#x, col2#x, false) AS col#x] +Project [elt(2, col1#x, col2#x, true) AS col#x] +- SubqueryAlias __auto_generated_subquery_name +- Project [encode(cast(id#xL as string), utf-8) AS col1#x, encode(cast((id#xL + cast(1 as bigint)) as string), utf-8) AS col2#x] +- Range (0, 10, step=1) diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/ifCoercion.sql.out index b1d07bd7be902..791b75a869718 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/ifCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/ifCoercion.sql.out @@ -54,7 +54,7 @@ Project [if (true) cast(cast(1 as tinyint) as bigint) else cast(2 as bigint) AS -- !query SELECT IF(true, cast(1 as tinyint), cast(2 as float)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as tinyint) as float) else cast(2 as float) AS (IF(true, CAST(1 AS TINYINT), CAST(2 AS FLOAT)))#x] +Project [if (true) cast(cast(1 as tinyint) as double) else cast(cast(2 as float) as double) AS (IF(true, CAST(1 AS TINYINT), CAST(2 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -87,7 +87,7 @@ Project [if (true) cast(cast(1 as tinyint) as decimal(10,0)) else cast(2 as deci -- !query SELECT IF(true, cast(1 as tinyint), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as tinyint) as string) else cast(2 as string) AS (IF(true, CAST(1 AS TINYINT), CAST(2 AS STRING)))#x] +Project [if (true) cast(cast(1 as tinyint) as bigint) else cast(cast(2 as string) as bigint) AS (IF(true, CAST(1 AS TINYINT), CAST(2 AS STRING)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -230,7 +230,7 @@ Project [if (true) cast(cast(1 as smallint) as bigint) else cast(2 as bigint) AS -- !query SELECT IF(true, cast(1 as smallint), cast(2 as float)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as smallint) as float) else cast(2 as float) AS (IF(true, CAST(1 AS SMALLINT), CAST(2 AS FLOAT)))#x] +Project [if (true) cast(cast(1 as smallint) as double) else cast(cast(2 as float) as double) AS (IF(true, CAST(1 AS SMALLINT), CAST(2 AS 
FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -263,7 +263,7 @@ Project [if (true) cast(cast(1 as smallint) as decimal(10,0)) else cast(2 as dec -- !query SELECT IF(true, cast(1 as smallint), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as smallint) as string) else cast(2 as string) AS (IF(true, CAST(1 AS SMALLINT), CAST(2 AS STRING)))#x] +Project [if (true) cast(cast(1 as smallint) as bigint) else cast(cast(2 as string) as bigint) AS (IF(true, CAST(1 AS SMALLINT), CAST(2 AS STRING)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -406,7 +406,7 @@ Project [if (true) cast(cast(1 as int) as bigint) else cast(2 as bigint) AS (IF( -- !query SELECT IF(true, cast(1 as int), cast(2 as float)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as int) as float) else cast(2 as float) AS (IF(true, CAST(1 AS INT), CAST(2 AS FLOAT)))#x] +Project [if (true) cast(cast(1 as int) as double) else cast(cast(2 as float) as double) AS (IF(true, CAST(1 AS INT), CAST(2 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -439,7 +439,7 @@ Project [if (true) cast(cast(1 as int) as decimal(10,0)) else cast(2 as decimal( -- !query SELECT IF(true, cast(1 as int), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as int) as string) else cast(2 as string) AS (IF(true, CAST(1 AS INT), CAST(2 AS STRING)))#x] +Project [if (true) cast(cast(1 as int) as bigint) else cast(cast(2 as string) as bigint) AS (IF(true, CAST(1 AS INT), CAST(2 AS STRING)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -582,7 +582,7 @@ Project [if (true) cast(1 as bigint) else cast(2 as bigint) AS (IF(true, CAST(1 -- !query SELECT IF(true, cast(1 as bigint), cast(2 as float)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as bigint) as float) else cast(2 as float) AS (IF(true, CAST(1 AS BIGINT), CAST(2 AS FLOAT)))#x] +Project [if (true) cast(cast(1 as bigint) as double) else cast(cast(2 as float) as double) AS (IF(true, CAST(1 AS BIGINT), CAST(2 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -615,7 +615,7 @@ Project [if (true) cast(cast(1 as bigint) as decimal(20,0)) else cast(cast(2 as -- !query SELECT IF(true, cast(1 as bigint), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as bigint) as string) else cast(2 as string) AS (IF(true, CAST(1 AS BIGINT), CAST(2 AS STRING)))#x] +Project [if (true) cast(1 as bigint) else cast(cast(2 as string) as bigint) AS (IF(true, CAST(1 AS BIGINT), CAST(2 AS STRING)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -714,7 +714,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as float), cast(2 as tinyint)) FROM t -- !query analysis -Project [if (true) cast(1 as float) else cast(cast(2 as tinyint) as float) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS TINYINT)))#x] +Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as tinyint) as double) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -725,7 +725,7 @@ Project [if (true) cast(1 as float) else cast(cast(2 as tinyint) as float) AS (I -- !query SELECT IF(true, cast(1 as float), cast(2 as smallint)) FROM t -- !query analysis -Project [if (true) cast(1 as float) else cast(cast(2 as 
smallint) as float) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS SMALLINT)))#x] +Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as smallint) as double) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -736,7 +736,7 @@ Project [if (true) cast(1 as float) else cast(cast(2 as smallint) as float) AS ( -- !query SELECT IF(true, cast(1 as float), cast(2 as int)) FROM t -- !query analysis -Project [if (true) cast(1 as float) else cast(cast(2 as int) as float) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS INT)))#x] +Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as int) as double) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -747,7 +747,7 @@ Project [if (true) cast(1 as float) else cast(cast(2 as int) as float) AS (IF(tr -- !query SELECT IF(true, cast(1 as float), cast(2 as bigint)) FROM t -- !query analysis -Project [if (true) cast(1 as float) else cast(cast(2 as bigint) as float) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS BIGINT)))#x] +Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as bigint) as double) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS BIGINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -791,7 +791,7 @@ Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as decimal( -- !query SELECT IF(true, cast(1 as float), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as float) as string) else cast(2 as string) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS STRING)))#x] +Project [if (true) cast(cast(1 as float) as double) else cast(cast(2 as string) as double) AS (IF(true, CAST(1 AS FLOAT), CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -967,7 +967,7 @@ Project [if (true) cast(1 as double) else cast(cast(2 as decimal(10,0)) as doubl -- !query SELECT IF(true, cast(1 as double), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as double) as string) else cast(2 as string) AS (IF(true, CAST(1 AS DOUBLE), CAST(2 AS STRING)))#x] +Project [if (true) cast(1 as double) else cast(cast(2 as string) as double) AS (IF(true, CAST(1 AS DOUBLE), CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1143,7 +1143,7 @@ Project [if (true) cast(1 as decimal(10,0)) else cast(2 as decimal(10,0)) AS (IF -- !query SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(1 as decimal(10,0)) as string) else cast(2 as string) AS (IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS STRING)))#x] +Project [if (true) cast(cast(1 as decimal(10,0)) as double) else cast(cast(2 as string) as double) AS (IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1242,7 +1242,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as string), cast(2 as tinyint)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as tinyint) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS TINYINT)))#x] +Project [if (true) cast(cast(1 as string) as bigint) else cast(cast(2 as tinyint) as bigint) AS (IF(true, CAST(1 AS STRING), CAST(2 AS TINYINT)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as 
int) AS 1#x] @@ -1253,7 +1253,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as tinyint) as string) AS -- !query SELECT IF(true, cast(1 as string), cast(2 as smallint)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as smallint) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS SMALLINT)))#x] +Project [if (true) cast(cast(1 as string) as bigint) else cast(cast(2 as smallint) as bigint) AS (IF(true, CAST(1 AS STRING), CAST(2 AS SMALLINT)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1264,7 +1264,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as smallint) as string) AS -- !query SELECT IF(true, cast(1 as string), cast(2 as int)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as int) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS INT)))#x] +Project [if (true) cast(cast(1 as string) as bigint) else cast(cast(2 as int) as bigint) AS (IF(true, CAST(1 AS STRING), CAST(2 AS INT)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1275,7 +1275,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as int) as string) AS (IF( -- !query SELECT IF(true, cast(1 as string), cast(2 as bigint)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as bigint) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS BIGINT)))#x] +Project [if (true) cast(cast(1 as string) as bigint) else cast(2 as bigint) AS (IF(true, CAST(1 AS STRING), CAST(2 AS BIGINT)))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1286,7 +1286,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as bigint) as string) AS ( -- !query SELECT IF(true, cast(1 as string), cast(2 as float)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as float) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS FLOAT)))#x] +Project [if (true) cast(cast(1 as string) as double) else cast(cast(2 as float) as double) AS (IF(true, CAST(1 AS STRING), CAST(2 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1297,7 +1297,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as float) as string) AS (I -- !query SELECT IF(true, cast(1 as string), cast(2 as double)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as double) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS DOUBLE)))#x] +Project [if (true) cast(cast(1 as string) as double) else cast(2 as double) AS (IF(true, CAST(1 AS STRING), CAST(2 AS DOUBLE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1308,7 +1308,7 @@ Project [if (true) cast(1 as string) else cast(cast(2 as double) as string) AS ( -- !query SELECT IF(true, cast(1 as string), cast(2 as decimal(10, 0))) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2 as decimal(10,0)) as string) AS (IF(true, CAST(1 AS STRING), CAST(2 AS DECIMAL(10,0))))#x] +Project [if (true) cast(cast(1 as string) as double) else cast(cast(2 as decimal(10,0)) as double) AS (IF(true, CAST(1 AS STRING), CAST(2 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1330,51 +1330,29 @@ Project [if (true) cast(1 as string) else cast(2 as string) AS (IF(true, CAST(1 -- !query SELECT IF(true, cast(1 as string), cast('2' as binary)) FROM t -- !query analysis 
-org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS STRING), CAST(2 AS BINARY)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "IF(true, cast(1 as string), cast('2' as binary))" - } ] -} +Project [if (true) cast(cast(1 as string) as binary) else cast(2 as binary) AS (IF(true, CAST(1 AS STRING), CAST(2 AS BINARY)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "IF(true, cast(1 as string), cast(2 as boolean))" - } ] -} +Project [if (true) cast(cast(1 as string) as boolean) else cast(2 as boolean) AS (IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2017-12-11 09:30:00.0 as timestamp) as string) AS (IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +Project [if (true) cast(cast(1 as string) as timestamp) else cast(2017-12-11 09:30:00.0 as timestamp) AS (IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1385,7 +1363,7 @@ Project [if (true) cast(1 as string) else cast(cast(2017-12-11 09:30:00.0 as tim -- !query SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00' as date)) FROM t -- !query analysis -Project [if (true) cast(1 as string) else cast(cast(2017-12-11 09:30:00 as date) as string) AS (IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE)))#x] +Project [if (true) cast(cast(1 as string) as date) else cast(2017-12-11 09:30:00 as date) AS (IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1550,23 +1528,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS BINARY), CAST(2 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "IF(true, cast('1' as binary), cast(2 as string))" - } ] -} +Project [if (true) cast(1 as binary) else cast(cast(2 as string) as binary) AS (IF(true, CAST(1 AS BINARY), CAST(2 AS STRING)))#x] ++- 
SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1803,23 +1770,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "IF(true, cast(1 as boolean), cast(2 as string))" - } ] -} +Project [if (true) cast(1 as boolean) else cast(cast(2 as string) as boolean) AS (IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2056,7 +2012,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(2017-12-12 09:30:00.0 as timestamp) as string) else cast(2 as string) AS (IF(true, CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(2 AS STRING)))#x] +Project [if (true) cast(2017-12-12 09:30:00.0 as timestamp) else cast(cast(2 as string) as timestamp) AS (IF(true, CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2287,7 +2243,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as string)) FROM t -- !query analysis -Project [if (true) cast(cast(2017-12-12 09:30:00 as date) as string) else cast(2 as string) AS (IF(true, CAST(2017-12-12 09:30:00 AS DATE), CAST(2 AS STRING)))#x] +Project [if (true) cast(2017-12-12 09:30:00 as date) else cast(cast(2 as string) as date) AS (IF(true, CAST(2017-12-12 09:30:00 AS DATE), CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/implicitTypeCasts.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/implicitTypeCasts.sql.out index 43aaea63fd045..977b1e1459c3e 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/implicitTypeCasts.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/implicitTypeCasts.sql.out @@ -10,7 +10,7 @@ CreateViewCommand `t`, SELECT 1, false, false, LocalTempView, UNSUPPORTED, true -- !query SELECT 1 + '2' FROM t -- !query analysis -Project [(cast(1 as double) + cast(2 as double)) AS (1 + 2)#x] +Project [(cast(1 as bigint) + cast(2 as bigint)) AS (1 + 2)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -21,7 +21,7 @@ Project [(cast(1 as double) + cast(2 as double)) AS (1 + 2)#x] -- !query SELECT 1 - '2' FROM t -- !query analysis -Project [(cast(1 as double) - cast(2 as double)) AS (1 - 2)#x] +Project [(cast(1 as bigint) - cast(2 as bigint)) AS (1 - 2)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -32,7 +32,7 @@ Project [(cast(1 as double) - cast(2 as double)) AS (1 - 2)#x] -- !query SELECT 1 * '2' 
FROM t -- !query analysis -Project [(cast(1 as double) * cast(2 as double)) AS (1 * 2)#x] +Project [(cast(1 as bigint) * cast(2 as bigint)) AS (1 * 2)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -43,7 +43,7 @@ Project [(cast(1 as double) * cast(2 as double)) AS (1 * 2)#x] -- !query SELECT 4 / '2' FROM t -- !query analysis -Project [(cast(4 as double) / cast(cast(2 as double) as double)) AS (4 / 2)#x] +Project [(cast(cast(4 as bigint) as double) / cast(cast(2 as bigint) as double)) AS (4 / 2)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/inConversion.sql.out index 0db96719a3fb0..71bc2fef3ab8e 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/inConversion.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/inConversion.sql.out @@ -54,7 +54,7 @@ Project [cast(cast(1 as tinyint) as bigint) IN (cast(cast(1 as bigint) as bigint -- !query SELECT cast(1 as tinyint) in (cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as tinyint) as float) IN (cast(cast(1 as float) as float)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as tinyint) as double) IN (cast(cast(1 as float) as double)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -87,7 +87,7 @@ Project [cast(cast(1 as tinyint) as decimal(10,0)) IN (cast(cast(1 as decimal(10 -- !query SELECT cast(1 as tinyint) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as tinyint) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as tinyint) as bigint) IN (cast(cast(1 as string) as bigint)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -230,7 +230,7 @@ Project [cast(cast(1 as smallint) as bigint) IN (cast(cast(1 as bigint) as bigin -- !query SELECT cast(1 as smallint) in (cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as smallint) as float) IN (cast(cast(1 as float) as float)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as smallint) as double) IN (cast(cast(1 as float) as double)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -263,7 +263,7 @@ Project [cast(cast(1 as smallint) as decimal(10,0)) IN (cast(cast(1 as decimal(1 -- !query SELECT cast(1 as smallint) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as smallint) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as smallint) as bigint) IN (cast(cast(1 as string) as bigint)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -406,7 +406,7 @@ Project [cast(cast(1 as int) as bigint) IN (cast(cast(1 as bigint) as bigint)) A -- !query SELECT cast(1 as int) in (cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as int) as float) IN (cast(cast(1 as float) as float)) AS (CAST(1 AS INT) IN (CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as int) as double) IN (cast(cast(1 as 
float) as double)) AS (CAST(1 AS INT) IN (CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -439,7 +439,7 @@ Project [cast(cast(1 as int) as decimal(10,0)) IN (cast(cast(1 as decimal(10,0)) -- !query SELECT cast(1 as int) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as int) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS INT) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as int) as bigint) IN (cast(cast(1 as string) as bigint)) AS (CAST(1 AS INT) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -582,7 +582,7 @@ Project [cast(1 as bigint) IN (cast(1 as bigint)) AS (CAST(1 AS BIGINT) IN (CAST -- !query SELECT cast(1 as bigint) in (cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as bigint) as float) IN (cast(cast(1 as float) as float)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as bigint) as double) IN (cast(cast(1 as float) as double)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -615,7 +615,7 @@ Project [cast(cast(1 as bigint) as decimal(20,0)) IN (cast(cast(1 as decimal(10, -- !query SELECT cast(1 as bigint) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as bigint) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as bigint) as bigint) IN (cast(cast(1 as string) as bigint)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -714,7 +714,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as float) in (cast(1 as tinyint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as tinyint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS TINYINT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as tinyint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -725,7 +725,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as tinyint) as float)) -- !query SELECT cast(1 as float) in (cast(1 as smallint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as smallint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS SMALLINT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as smallint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -736,7 +736,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as smallint) as float)) -- !query SELECT cast(1 as float) in (cast(1 as int)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as int) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS INT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as int) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -747,7 +747,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as int) as float)) AS ( -- !query SELECT cast(1 as float) in (cast(1 as bigint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as bigint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS BIGINT)))#x] +Project 
[cast(cast(1 as float) as double) IN (cast(cast(1 as bigint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS BIGINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -791,7 +791,7 @@ Project [cast(cast(1 as float) as double) IN (cast(cast(1 as decimal(10,0)) as d -- !query SELECT cast(1 as float) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as float) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as string) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -967,7 +967,7 @@ Project [cast(cast(1 as double) as double) IN (cast(cast(1 as decimal(10,0)) as -- !query SELECT cast(1 as double) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as double) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS DOUBLE) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as double) as double) IN (cast(cast(1 as string) as double)) AS (CAST(1 AS DOUBLE) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1143,7 +1143,7 @@ Project [cast(1 as decimal(10,0)) IN (cast(1 as decimal(10,0))) AS (CAST(1 AS DE -- !query SELECT cast(1 as decimal(10, 0)) in (cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as decimal(10,0)) as string) IN (cast(cast(1 as string) as string)) AS (CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS STRING)))#x] +Project [cast(cast(1 as decimal(10,0)) as double) IN (cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1242,7 +1242,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as string) in (cast(1 as tinyint)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as tinyint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS TINYINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as tinyint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1253,7 +1253,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as tinyint) as string -- !query SELECT cast(1 as string) in (cast(1 as smallint)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as smallint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS SMALLINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as smallint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1264,7 +1264,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as smallint) as strin -- !query SELECT cast(1 as string) in (cast(1 as int)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as int) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS INT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as int) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1275,7 +1275,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as int) as string)) A -- !query SELECT cast(1 as string) in (cast(1 as bigint)) FROM t -- !query analysis 
-Project [cast(cast(1 as string) as string) IN (cast(cast(1 as bigint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS BIGINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as bigint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS BIGINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1286,7 +1286,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as bigint) as string) -- !query SELECT cast(1 as string) in (cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as float) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as float) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1297,7 +1297,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as float) as string)) -- !query SELECT cast(1 as string) in (cast(1 as double)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as double) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS DOUBLE)))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as double) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS DOUBLE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1308,7 +1308,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as double) as string) -- !query SELECT cast(1 as string) in (cast(1 as decimal(10, 0))) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as decimal(10,0)) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS DECIMAL(10,0))))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as decimal(10,0)) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1330,51 +1330,29 @@ Project [cast(1 as string) IN (cast(1 as string)) AS (CAST(1 AS STRING) IN (CAST -- !query SELECT cast(1 as string) in (cast('1' as binary)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS BINARY)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 49, - "fragment" : "in (cast('1' as binary))" - } ] -} +Project [cast(cast(1 as string) as binary) IN (cast(cast(1 as binary) as binary)) AS (CAST(1 AS STRING) IN (CAST(1 AS BINARY)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as string) in (cast(1 as boolean)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 48, - "fragment" : "in (cast(1 as boolean))" - } ] -} +Project [cast(cast(1 as string) as boolean) IN (cast(cast(1 as boolean) as boolean)) AS (CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))#x] ++- 
SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as string) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(2017-12-11 09:30:00.0 as timestamp) as string)) AS (CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +Project [cast(cast(1 as string) as timestamp) IN (cast(cast(2017-12-11 09:30:00.0 as timestamp) as timestamp)) AS (CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1385,7 +1363,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(2017-12-11 09:30:00.0 a -- !query SELECT cast(1 as string) in (cast('2017-12-11 09:30:00' as date)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(2017-12-11 09:30:00 as date) as string)) AS (CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00 AS DATE)))#x] +Project [cast(cast(1 as string) as date) IN (cast(cast(2017-12-11 09:30:00 as date) as date)) AS (CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00 AS DATE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1550,23 +1528,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) in (cast(1 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BINARY) IN (CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 28, - "stopIndex" : 49, - "fragment" : "in (cast(1 as string))" - } ] -} +Project [cast(cast(1 as binary) as binary) IN (cast(cast(1 as string) as binary)) AS (CAST(1 AS BINARY) IN (CAST(1 AS STRING)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1803,23 +1770,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT true in (cast(1 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(true IN (CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 34, - "fragment" : "in (cast(1 as string))" - } ] -} +Project [cast(true as boolean) IN (cast(cast(1 as string) as boolean)) AS (true IN (CAST(1 AS STRING)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2056,7 +2012,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as string)) FROM t -- !query analysis -Project [cast(cast(2017-12-12 09:30:00.0 as timestamp) as string) IN (cast(cast(2 as string) as string)) AS (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) IN (CAST(2 AS STRING)))#x] +Project [cast(cast(2017-12-12 09:30:00.0 as timestamp) as timestamp) IN (cast(cast(2 as string) as timestamp)) AS (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) IN (CAST(2 AS 
STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2287,7 +2243,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as string)) FROM t -- !query analysis -Project [cast(cast(2017-12-12 09:30:00 as date) as string) IN (cast(cast(2 as string) as string)) AS (CAST(2017-12-12 09:30:00 AS DATE) IN (CAST(2 AS STRING)))#x] +Project [cast(cast(2017-12-12 09:30:00 as date) as date) IN (cast(cast(2 as string) as date)) AS (CAST(2017-12-12 09:30:00 AS DATE) IN (CAST(2 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2408,7 +2364,7 @@ Project [cast(cast(1 as tinyint) as bigint) IN (cast(cast(1 as tinyint) as bigin -- !query SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as tinyint) as float) IN (cast(cast(1 as tinyint) as float),cast(cast(1 as float) as float)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as tinyint) as double) IN (cast(cast(1 as tinyint) as double),cast(cast(1 as float) as double)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2441,7 +2397,7 @@ Project [cast(cast(1 as tinyint) as decimal(10,0)) IN (cast(cast(1 as tinyint) a -- !query SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as tinyint) as string) IN (cast(cast(1 as tinyint) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as tinyint) as bigint) IN (cast(cast(1 as tinyint) as bigint),cast(cast(1 as string) as bigint)) AS (CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2584,7 +2540,7 @@ Project [cast(cast(1 as smallint) as bigint) IN (cast(cast(1 as smallint) as big -- !query SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as smallint) as float) IN (cast(cast(1 as smallint) as float),cast(cast(1 as float) as float)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as smallint) as double) IN (cast(cast(1 as smallint) as double),cast(cast(1 as float) as double)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2617,7 +2573,7 @@ Project [cast(cast(1 as smallint) as decimal(10,0)) IN (cast(cast(1 as smallint) -- !query SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as smallint) as string) IN (cast(cast(1 as smallint) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as smallint) as bigint) IN (cast(cast(1 as smallint) as bigint),cast(cast(1 as string) as bigint)) AS (CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2760,7 +2716,7 @@ Project [cast(cast(1 as int) as bigint) IN (cast(cast(1 as int) as bigint),cast( -- !query SELECT cast(1 as int) in (cast(1 as int), cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as 
int) as float) IN (cast(cast(1 as int) as float),cast(cast(1 as float) as float)) AS (CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as int) as double) IN (cast(cast(1 as int) as double),cast(cast(1 as float) as double)) AS (CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2793,7 +2749,7 @@ Project [cast(cast(1 as int) as decimal(10,0)) IN (cast(cast(1 as int) as decima -- !query SELECT cast(1 as int) in (cast(1 as int), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as int) as string) IN (cast(cast(1 as int) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as int) as bigint) IN (cast(cast(1 as int) as bigint),cast(cast(1 as string) as bigint)) AS (CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2936,7 +2892,7 @@ Project [cast(1 as bigint) IN (cast(1 as bigint),cast(1 as bigint)) AS (CAST(1 A -- !query SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as bigint) as float) IN (cast(cast(1 as bigint) as float),cast(cast(1 as float) as float)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as bigint) as double) IN (cast(cast(1 as bigint) as double),cast(cast(1 as float) as double)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2969,7 +2925,7 @@ Project [cast(cast(1 as bigint) as decimal(20,0)) IN (cast(cast(1 as bigint) as -- !query SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as bigint) as string) IN (cast(cast(1 as bigint) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as bigint) as bigint) IN (cast(cast(1 as bigint) as bigint),cast(cast(1 as string) as bigint)) AS (CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3068,7 +3024,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as float) in (cast(1 as float), cast(1 as tinyint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cast(cast(1 as tinyint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS TINYINT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),cast(cast(1 as tinyint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3079,7 +3035,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cas -- !query SELECT cast(1 as float) in (cast(1 as float), cast(1 as smallint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cast(cast(1 as smallint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS SMALLINT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),cast(cast(1 as smallint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project 
[cast(1#x as int) AS 1#x] @@ -3090,7 +3046,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cas -- !query SELECT cast(1 as float) in (cast(1 as float), cast(1 as int)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cast(cast(1 as int) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS INT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),cast(cast(1 as int) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3101,7 +3057,7 @@ Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cas -- !query SELECT cast(1 as float) in (cast(1 as float), cast(1 as bigint)) FROM t -- !query analysis -Project [cast(cast(1 as float) as float) IN (cast(cast(1 as float) as float),cast(cast(1 as bigint) as float)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BIGINT)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),cast(cast(1 as bigint) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BIGINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3145,7 +3101,7 @@ Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),c -- !query SELECT cast(1 as float) in (cast(1 as float), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as float) as string) IN (cast(cast(1 as float) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as float) as double) IN (cast(cast(1 as float) as double),cast(cast(1 as string) as double)) AS (CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3321,7 +3277,7 @@ Project [cast(cast(1 as double) as double) IN (cast(cast(1 as double) as double) -- !query SELECT cast(1 as double) in (cast(1 as double), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as double) as string) IN (cast(cast(1 as double) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as double) as double) IN (cast(cast(1 as double) as double),cast(cast(1 as string) as double)) AS (CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3497,7 +3453,7 @@ Project [cast(1 as decimal(10,0)) IN (cast(1 as decimal(10,0)),cast(1 as decimal -- !query SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(1 as decimal(10,0)) as string) IN (cast(cast(1 as decimal(10,0)) as string),cast(cast(1 as string) as string)) AS (CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS STRING)))#x] +Project [cast(cast(1 as decimal(10,0)) as double) IN (cast(cast(1 as decimal(10,0)) as double),cast(cast(1 as string) as double)) AS (CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3596,7 +3552,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as tinyint)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 
as string) as string),cast(cast(1 as tinyint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS TINYINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as string) as bigint),cast(cast(1 as tinyint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3607,7 +3563,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as smallint)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as smallint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS SMALLINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as string) as bigint),cast(cast(1 as smallint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3618,7 +3574,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as int)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as int) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS INT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as string) as bigint),cast(cast(1 as int) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3629,7 +3585,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as bigint)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as bigint) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BIGINT)))#x] +Project [cast(cast(1 as string) as bigint) IN (cast(cast(1 as string) as bigint),cast(cast(1 as bigint) as bigint)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BIGINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3640,7 +3596,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as float)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as float) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS FLOAT)))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as string) as double),cast(cast(1 as float) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3651,7 +3607,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as double)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as double) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS DOUBLE)))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as string) as double),cast(cast(1 as double) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 
AS DOUBLE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3662,7 +3618,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as decimal(10, 0))) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(1 as decimal(10,0)) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS DECIMAL(10,0))))#x] +Project [cast(cast(1 as string) as double) IN (cast(cast(1 as string) as double),cast(cast(1 as decimal(10,0)) as double)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3684,51 +3640,29 @@ Project [cast(1 as string) IN (cast(1 as string),cast(1 as string)) AS (CAST(1 A -- !query SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"STRING\", \"BINARY\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BINARY)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 68, - "fragment" : "in (cast(1 as string), cast('1' as binary))" - } ] -} +Project [cast(cast(1 as string) as binary) IN (cast(cast(1 as string) as binary),cast(cast(1 as binary) as binary)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BINARY)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"STRING\", \"STRING\", \"BOOLEAN\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 67, - "fragment" : "in (cast(1 as string), cast(1 as boolean))" - } ] -} +Project [cast(cast(1 as string) as boolean) IN (cast(cast(1 as string) as boolean),cast(cast(1 as boolean) as boolean)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as string) in (cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(2017-12-11 09:30:00.0 as timestamp) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +Project [cast(cast(1 as string) as timestamp) IN (cast(cast(1 as string) as timestamp),cast(cast(2017-12-11 09:30:00.0 as timestamp) as timestamp)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3739,7 +3673,7 @@ Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string) -- !query SELECT cast(1 as 
string) in (cast(1 as string), cast('2017-12-11 09:30:00' as date)) FROM t -- !query analysis -Project [cast(cast(1 as string) as string) IN (cast(cast(1 as string) as string),cast(cast(2017-12-11 09:30:00 as date) as string)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE)))#x] +Project [cast(cast(1 as string) as date) IN (cast(cast(1 as string) as date),cast(cast(2017-12-11 09:30:00 as date) as date)) AS (CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3904,23 +3838,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"BINARY\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BINARY) IN (CAST(1 AS BINARY), CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 28, - "stopIndex" : 70, - "fragment" : "in (cast('1' as binary), cast(1 as string))" - } ] -} +Project [cast(cast(1 as binary) as binary) IN (cast(cast(1 as binary) as binary),cast(cast(1 as string) as binary)) AS (CAST(1 AS BINARY) IN (CAST(1 AS BINARY), CAST(1 AS STRING)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -4157,23 +4080,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"BOOLEAN\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BOOLEAN) IN (CAST(1 AS BOOLEAN), CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 29, - "stopIndex" : 72, - "fragment" : "in (cast('1' as boolean), cast(1 as string))" - } ] -} +Project [cast(cast(1 as boolean) as boolean) IN (cast(cast(1 as boolean) as boolean),cast(cast(1 as string) as boolean)) AS (CAST(1 AS BOOLEAN) IN (CAST(1 AS BOOLEAN), CAST(1 AS STRING)))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -4410,7 +4322,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.0' as timestamp), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(2017-12-12 09:30:00.0 as timestamp) as string) IN (cast(cast(2017-12-12 09:30:00.0 as timestamp) as string),cast(cast(1 as string) as string)) AS (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) IN (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(1 AS STRING)))#x] +Project [cast(cast(2017-12-12 09:30:00.0 as timestamp) as timestamp) IN (cast(cast(2017-12-12 09:30:00.0 as timestamp) as timestamp),cast(cast(1 as string) as timestamp)) AS (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) IN (CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -4641,7 +4553,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as date), cast(1 as string)) FROM t -- !query analysis -Project [cast(cast(2017-12-12 09:30:00 as date) as string) IN (cast(cast(2017-12-12 09:30:00 as date) as string),cast(cast(1 as string) as string)) AS (CAST(2017-12-12 09:30:00 AS DATE) IN (CAST(2017-12-12 09:30:00 AS DATE), CAST(1 AS STRING)))#x] +Project [cast(cast(2017-12-12 09:30:00 as date) as date) IN (cast(cast(2017-12-12 09:30:00 as date) as date),cast(cast(1 as string) as date)) AS (CAST(2017-12-12 09:30:00 AS DATE) IN (CAST(2017-12-12 09:30:00 AS DATE), CAST(1 AS STRING)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapZipWith.sql.out index 0f72b0cf8a0e3..13cca708b8cbc 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapZipWith.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapZipWith.sql.out @@ -193,52 +193,96 @@ Project [map_zip_with(cast(decimal_map2#x as map), double SELECT map_zip_with(string_map1, int_map, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query analysis -Project [map_zip_with(string_map1#x, cast(int_map#x as map), lambdafunction(struct(k, lambda k#x, v1, lambda v1#x, v2, lambda v2#x), lambda k#x, lambda v1#x, lambda v2#x, false)) AS m#x] -+- SubqueryAlias various_maps - +- View (`various_maps`, [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x]) - +- Project [cast(boolean_map#x as map) AS boolean_map#x, cast(tinyint_map#x as map) AS tinyint_map#x, cast(smallint_map#x as map) AS smallint_map#x, cast(int_map#x as map) AS int_map#x, cast(bigint_map#x as map) AS bigint_map#x, cast(decimal_map1#x as map) AS decimal_map1#x, cast(decimal_map2#x as map) AS decimal_map2#x, cast(double_map#x as map) AS double_map#x, cast(float_map#x as map) AS float_map#x, cast(date_map#x as map) AS date_map#x, cast(timestamp_map#x as map) AS timestamp_map#x, cast(string_map1#x as map) AS string_map1#x, cast(string_map2#x as map) AS string_map2#x, cast(string_map3#x as map) AS string_map3#x, cast(string_map4#x as map) AS string_map4#x, cast(array_map1#x as map,array>) AS array_map1#x, cast(array_map2#x as map,array>) AS array_map2#x, cast(struct_map1#x as map,struct>) AS struct_map1#x, cast(struct_map2#x as map,struct>) AS struct_map2#x] - +- Project [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] - +- SubqueryAlias various_maps - +- LocalRelation [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", 
+ "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"STRING\"", + "rightType" : "\"INT\"", + "sqlExpr" : "\"map_zip_with(string_map1, int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "map_zip_with(string_map1, int_map, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(string_map2, date_map, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query analysis -Project [map_zip_with(string_map2#x, cast(date_map#x as map), lambdafunction(struct(k, lambda k#x, v1, lambda v1#x, v2, lambda v2#x), lambda k#x, lambda v1#x, lambda v2#x, false)) AS m#x] -+- SubqueryAlias various_maps - +- View (`various_maps`, [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x]) - +- Project [cast(boolean_map#x as map) AS boolean_map#x, cast(tinyint_map#x as map) AS tinyint_map#x, cast(smallint_map#x as map) AS smallint_map#x, cast(int_map#x as map) AS int_map#x, cast(bigint_map#x as map) AS bigint_map#x, cast(decimal_map1#x as map) AS decimal_map1#x, cast(decimal_map2#x as map) AS decimal_map2#x, cast(double_map#x as map) AS double_map#x, cast(float_map#x as map) AS float_map#x, cast(date_map#x as map) AS date_map#x, cast(timestamp_map#x as map) AS timestamp_map#x, cast(string_map1#x as map) AS string_map1#x, cast(string_map2#x as map) AS string_map2#x, cast(string_map3#x as map) AS string_map3#x, cast(string_map4#x as map) AS string_map4#x, cast(array_map1#x as map,array>) AS array_map1#x, cast(array_map2#x as map,array>) AS array_map2#x, cast(struct_map1#x as map,struct>) AS struct_map1#x, cast(struct_map2#x as map,struct>) AS struct_map2#x] - +- Project [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] - +- SubqueryAlias various_maps - +- LocalRelation [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"STRING\"", + "rightType" : "\"DATE\"", + "sqlExpr" : "\"map_zip_with(string_map2, date_map, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "map_zip_with(string_map2, date_map, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(timestamp_map, string_map3, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query analysis -Project [map_zip_with(cast(timestamp_map#x as map), string_map3#x, lambdafunction(struct(k, lambda k#x, v1, lambda v1#x, v2, lambda v2#x), lambda k#x, lambda v1#x, lambda v2#x, false)) AS m#x] -+- SubqueryAlias various_maps - +- View 
(`various_maps`, [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x]) - +- Project [cast(boolean_map#x as map) AS boolean_map#x, cast(tinyint_map#x as map) AS tinyint_map#x, cast(smallint_map#x as map) AS smallint_map#x, cast(int_map#x as map) AS int_map#x, cast(bigint_map#x as map) AS bigint_map#x, cast(decimal_map1#x as map) AS decimal_map1#x, cast(decimal_map2#x as map) AS decimal_map2#x, cast(double_map#x as map) AS double_map#x, cast(float_map#x as map) AS float_map#x, cast(date_map#x as map) AS date_map#x, cast(timestamp_map#x as map) AS timestamp_map#x, cast(string_map1#x as map) AS string_map1#x, cast(string_map2#x as map) AS string_map2#x, cast(string_map3#x as map) AS string_map3#x, cast(string_map4#x as map) AS string_map4#x, cast(array_map1#x as map,array>) AS array_map1#x, cast(array_map2#x as map,array>) AS array_map2#x, cast(struct_map1#x as map,struct>) AS struct_map1#x, cast(struct_map2#x as map,struct>) AS struct_map2#x] - +- Project [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] - +- SubqueryAlias various_maps - +- LocalRelation [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"TIMESTAMP\"", + "rightType" : "\"STRING\"", + "sqlExpr" : "\"map_zip_with(timestamp_map, string_map3, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 81, + "fragment" : "map_zip_with(timestamp_map, string_map3, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(decimal_map1, string_map4, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query analysis -Project [map_zip_with(cast(decimal_map1#x as map), string_map4#x, lambdafunction(struct(k, lambda k#x, v1, lambda v1#x, v2, lambda v2#x), lambda k#x, lambda v1#x, lambda v2#x, false)) AS m#x] -+- SubqueryAlias various_maps - +- View (`various_maps`, [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x]) - +- Project [cast(boolean_map#x as map) AS boolean_map#x, cast(tinyint_map#x as map) AS tinyint_map#x, cast(smallint_map#x as map) AS smallint_map#x, cast(int_map#x as map) AS int_map#x, cast(bigint_map#x as map) AS bigint_map#x, cast(decimal_map1#x as map) AS decimal_map1#x, cast(decimal_map2#x as map) AS decimal_map2#x, cast(double_map#x as map) AS double_map#x, cast(float_map#x as map) AS float_map#x, cast(date_map#x as map) AS date_map#x, cast(timestamp_map#x as map) AS timestamp_map#x, 
cast(string_map1#x as map) AS string_map1#x, cast(string_map2#x as map) AS string_map2#x, cast(string_map3#x as map) AS string_map3#x, cast(string_map4#x as map) AS string_map4#x, cast(array_map1#x as map,array>) AS array_map1#x, cast(array_map2#x as map,array>) AS array_map2#x, cast(struct_map1#x as map,struct>) AS struct_map1#x, cast(struct_map2#x as map,struct>) AS struct_map2#x] - +- Project [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] - +- SubqueryAlias various_maps - +- LocalRelation [boolean_map#x, tinyint_map#x, smallint_map#x, int_map#x, bigint_map#x, decimal_map1#x, decimal_map2#x, double_map#x, float_map#x, date_map#x, timestamp_map#x, string_map1#x, string_map2#x, string_map3#x, string_map4#x, array_map1#x, array_map2#x, struct_map1#x, struct_map2#x] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"DECIMAL(36,0)\"", + "rightType" : "\"STRING\"", + "sqlExpr" : "\"map_zip_with(decimal_map1, string_map4, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 80, + "fragment" : "map_zip_with(decimal_map1, string_map4, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out index dd3e56fe9322d..b1a3dc46dabac 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out @@ -117,13 +117,23 @@ SELECT map_concat(int_string_map1, tinyint_map2) istt_map FROM various_maps -- !query analysis -Project [map_concat(cast(tinyint_map1#x as map), smallint_map2#x) AS ts_map#x, map_concat(cast(smallint_map1#x as map), int_map2#x) AS si_map#x, map_concat(cast(int_map1#x as map), bigint_map2#x) AS ib_map#x, map_concat(cast(bigint_map1#x as map), cast(decimal_map2#x as map)) AS bd_map#x, map_concat(cast(decimal_map1#x as map), cast(float_map2#x as map)) AS df_map#x, map_concat(string_map1#x, cast(date_map2#x as map)) AS std_map#x, map_concat(cast(timestamp_map1#x as map), string_map2#x) AS tst_map#x, map_concat(string_map1#x, cast(int_map2#x as map)) AS sti_map#x, map_concat(int_string_map1#x, cast(tinyint_map2#x as map)) AS istt_map#x] -+- SubqueryAlias various_maps - +- View (`various_maps`, [boolean_map1#x, boolean_map2#x, tinyint_map1#x, tinyint_map2#x, smallint_map1#x, smallint_map2#x, int_map1#x, int_map2#x, bigint_map1#x, bigint_map2#x, decimal_map1#x, decimal_map2#x, double_map1#x, double_map2#x, float_map1#x, float_map2#x, date_map1#x, date_map2#x, timestamp_map1#x, timestamp_map2#x, string_map1#x, string_map2#x, array_map1#x, array_map2#x, ... 
6 more fields]) - +- Project [cast(boolean_map1#x as map) AS boolean_map1#x, cast(boolean_map2#x as map) AS boolean_map2#x, cast(tinyint_map1#x as map) AS tinyint_map1#x, cast(tinyint_map2#x as map) AS tinyint_map2#x, cast(smallint_map1#x as map) AS smallint_map1#x, cast(smallint_map2#x as map) AS smallint_map2#x, cast(int_map1#x as map) AS int_map1#x, cast(int_map2#x as map) AS int_map2#x, cast(bigint_map1#x as map) AS bigint_map1#x, cast(bigint_map2#x as map) AS bigint_map2#x, cast(decimal_map1#x as map) AS decimal_map1#x, cast(decimal_map2#x as map) AS decimal_map2#x, cast(double_map1#x as map) AS double_map1#x, cast(double_map2#x as map) AS double_map2#x, cast(float_map1#x as map) AS float_map1#x, cast(float_map2#x as map) AS float_map2#x, cast(date_map1#x as map) AS date_map1#x, cast(date_map2#x as map) AS date_map2#x, cast(timestamp_map1#x as map) AS timestamp_map1#x, cast(timestamp_map2#x as map) AS timestamp_map2#x, cast(string_map1#x as map) AS string_map1#x, cast(string_map2#x as map) AS string_map2#x, cast(array_map1#x as map,array>) AS array_map1#x, cast(array_map2#x as map,array>) AS array_map2#x, ... 6 more fields] - +- Project [boolean_map1#x, boolean_map2#x, tinyint_map1#x, tinyint_map2#x, smallint_map1#x, smallint_map2#x, int_map1#x, int_map2#x, bigint_map1#x, bigint_map2#x, decimal_map1#x, decimal_map2#x, double_map1#x, double_map2#x, float_map1#x, float_map2#x, date_map1#x, date_map2#x, timestamp_map1#x, timestamp_map2#x, string_map1#x, string_map2#x, array_map1#x, array_map2#x, ... 6 more fields] - +- SubqueryAlias various_maps - +- LocalRelation [boolean_map1#x, boolean_map2#x, tinyint_map1#x, tinyint_map2#x, smallint_map1#x, smallint_map2#x, int_map1#x, int_map2#x, bigint_map1#x, bigint_map2#x, decimal_map1#x, decimal_map2#x, double_map1#x, double_map2#x, float_map1#x, float_map2#x, date_map1#x, date_map2#x, timestamp_map1#x, timestamp_map2#x, string_map1#x, string_map2#x, array_map1#x, array_map2#x, ... 
6 more fields] +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "dataType" : "(\"MAP\" or \"MAP\")", + "functionName" : "`map_concat`", + "sqlExpr" : "\"map_concat(string_map1, date_map2)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 257, + "stopIndex" : 290, + "fragment" : "map_concat(string_map1, date_map2)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out index ccd34cfaeb67f..a3420c3cb0635 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out @@ -10,7 +10,7 @@ CreateViewCommand `t`, SELECT 1, false, false, LocalTempView, UNSUPPORTED, true -- !query SELECT '1' + cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as double) + cast(cast(1 as tinyint) as double)) AS (1 + CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) + cast(cast(1 as tinyint) as bigint)) AS (1 + CAST(1 AS TINYINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -21,7 +21,7 @@ Project [(cast(1 as double) + cast(cast(1 as tinyint) as double)) AS (1 + CAST(1 -- !query SELECT '1' + cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as double) + cast(cast(1 as smallint) as double)) AS (1 + CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) + cast(cast(1 as smallint) as bigint)) AS (1 + CAST(1 AS SMALLINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -32,7 +32,7 @@ Project [(cast(1 as double) + cast(cast(1 as smallint) as double)) AS (1 + CAST( -- !query SELECT '1' + cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as double) + cast(cast(1 as int) as double)) AS (1 + CAST(1 AS INT))#x] +Project [(cast(1 as bigint) + cast(cast(1 as int) as bigint)) AS (1 + CAST(1 AS INT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -43,7 +43,7 @@ Project [(cast(1 as double) + cast(cast(1 as int) as double)) AS (1 + CAST(1 AS -- !query SELECT '1' + cast(1 as bigint) FROM t -- !query analysis -Project [(cast(1 as double) + cast(cast(1 as bigint) as double)) AS (1 + CAST(1 AS BIGINT))#x] +Project [(cast(1 as bigint) + cast(1 as bigint)) AS (1 + CAST(1 AS BIGINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -87,12 +87,23 @@ Project [(cast(1 as double) + cast(cast(1 as decimal(10,0)) as double)) AS (1 + -- !query SELECT '1' + '1' FROM t -- !query analysis -Project [(cast(1 as double) + cast(1 as double)) AS (1 + 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "sqlExpr" : "\"(1 + 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' + '1'" + } ] +} -- !query @@ -100,11 +111,11 @@ SELECT '1' + 
cast('1' as binary) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -122,11 +133,11 @@ SELECT '1' + cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -144,11 +155,11 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -169,11 +180,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputSql" : "\"CAST(2017-12-11 09:30:00 AS DATE)\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" + "sqlExpr" : "\"date_add(1, CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { "objectType" : "", @@ -188,7 +199,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' - cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as double) - cast(cast(1 as tinyint) as double)) AS (1 - CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) - cast(cast(1 as tinyint) as bigint)) AS (1 - CAST(1 AS TINYINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -199,7 +210,7 @@ Project [(cast(1 as double) - cast(cast(1 as tinyint) as double)) AS (1 - CAST(1 -- !query SELECT '1' - cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as double) - cast(cast(1 as smallint) as double)) AS (1 - CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) - cast(cast(1 as smallint) as bigint)) AS (1 - CAST(1 AS SMALLINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -210,7 +221,7 @@ Project [(cast(1 as double) - cast(cast(1 as smallint) as double)) AS (1 - CAST( -- !query SELECT '1' - cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as double) - cast(cast(1 as int) as double)) AS (1 - CAST(1 AS INT))#x] +Project [(cast(1 as bigint) - cast(cast(1 as int) as bigint)) AS (1 - CAST(1 AS INT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- 
Project [cast(1#x as int) AS 1#x] @@ -221,7 +232,7 @@ Project [(cast(1 as double) - cast(cast(1 as int) as double)) AS (1 - CAST(1 AS -- !query SELECT '1' - cast(1 as bigint) FROM t -- !query analysis -Project [(cast(1 as double) - cast(cast(1 as bigint) as double)) AS (1 - CAST(1 AS BIGINT))#x] +Project [(cast(1 as bigint) - cast(1 as bigint)) AS (1 - CAST(1 AS BIGINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -265,12 +276,23 @@ Project [(cast(1 as double) - cast(cast(1 as decimal(10,0)) as double)) AS (1 - -- !query SELECT '1' - '1' FROM t -- !query analysis -Project [(cast(1 as double) - cast(1 as double)) AS (1 - 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "sqlExpr" : "\"(1 - 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' - '1'" + } ] +} -- !query @@ -278,11 +300,11 @@ SELECT '1' - cast('1' as binary) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 - CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -300,11 +322,11 @@ SELECT '1' - cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 - CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -320,25 +342,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(1 - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "'1' - cast('2017-12-11 09:30:00.0' as timestamp)" - } ] -} +Project [(cast(1 as timestamp) - cast(2017-12-11 09:30:00.0 as timestamp)) AS (1 - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -355,7 +364,7 @@ Project [(cast(1 as date) - cast(2017-12-11 09:30:00 as date)) AS (1 - CAST(2017 -- !query SELECT '1' 
* cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as double) * cast(cast(1 as tinyint) as double)) AS (1 * CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) * cast(cast(1 as tinyint) as bigint)) AS (1 * CAST(1 AS TINYINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -366,7 +375,7 @@ Project [(cast(1 as double) * cast(cast(1 as tinyint) as double)) AS (1 * CAST(1 -- !query SELECT '1' * cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as double) * cast(cast(1 as smallint) as double)) AS (1 * CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) * cast(cast(1 as smallint) as bigint)) AS (1 * CAST(1 AS SMALLINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -377,7 +386,7 @@ Project [(cast(1 as double) * cast(cast(1 as smallint) as double)) AS (1 * CAST( -- !query SELECT '1' * cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as double) * cast(cast(1 as int) as double)) AS (1 * CAST(1 AS INT))#x] +Project [(cast(1 as bigint) * cast(cast(1 as int) as bigint)) AS (1 * CAST(1 AS INT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -388,7 +397,7 @@ Project [(cast(1 as double) * cast(cast(1 as int) as double)) AS (1 * CAST(1 AS -- !query SELECT '1' * cast(1 as bigint) FROM t -- !query analysis -Project [(cast(1 as double) * cast(cast(1 as bigint) as double)) AS (1 * CAST(1 AS BIGINT))#x] +Project [(cast(1 as bigint) * cast(1 as bigint)) AS (1 * CAST(1 AS BIGINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -432,12 +441,23 @@ Project [(cast(1 as double) * cast(cast(1 as decimal(10,0)) as double)) AS (1 * -- !query SELECT '1' * '1' FROM t -- !query analysis -Project [(cast(1 as double) * cast(1 as double)) AS (1 * 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"(1 * 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' * '1'" + } ] +} -- !query @@ -445,11 +465,11 @@ SELECT '1' * cast('1' as binary) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -467,11 +487,11 @@ SELECT '1' * cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -489,11 +509,11 @@ SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : 
"DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -511,11 +531,11 @@ SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -531,7 +551,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' / cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as double) / cast(cast(1 as tinyint) as double)) AS (1 / CAST(1 AS TINYINT))#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(cast(1 as tinyint) as bigint) as double)) AS (1 / CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -542,7 +562,7 @@ Project [(cast(1 as double) / cast(cast(1 as tinyint) as double)) AS (1 / CAST(1 -- !query SELECT '1' / cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as double) / cast(cast(1 as smallint) as double)) AS (1 / CAST(1 AS SMALLINT))#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(cast(1 as smallint) as bigint) as double)) AS (1 / CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -553,7 +573,7 @@ Project [(cast(1 as double) / cast(cast(1 as smallint) as double)) AS (1 / CAST( -- !query SELECT '1' / cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as double) / cast(cast(1 as int) as double)) AS (1 / CAST(1 AS INT))#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(cast(1 as int) as bigint) as double)) AS (1 / CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -564,7 +584,7 @@ Project [(cast(1 as double) / cast(cast(1 as int) as double)) AS (1 / CAST(1 AS -- !query SELECT '1' / cast(1 as bigint) FROM t -- !query analysis -Project [(cast(1 as double) / cast(cast(1 as bigint) as double)) AS (1 / CAST(1 AS BIGINT))#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(1 as bigint) as double)) AS (1 / CAST(1 AS BIGINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -608,12 +628,23 @@ Project [(cast(1 as double) / cast(cast(1 as decimal(10,0)) as double)) AS (1 / -- !query SELECT '1' / '1' FROM t -- !query analysis -Project [(cast(1 as double) / cast(1 as double)) AS (1 / 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", + "sqlExpr" : "\"(1 / 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' / '1'" + } ] +} -- !query @@ -621,11 +652,11 @@ SELECT '1' / cast('1' as binary) FROM t -- 
!query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -643,11 +674,11 @@ SELECT '1' / cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -665,11 +696,11 @@ SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -687,11 +718,11 @@ SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -707,7 +738,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' % cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as double) % cast(cast(1 as tinyint) as double)) AS (1 % CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) % cast(cast(1 as tinyint) as bigint)) AS (1 % CAST(1 AS TINYINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -718,7 +749,7 @@ Project [(cast(1 as double) % cast(cast(1 as tinyint) as double)) AS (1 % CAST(1 -- !query SELECT '1' % cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as double) % cast(cast(1 as smallint) as double)) AS (1 % CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) % cast(cast(1 as smallint) as bigint)) AS (1 % CAST(1 AS SMALLINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -729,7 +760,7 @@ Project [(cast(1 as double) % cast(cast(1 as smallint) as double)) AS (1 % CAST( -- !query SELECT '1' % cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as double) % cast(cast(1 as int) as double)) AS (1 % CAST(1 AS INT))#x] +Project [(cast(1 as bigint) % cast(cast(1 as int) as bigint)) AS (1 % CAST(1 AS INT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -740,7 +771,7 @@ Project [(cast(1 as double) % cast(cast(1 as int) as double)) AS (1 % CAST(1 AS -- !query SELECT '1' % cast(1 as bigint) FROM t -- !query analysis -Project [(cast(1 as double) % cast(cast(1 as bigint) as double)) AS (1 % 
CAST(1 AS BIGINT))#x] +Project [(cast(1 as bigint) % cast(1 as bigint)) AS (1 % CAST(1 AS BIGINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -784,12 +815,23 @@ Project [(cast(1 as double) % cast(cast(1 as decimal(10,0)) as double)) AS (1 % -- !query SELECT '1' % '1' FROM t -- !query analysis -Project [(cast(1 as double) % cast(1 as double)) AS (1 % 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"(1 % 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' % '1'" + } ] +} -- !query @@ -797,11 +839,11 @@ SELECT '1' % cast('1' as binary) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -819,11 +861,11 @@ SELECT '1' % cast(1 as boolean) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -841,11 +883,11 @@ SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -863,11 +905,11 @@ SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -883,7 +925,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT pmod('1', cast(1 as tinyint)) FROM t -- !query analysis -Project [pmod(cast(1 as double), cast(cast(1 as tinyint) as double)) AS pmod(1, CAST(1 AS TINYINT))#x] +Project [pmod(cast(1 as bigint), cast(cast(1 as tinyint) as bigint)) AS pmod(1, CAST(1 AS TINYINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -894,7 +936,7 @@ Project [pmod(cast(1 as double), cast(cast(1 as tinyint) as double)) AS pmod(1, -- !query SELECT pmod('1', cast(1 as smallint)) 
FROM t -- !query analysis -Project [pmod(cast(1 as double), cast(cast(1 as smallint) as double)) AS pmod(1, CAST(1 AS SMALLINT))#x] +Project [pmod(cast(1 as bigint), cast(cast(1 as smallint) as bigint)) AS pmod(1, CAST(1 AS SMALLINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -905,7 +947,7 @@ Project [pmod(cast(1 as double), cast(cast(1 as smallint) as double)) AS pmod(1, -- !query SELECT pmod('1', cast(1 as int)) FROM t -- !query analysis -Project [pmod(cast(1 as double), cast(cast(1 as int) as double)) AS pmod(1, CAST(1 AS INT))#x] +Project [pmod(cast(1 as bigint), cast(cast(1 as int) as bigint)) AS pmod(1, CAST(1 AS INT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -916,7 +958,7 @@ Project [pmod(cast(1 as double), cast(cast(1 as int) as double)) AS pmod(1, CAST -- !query SELECT pmod('1', cast(1 as bigint)) FROM t -- !query analysis -Project [pmod(cast(1 as double), cast(cast(1 as bigint) as double)) AS pmod(1, CAST(1 AS BIGINT))#x] +Project [pmod(cast(1 as bigint), cast(1 as bigint)) AS pmod(1, CAST(1 AS BIGINT))#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -960,12 +1002,23 @@ Project [pmod(cast(1 as double), cast(cast(1 as decimal(10,0)) as double)) AS pm -- !query SELECT pmod('1', '1') FROM t -- !query analysis -Project [pmod(cast(1 as double), cast(1 as double)) AS pmod(1, 1)#x] -+- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"pmod(1, 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 21, + "fragment" : "pmod('1', '1')" + } ] +} -- !query @@ -973,11 +1026,11 @@ SELECT pmod('1', cast('1' as binary)) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -995,11 +1048,11 @@ SELECT pmod('1', cast(1 as boolean)) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -1017,11 +1070,11 @@ SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -1039,11 +1092,11 @@ SELECT 
pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -1059,7 +1112,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) + '1' FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) + cast(1 as double)) AS (CAST(1 AS TINYINT) + 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) + cast(1 as bigint)) AS (CAST(1 AS TINYINT) + 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1070,7 +1123,7 @@ Project [(cast(cast(1 as tinyint) as double) + cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as smallint) + '1' FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) + cast(1 as double)) AS (CAST(1 AS SMALLINT) + 1)#x] +Project [(cast(cast(1 as smallint) as bigint) + cast(1 as bigint)) AS (CAST(1 AS SMALLINT) + 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1081,7 +1134,7 @@ Project [(cast(cast(1 as smallint) as double) + cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as int) + '1' FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) + cast(1 as double)) AS (CAST(1 AS INT) + 1)#x] +Project [(cast(cast(1 as int) as bigint) + cast(1 as bigint)) AS (CAST(1 AS INT) + 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1092,7 +1145,7 @@ Project [(cast(cast(1 as int) as double) + cast(1 as double)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as bigint) + '1' FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) + cast(1 as double)) AS (CAST(1 AS BIGINT) + 1)#x] +Project [(cast(1 as bigint) + cast(1 as bigint)) AS (CAST(1 AS BIGINT) + 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1138,11 +1191,11 @@ SELECT cast('1' as binary) + '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) + 1)\"" }, "queryContext" : [ { @@ -1160,11 +1213,11 @@ SELECT cast(1 as boolean) + '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) + 1)\"" }, "queryContext" : [ { @@ -1182,11 +1235,11 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : 
"DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + 1)\"" }, "queryContext" : [ { @@ -1208,7 +1261,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" @@ -1226,7 +1279,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) - '1' FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) - cast(1 as double)) AS (CAST(1 AS TINYINT) - 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) - cast(1 as bigint)) AS (CAST(1 AS TINYINT) - 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1237,7 +1290,7 @@ Project [(cast(cast(1 as tinyint) as double) - cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as smallint) - '1' FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) - cast(1 as double)) AS (CAST(1 AS SMALLINT) - 1)#x] +Project [(cast(cast(1 as smallint) as bigint) - cast(1 as bigint)) AS (CAST(1 AS SMALLINT) - 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1248,7 +1301,7 @@ Project [(cast(cast(1 as smallint) as double) - cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as int) - '1' FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) - cast(1 as double)) AS (CAST(1 AS INT) - 1)#x] +Project [(cast(cast(1 as int) as bigint) - cast(1 as bigint)) AS (CAST(1 AS INT) - 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1259,7 +1312,7 @@ Project [(cast(cast(1 as int) as double) - cast(1 as double)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as bigint) - '1' FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) - cast(1 as double)) AS (CAST(1 AS BIGINT) - 1)#x] +Project [(cast(1 as bigint) - cast(1 as bigint)) AS (CAST(1 AS BIGINT) - 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1305,11 +1358,11 @@ SELECT cast('1' as binary) - '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) - 1)\"" }, "queryContext" : [ { @@ -1327,11 +1380,11 @@ SELECT cast(1 as boolean) - '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO 
MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) - 1)\"" }, "queryContext" : [ { @@ -1347,55 +1400,29 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 1)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) - '1'" - } ] -} +Project [(cast(2017-12-11 09:30:00.0 as timestamp) - cast(1 as timestamp)) AS (CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 1)#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "cast('2017-12-11 09:30:00' as date) - '1'" - } ] -} +Project [(cast(2017-12-11 09:30:00 as date) - cast(1 as date)) AS (CAST(2017-12-11 09:30:00 AS DATE) - 1)#x] ++- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as tinyint) * '1' FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) * cast(1 as double)) AS (CAST(1 AS TINYINT) * 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) * cast(1 as bigint)) AS (CAST(1 AS TINYINT) * 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1406,7 +1433,7 @@ Project [(cast(cast(1 as tinyint) as double) * cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as smallint) * '1' FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) * cast(1 as double)) AS (CAST(1 AS SMALLINT) * 1)#x] +Project [(cast(cast(1 as smallint) as bigint) * cast(1 as bigint)) AS (CAST(1 AS SMALLINT) * 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1417,7 +1444,7 @@ Project [(cast(cast(1 as smallint) as double) * cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as int) * '1' FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) * cast(1 as double)) AS (CAST(1 AS INT) * 1)#x] +Project [(cast(cast(1 as int) as bigint) * cast(1 as bigint)) AS (CAST(1 AS INT) * 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1428,7 +1455,7 @@ Project [(cast(cast(1 as int) as double) * cast(1 as double)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as bigint) * '1' FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) * cast(1 as double)) AS (CAST(1 AS BIGINT) * 1)#x] +Project [(cast(1 as bigint) * cast(1 as bigint)) AS (CAST(1 AS BIGINT) * 1)#xL] +- SubqueryAlias t 
+- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1474,11 +1501,11 @@ SELECT cast('1' as binary) * '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BINARY) * 1)\"" }, "queryContext" : [ { @@ -1496,11 +1523,11 @@ SELECT cast(1 as boolean) * '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) * 1)\"" }, "queryContext" : [ { @@ -1518,11 +1545,11 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) * 1)\"" }, "queryContext" : [ { @@ -1540,11 +1567,11 @@ SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) * 1)\"" }, "queryContext" : [ { @@ -1560,7 +1587,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) / '1' FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) / cast(cast(1 as double) as double)) AS (CAST(1 AS TINYINT) / 1)#x] +Project [(cast(cast(cast(1 as tinyint) as bigint) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS TINYINT) / 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1571,7 +1598,7 @@ Project [(cast(cast(1 as tinyint) as double) / cast(cast(1 as double) as double) -- !query SELECT cast(1 as smallint) / '1' FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) / cast(cast(1 as double) as double)) AS (CAST(1 AS SMALLINT) / 1)#x] +Project [(cast(cast(cast(1 as smallint) as bigint) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS SMALLINT) / 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1582,7 +1609,7 @@ Project [(cast(cast(1 as smallint) as double) / cast(cast(1 as double) as double -- !query SELECT cast(1 as int) / '1' FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) / cast(cast(1 as double) as double)) AS (CAST(1 AS INT) / 1)#x] +Project [(cast(cast(cast(1 as int) as bigint) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS INT) / 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1593,7 +1620,7 @@ 
Project [(cast(cast(1 as int) as double) / cast(cast(1 as double) as double)) AS -- !query SELECT cast(1 as bigint) / '1' FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) / cast(cast(1 as double) as double)) AS (CAST(1 AS BIGINT) / 1)#x] +Project [(cast(cast(1 as bigint) as double) / cast(cast(1 as bigint) as double)) AS (CAST(1 AS BIGINT) / 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1604,7 +1631,7 @@ Project [(cast(cast(1 as bigint) as double) / cast(cast(1 as double) as double)) -- !query SELECT cast(1 as float) / '1' FROM t -- !query analysis -Project [(cast(cast(1 as float) as double) / cast(cast(1 as double) as double)) AS (CAST(1 AS FLOAT) / 1)#x] +Project [(cast(cast(1 as float) as double) / cast(1 as double)) AS (CAST(1 AS FLOAT) / 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1639,11 +1666,11 @@ SELECT cast('1' as binary) / '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) / 1)\"" }, "queryContext" : [ { @@ -1661,11 +1688,11 @@ SELECT cast(1 as boolean) / '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) / 1)\"" }, "queryContext" : [ { @@ -1683,11 +1710,11 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) / 1)\"" }, "queryContext" : [ { @@ -1705,11 +1732,11 @@ SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) / 1)\"" }, "queryContext" : [ { @@ -1725,7 +1752,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) % '1' FROM t -- !query analysis -Project [(cast(cast(1 as tinyint) as double) % cast(1 as double)) AS (CAST(1 AS TINYINT) % 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) % cast(1 as bigint)) AS (CAST(1 AS TINYINT) % 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1736,7 +1763,7 @@ Project [(cast(cast(1 as tinyint) as double) % cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as 
smallint) % '1' FROM t -- !query analysis -Project [(cast(cast(1 as smallint) as double) % cast(1 as double)) AS (CAST(1 AS SMALLINT) % 1)#x] +Project [(cast(cast(1 as smallint) as bigint) % cast(1 as bigint)) AS (CAST(1 AS SMALLINT) % 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1747,7 +1774,7 @@ Project [(cast(cast(1 as smallint) as double) % cast(1 as double)) AS (CAST(1 AS -- !query SELECT cast(1 as int) % '1' FROM t -- !query analysis -Project [(cast(cast(1 as int) as double) % cast(1 as double)) AS (CAST(1 AS INT) % 1)#x] +Project [(cast(cast(1 as int) as bigint) % cast(1 as bigint)) AS (CAST(1 AS INT) % 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1758,7 +1785,7 @@ Project [(cast(cast(1 as int) as double) % cast(1 as double)) AS (CAST(1 AS INT) -- !query SELECT cast(1 as bigint) % '1' FROM t -- !query analysis -Project [(cast(cast(1 as bigint) as double) % cast(1 as double)) AS (CAST(1 AS BIGINT) % 1)#x] +Project [(cast(1 as bigint) % cast(1 as bigint)) AS (CAST(1 AS BIGINT) % 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1804,11 +1831,11 @@ SELECT cast('1' as binary) % '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BINARY) % 1)\"" }, "queryContext" : [ { @@ -1826,11 +1853,11 @@ SELECT cast(1 as boolean) % '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) % 1)\"" }, "queryContext" : [ { @@ -1848,11 +1875,11 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) % 1)\"" }, "queryContext" : [ { @@ -1870,11 +1897,11 @@ SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) % 1)\"" }, "queryContext" : [ { @@ -1890,7 +1917,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT pmod(cast(1 as tinyint), '1') FROM t -- !query analysis -Project [pmod(cast(cast(1 as tinyint) as double), cast(1 as double)) AS pmod(CAST(1 AS TINYINT), 1)#x] +Project [pmod(cast(cast(1 as tinyint) as bigint), cast(1 as bigint)) AS pmod(CAST(1 AS 
TINYINT), 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1901,7 +1928,7 @@ Project [pmod(cast(cast(1 as tinyint) as double), cast(1 as double)) AS pmod(CAS -- !query SELECT pmod(cast(1 as smallint), '1') FROM t -- !query analysis -Project [pmod(cast(cast(1 as smallint) as double), cast(1 as double)) AS pmod(CAST(1 AS SMALLINT), 1)#x] +Project [pmod(cast(cast(1 as smallint) as bigint), cast(1 as bigint)) AS pmod(CAST(1 AS SMALLINT), 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1912,7 +1939,7 @@ Project [pmod(cast(cast(1 as smallint) as double), cast(1 as double)) AS pmod(CA -- !query SELECT pmod(cast(1 as int), '1') FROM t -- !query analysis -Project [pmod(cast(cast(1 as int) as double), cast(1 as double)) AS pmod(CAST(1 AS INT), 1)#x] +Project [pmod(cast(cast(1 as int) as bigint), cast(1 as bigint)) AS pmod(CAST(1 AS INT), 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1923,7 +1950,7 @@ Project [pmod(cast(cast(1 as int) as double), cast(1 as double)) AS pmod(CAST(1 -- !query SELECT pmod(cast(1 as bigint), '1') FROM t -- !query analysis -Project [pmod(cast(cast(1 as bigint) as double), cast(1 as double)) AS pmod(CAST(1 AS BIGINT), 1)#x] +Project [pmod(cast(1 as bigint), cast(1 as bigint)) AS pmod(CAST(1 AS BIGINT), 1)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -1969,11 +1996,11 @@ SELECT pmod(cast('1' as binary), '1') FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(1 AS BINARY), 1)\"" }, "queryContext" : [ { @@ -1991,11 +2018,11 @@ SELECT pmod(cast(1 as boolean), '1') FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(1 AS BOOLEAN), 1)\"" }, "queryContext" : [ { @@ -2013,11 +2040,11 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP), 1)\"" }, "queryContext" : [ { @@ -2035,11 +2062,11 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, "queryContext" : [ { @@ -2055,7 +2082,7 @@ 
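The % and pmod hunks above change the result type, not just the coercion path: with the string operand resolved to BIGINT, the whole expression stays integral, which is why the plan ids flip from #x (the old DOUBLE result) to #xL (BIGINT). A short illustration, again assuming view t from these tests and not part of the golden output:

-- Result type of modulus against a string operand, old vs. new analysis:
SELECT cast(1 as tinyint) % '1' FROM t;   -- was DOUBLE, now BIGINT
SELECT pmod(cast(1 as int), '1') FROM t;  -- was pmod(DOUBLE, DOUBLE), now pmod(BIGINT, BIGINT)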
org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' = cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (1 = CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as tinyint) as bigint)) AS (1 = CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2066,7 +2093,7 @@ Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (1 = CAST(1 AS TINYINT))#x -- !query SELECT '1' = cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(1 as smallint)) AS (1 = CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as smallint) as bigint)) AS (1 = CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2077,7 +2104,7 @@ Project [(cast(1 as smallint) = cast(1 as smallint)) AS (1 = CAST(1 AS SMALLINT) -- !query SELECT '1' = cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) = cast(1 as int)) AS (1 = CAST(1 AS INT))#x] +Project [(cast(1 as bigint) = cast(cast(1 as int) as bigint)) AS (1 = CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2099,7 +2126,7 @@ Project [(cast(1 as bigint) = cast(1 as bigint)) AS (1 = CAST(1 AS BIGINT))#x] -- !query SELECT '1' = cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) = cast(1 as float)) AS (1 = CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) = cast(cast(1 as float) as double)) AS (1 = CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2121,7 +2148,7 @@ Project [(cast(1 as double) = cast(1 as double)) AS (1 = CAST(1 AS DOUBLE))#x] -- !query SELECT '1' = cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (1 = CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) = cast(cast(1 as decimal(10,0)) as double)) AS (1 = CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2187,7 +2214,7 @@ Project [(cast(1 as date) = cast(2017-12-11 09:30:00 as date)) AS (1 = CAST(2017 -- !query SELECT cast(1 as tinyint) = '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (CAST(1 AS TINYINT) = 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) = cast(1 as bigint)) AS (CAST(1 AS TINYINT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2198,7 +2225,7 @@ Project [(cast(1 as tinyint) = cast(1 as tinyint)) AS (CAST(1 AS TINYINT) = 1)#x -- !query SELECT cast(1 as smallint) = '1' FROM t -- !query analysis -Project [(cast(1 as smallint) = cast(1 as smallint)) AS (CAST(1 AS SMALLINT) = 1)#x] +Project [(cast(cast(1 as smallint) as bigint) = cast(1 as bigint)) AS (CAST(1 AS SMALLINT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2209,7 +2236,7 @@ Project [(cast(1 as smallint) = cast(1 as smallint)) AS (CAST(1 AS SMALLINT) = 1 -- !query SELECT cast(1 as int) = '1' FROM t -- !query analysis -Project [(cast(1 as int) = cast(1 as int)) AS (CAST(1 AS INT) = 1)#x] +Project [(cast(cast(1 as int) as bigint) = cast(1 as bigint)) AS (CAST(1 AS INT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2231,7 +2258,7 @@ Project [(cast(1 as bigint) = cast(1 as bigint)) AS (CAST(1 AS BIGINT) = 1)#x] -- !query SELECT cast(1 as float) = '1' FROM t -- !query analysis -Project [(cast(1 
as float) = cast(1 as float)) AS (CAST(1 AS FLOAT) = 1)#x] +Project [(cast(cast(1 as float) as double) = cast(1 as double)) AS (CAST(1 AS FLOAT) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2253,7 +2280,7 @@ Project [(cast(1 as double) = cast(1 as double)) AS (CAST(1 AS DOUBLE) = 1)#x] -- !query SELECT cast(1 as decimal(10, 0)) = '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) = 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) = cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) = 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2308,7 +2335,7 @@ Project [(cast(2017-12-11 09:30:00 as date) = cast(1 as date)) AS (CAST(2017-12- -- !query SELECT '1' <=> cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) <=> cast(1 as tinyint)) AS (1 <=> CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) <=> cast(cast(1 as tinyint) as bigint)) AS (1 <=> CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2319,7 +2346,7 @@ Project [(cast(1 as tinyint) <=> cast(1 as tinyint)) AS (1 <=> CAST(1 AS TINYINT -- !query SELECT '1' <=> cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) <=> cast(1 as smallint)) AS (1 <=> CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) <=> cast(cast(1 as smallint) as bigint)) AS (1 <=> CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2330,7 +2357,7 @@ Project [(cast(1 as smallint) <=> cast(1 as smallint)) AS (1 <=> CAST(1 AS SMALL -- !query SELECT '1' <=> cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) <=> cast(1 as int)) AS (1 <=> CAST(1 AS INT))#x] +Project [(cast(1 as bigint) <=> cast(cast(1 as int) as bigint)) AS (1 <=> CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2352,7 +2379,7 @@ Project [(cast(1 as bigint) <=> cast(1 as bigint)) AS (1 <=> CAST(1 AS BIGINT))# -- !query SELECT '1' <=> cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) <=> cast(1 as float)) AS (1 <=> CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) <=> cast(cast(1 as float) as double)) AS (1 <=> CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2374,7 +2401,7 @@ Project [(cast(1 as double) <=> cast(1 as double)) AS (1 <=> CAST(1 AS DOUBLE))# -- !query SELECT '1' <=> cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <=> cast(1 as decimal(10,0))) AS (1 <=> CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) <=> cast(cast(1 as decimal(10,0)) as double)) AS (1 <=> CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2440,7 +2467,7 @@ Project [(cast(1 as date) <=> cast(2017-12-11 09:30:00 as date)) AS (1 <=> CAST( -- !query SELECT cast(1 as tinyint) <=> '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) <=> cast(1 as tinyint)) AS (CAST(1 AS TINYINT) <=> 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) <=> cast(1 as bigint)) AS (CAST(1 AS TINYINT) <=> 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2451,7 +2478,7 @@ Project [(cast(1 as tinyint) <=> cast(1 as tinyint)) AS (CAST(1 AS TINYINT) <=> -- !query SELECT cast(1 as smallint) <=> '1' FROM t -- !query analysis -Project [(cast(1 as smallint) <=> cast(1 as 
smallint)) AS (CAST(1 AS SMALLINT) <=> 1)#x] +Project [(cast(cast(1 as smallint) as bigint) <=> cast(1 as bigint)) AS (CAST(1 AS SMALLINT) <=> 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2462,7 +2489,7 @@ Project [(cast(1 as smallint) <=> cast(1 as smallint)) AS (CAST(1 AS SMALLINT) < -- !query SELECT cast(1 as int) <=> '1' FROM t -- !query analysis -Project [(cast(1 as int) <=> cast(1 as int)) AS (CAST(1 AS INT) <=> 1)#x] +Project [(cast(cast(1 as int) as bigint) <=> cast(1 as bigint)) AS (CAST(1 AS INT) <=> 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2484,7 +2511,7 @@ Project [(cast(1 as bigint) <=> cast(1 as bigint)) AS (CAST(1 AS BIGINT) <=> 1)# -- !query SELECT cast(1 as float) <=> '1' FROM t -- !query analysis -Project [(cast(1 as float) <=> cast(1 as float)) AS (CAST(1 AS FLOAT) <=> 1)#x] +Project [(cast(cast(1 as float) as double) <=> cast(1 as double)) AS (CAST(1 AS FLOAT) <=> 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2506,7 +2533,7 @@ Project [(cast(1 as double) <=> cast(1 as double)) AS (CAST(1 AS DOUBLE) <=> 1)# -- !query SELECT cast(1 as decimal(10, 0)) <=> '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <=> cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <=> 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <=> cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) <=> 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2561,7 +2588,7 @@ Project [(cast(2017-12-11 09:30:00 as date) <=> cast(1 as date)) AS (CAST(2017-1 -- !query SELECT '1' < cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) < cast(1 as tinyint)) AS (1 < CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) < cast(cast(1 as tinyint) as bigint)) AS (1 < CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2572,7 +2599,7 @@ Project [(cast(1 as tinyint) < cast(1 as tinyint)) AS (1 < CAST(1 AS TINYINT))#x -- !query SELECT '1' < cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) < cast(1 as smallint)) AS (1 < CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) < cast(cast(1 as smallint) as bigint)) AS (1 < CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2583,7 +2610,7 @@ Project [(cast(1 as smallint) < cast(1 as smallint)) AS (1 < CAST(1 AS SMALLINT) -- !query SELECT '1' < cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) < cast(1 as int)) AS (1 < CAST(1 AS INT))#x] +Project [(cast(1 as bigint) < cast(cast(1 as int) as bigint)) AS (1 < CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2605,7 +2632,7 @@ Project [(cast(1 as bigint) < cast(1 as bigint)) AS (1 < CAST(1 AS BIGINT))#x] -- !query SELECT '1' < cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) < cast(1 as float)) AS (1 < CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) < cast(cast(1 as float) as double)) AS (1 < CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2627,7 +2654,7 @@ Project [(cast(1 as double) < cast(1 as double)) AS (1 < CAST(1 AS DOUBLE))#x] -- !query SELECT '1' < cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) < cast(1 as decimal(10,0))) AS (1 < CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) < cast(cast(1 as 
decimal(10,0)) as double)) AS (1 < CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2693,7 +2720,7 @@ Project [(cast(1 as date) < cast(2017-12-11 09:30:00 as date)) AS (1 < CAST(2017 -- !query SELECT '1' <= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) <= cast(1 as tinyint)) AS (1 <= CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) <= cast(cast(1 as tinyint) as bigint)) AS (1 <= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2704,7 +2731,7 @@ Project [(cast(1 as tinyint) <= cast(1 as tinyint)) AS (1 <= CAST(1 AS TINYINT)) -- !query SELECT '1' <= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) <= cast(1 as smallint)) AS (1 <= CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) <= cast(cast(1 as smallint) as bigint)) AS (1 <= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2715,7 +2742,7 @@ Project [(cast(1 as smallint) <= cast(1 as smallint)) AS (1 <= CAST(1 AS SMALLIN -- !query SELECT '1' <= cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) <= cast(1 as int)) AS (1 <= CAST(1 AS INT))#x] +Project [(cast(1 as bigint) <= cast(cast(1 as int) as bigint)) AS (1 <= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2737,7 +2764,7 @@ Project [(cast(1 as bigint) <= cast(1 as bigint)) AS (1 <= CAST(1 AS BIGINT))#x] -- !query SELECT '1' <= cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) <= cast(1 as float)) AS (1 <= CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) <= cast(cast(1 as float) as double)) AS (1 <= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2759,7 +2786,7 @@ Project [(cast(1 as double) <= cast(1 as double)) AS (1 <= CAST(1 AS DOUBLE))#x] -- !query SELECT '1' <= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <= cast(1 as decimal(10,0))) AS (1 <= CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) <= cast(cast(1 as decimal(10,0)) as double)) AS (1 <= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2825,7 +2852,7 @@ Project [(cast(1 as date) <= cast(2017-12-11 09:30:00 as date)) AS (1 <= CAST(20 -- !query SELECT '1' > cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) > cast(1 as tinyint)) AS (1 > CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) > cast(cast(1 as tinyint) as bigint)) AS (1 > CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2836,7 +2863,7 @@ Project [(cast(1 as tinyint) > cast(1 as tinyint)) AS (1 > CAST(1 AS TINYINT))#x -- !query SELECT '1' > cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) > cast(1 as smallint)) AS (1 > CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) > cast(cast(1 as smallint) as bigint)) AS (1 > CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2847,7 +2874,7 @@ Project [(cast(1 as smallint) > cast(1 as smallint)) AS (1 > CAST(1 AS SMALLINT) -- !query SELECT '1' > cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) > cast(1 as int)) AS (1 > CAST(1 AS INT))#x] +Project [(cast(1 as bigint) > cast(cast(1 as int) as bigint)) AS (1 > CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- 
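Every comparison hunk in this stretch (=, <=>, <, <=, and the >, >=, and <> hunks that follow) rewrites the same way: rather than casting the string literal down to the other operand's exact type, the analyzer now widens both sides to a common comparison type — BIGINT when the other side is integral, DOUBLE when it is FLOAT, DOUBLE, or DECIMAL. Two illustrative queries (not golden output), assuming view t:

SELECT '1' = cast(1 as tinyint) FROM t;          -- both sides compared as BIGINT (was: both as TINYINT)
SELECT '1' < cast(1 as decimal(10, 0)) FROM t;   -- both sides compared as DOUBLE (was: both as DECIMAL(10,0))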
Project [cast(1#x as int) AS 1#x] @@ -2869,7 +2896,7 @@ Project [(cast(1 as bigint) > cast(1 as bigint)) AS (1 > CAST(1 AS BIGINT))#x] -- !query SELECT '1' > cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) > cast(1 as float)) AS (1 > CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) > cast(cast(1 as float) as double)) AS (1 > CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2891,7 +2918,7 @@ Project [(cast(1 as double) > cast(1 as double)) AS (1 > CAST(1 AS DOUBLE))#x] -- !query SELECT '1' > cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) > cast(1 as decimal(10,0))) AS (1 > CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) > cast(cast(1 as decimal(10,0)) as double)) AS (1 > CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2957,7 +2984,7 @@ Project [(cast(1 as date) > cast(2017-12-11 09:30:00 as date)) AS (1 > CAST(2017 -- !query SELECT '1' >= cast(1 as tinyint) FROM t -- !query analysis -Project [(cast(1 as tinyint) >= cast(1 as tinyint)) AS (1 >= CAST(1 AS TINYINT))#x] +Project [(cast(1 as bigint) >= cast(cast(1 as tinyint) as bigint)) AS (1 >= CAST(1 AS TINYINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2968,7 +2995,7 @@ Project [(cast(1 as tinyint) >= cast(1 as tinyint)) AS (1 >= CAST(1 AS TINYINT)) -- !query SELECT '1' >= cast(1 as smallint) FROM t -- !query analysis -Project [(cast(1 as smallint) >= cast(1 as smallint)) AS (1 >= CAST(1 AS SMALLINT))#x] +Project [(cast(1 as bigint) >= cast(cast(1 as smallint) as bigint)) AS (1 >= CAST(1 AS SMALLINT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -2979,7 +3006,7 @@ Project [(cast(1 as smallint) >= cast(1 as smallint)) AS (1 >= CAST(1 AS SMALLIN -- !query SELECT '1' >= cast(1 as int) FROM t -- !query analysis -Project [(cast(1 as int) >= cast(1 as int)) AS (1 >= CAST(1 AS INT))#x] +Project [(cast(1 as bigint) >= cast(cast(1 as int) as bigint)) AS (1 >= CAST(1 AS INT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3001,7 +3028,7 @@ Project [(cast(1 as bigint) >= cast(1 as bigint)) AS (1 >= CAST(1 AS BIGINT))#x] -- !query SELECT '1' >= cast(1 as float) FROM t -- !query analysis -Project [(cast(1 as float) >= cast(1 as float)) AS (1 >= CAST(1 AS FLOAT))#x] +Project [(cast(1 as double) >= cast(cast(1 as float) as double)) AS (1 >= CAST(1 AS FLOAT))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3023,7 +3050,7 @@ Project [(cast(1 as double) >= cast(1 as double)) AS (1 >= CAST(1 AS DOUBLE))#x] -- !query SELECT '1' >= cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) >= cast(1 as decimal(10,0))) AS (1 >= CAST(1 AS DECIMAL(10,0)))#x] +Project [(cast(1 as double) >= cast(cast(1 as decimal(10,0)) as double)) AS (1 >= CAST(1 AS DECIMAL(10,0)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3089,7 +3116,7 @@ Project [(cast(1 as date) >= cast(2017-12-11 09:30:00 as date)) AS (1 >= CAST(20 -- !query SELECT '1' <> cast(1 as tinyint) FROM t -- !query analysis -Project [NOT (cast(1 as tinyint) = cast(1 as tinyint)) AS (NOT (1 = CAST(1 AS TINYINT)))#x] +Project [NOT (cast(1 as bigint) = cast(cast(1 as tinyint) as bigint)) AS (NOT (1 = CAST(1 AS TINYINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3100,7 
+3127,7 @@ Project [NOT (cast(1 as tinyint) = cast(1 as tinyint)) AS (NOT (1 = CAST(1 AS TI -- !query SELECT '1' <> cast(1 as smallint) FROM t -- !query analysis -Project [NOT (cast(1 as smallint) = cast(1 as smallint)) AS (NOT (1 = CAST(1 AS SMALLINT)))#x] +Project [NOT (cast(1 as bigint) = cast(cast(1 as smallint) as bigint)) AS (NOT (1 = CAST(1 AS SMALLINT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3111,7 +3138,7 @@ Project [NOT (cast(1 as smallint) = cast(1 as smallint)) AS (NOT (1 = CAST(1 AS -- !query SELECT '1' <> cast(1 as int) FROM t -- !query analysis -Project [NOT (cast(1 as int) = cast(1 as int)) AS (NOT (1 = CAST(1 AS INT)))#x] +Project [NOT (cast(1 as bigint) = cast(cast(1 as int) as bigint)) AS (NOT (1 = CAST(1 AS INT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3133,7 +3160,7 @@ Project [NOT (cast(1 as bigint) = cast(1 as bigint)) AS (NOT (1 = CAST(1 AS BIGI -- !query SELECT '1' <> cast(1 as float) FROM t -- !query analysis -Project [NOT (cast(1 as float) = cast(1 as float)) AS (NOT (1 = CAST(1 AS FLOAT)))#x] +Project [NOT (cast(1 as double) = cast(cast(1 as float) as double)) AS (NOT (1 = CAST(1 AS FLOAT)))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3155,7 +3182,7 @@ Project [NOT (cast(1 as double) = cast(1 as double)) AS (NOT (1 = CAST(1 AS DOUB -- !query SELECT '1' <> cast(1 as decimal(10, 0)) FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (NOT (1 = CAST(1 AS DECIMAL(10,0))))#x] +Project [NOT (cast(1 as double) = cast(cast(1 as decimal(10,0)) as double)) AS (NOT (1 = CAST(1 AS DECIMAL(10,0))))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3221,7 +3248,7 @@ Project [NOT (cast(1 as date) = cast(2017-12-11 09:30:00 as date)) AS (NOT (1 = -- !query SELECT cast(1 as tinyint) < '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) < cast(1 as tinyint)) AS (CAST(1 AS TINYINT) < 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) < cast(1 as bigint)) AS (CAST(1 AS TINYINT) < 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3232,7 +3259,7 @@ Project [(cast(1 as tinyint) < cast(1 as tinyint)) AS (CAST(1 AS TINYINT) < 1)#x -- !query SELECT cast(1 as smallint) < '1' FROM t -- !query analysis -Project [(cast(1 as smallint) < cast(1 as smallint)) AS (CAST(1 AS SMALLINT) < 1)#x] +Project [(cast(cast(1 as smallint) as bigint) < cast(1 as bigint)) AS (CAST(1 AS SMALLINT) < 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3243,7 +3270,7 @@ Project [(cast(1 as smallint) < cast(1 as smallint)) AS (CAST(1 AS SMALLINT) < 1 -- !query SELECT cast(1 as int) < '1' FROM t -- !query analysis -Project [(cast(1 as int) < cast(1 as int)) AS (CAST(1 AS INT) < 1)#x] +Project [(cast(cast(1 as int) as bigint) < cast(1 as bigint)) AS (CAST(1 AS INT) < 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3265,7 +3292,7 @@ Project [(cast(1 as bigint) < cast(1 as bigint)) AS (CAST(1 AS BIGINT) < 1)#x] -- !query SELECT cast(1 as float) < '1' FROM t -- !query analysis -Project [(cast(1 as float) < cast(1 as float)) AS (CAST(1 AS FLOAT) < 1)#x] +Project [(cast(cast(1 as float) as double) < cast(1 as double)) AS (CAST(1 AS FLOAT) < 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3287,7 +3314,7 @@ Project [(cast(1 as double) < cast(1 as double)) AS 
(CAST(1 AS DOUBLE) < 1)#x] -- !query SELECT cast(1 as decimal(10, 0)) < '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) < cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) < 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) < cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) < 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3353,7 +3380,7 @@ Project [(cast(2017-12-11 09:30:00 as date) < cast(1 as date)) AS (CAST(2017-12- -- !query SELECT cast(1 as tinyint) <= '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) <= cast(1 as tinyint)) AS (CAST(1 AS TINYINT) <= 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) <= cast(1 as bigint)) AS (CAST(1 AS TINYINT) <= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3364,7 +3391,7 @@ Project [(cast(1 as tinyint) <= cast(1 as tinyint)) AS (CAST(1 AS TINYINT) <= 1) -- !query SELECT cast(1 as smallint) <= '1' FROM t -- !query analysis -Project [(cast(1 as smallint) <= cast(1 as smallint)) AS (CAST(1 AS SMALLINT) <= 1)#x] +Project [(cast(cast(1 as smallint) as bigint) <= cast(1 as bigint)) AS (CAST(1 AS SMALLINT) <= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3375,7 +3402,7 @@ Project [(cast(1 as smallint) <= cast(1 as smallint)) AS (CAST(1 AS SMALLINT) <= -- !query SELECT cast(1 as int) <= '1' FROM t -- !query analysis -Project [(cast(1 as int) <= cast(1 as int)) AS (CAST(1 AS INT) <= 1)#x] +Project [(cast(cast(1 as int) as bigint) <= cast(1 as bigint)) AS (CAST(1 AS INT) <= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3397,7 +3424,7 @@ Project [(cast(1 as bigint) <= cast(1 as bigint)) AS (CAST(1 AS BIGINT) <= 1)#x] -- !query SELECT cast(1 as float) <= '1' FROM t -- !query analysis -Project [(cast(1 as float) <= cast(1 as float)) AS (CAST(1 AS FLOAT) <= 1)#x] +Project [(cast(cast(1 as float) as double) <= cast(1 as double)) AS (CAST(1 AS FLOAT) <= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3419,7 +3446,7 @@ Project [(cast(1 as double) <= cast(1 as double)) AS (CAST(1 AS DOUBLE) <= 1)#x] -- !query SELECT cast(1 as decimal(10, 0)) <= '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) <= cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) <= 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) <= cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) <= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3485,7 +3512,7 @@ Project [(cast(2017-12-11 09:30:00 as date) <= cast(1 as date)) AS (CAST(2017-12 -- !query SELECT cast(1 as tinyint) > '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) > cast(1 as tinyint)) AS (CAST(1 AS TINYINT) > 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) > cast(1 as bigint)) AS (CAST(1 AS TINYINT) > 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3496,7 +3523,7 @@ Project [(cast(1 as tinyint) > cast(1 as tinyint)) AS (CAST(1 AS TINYINT) > 1)#x -- !query SELECT cast(1 as smallint) > '1' FROM t -- !query analysis -Project [(cast(1 as smallint) > cast(1 as smallint)) AS (CAST(1 AS SMALLINT) > 1)#x] +Project [(cast(cast(1 as smallint) as bigint) > cast(1 as bigint)) AS (CAST(1 AS SMALLINT) > 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3507,7 +3534,7 @@ Project [(cast(1 as smallint) > cast(1 as smallint)) AS (CAST(1 AS SMALLINT) > 1 -- !query SELECT 
cast(1 as int) > '1' FROM t -- !query analysis -Project [(cast(1 as int) > cast(1 as int)) AS (CAST(1 AS INT) > 1)#x] +Project [(cast(cast(1 as int) as bigint) > cast(1 as bigint)) AS (CAST(1 AS INT) > 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3529,7 +3556,7 @@ Project [(cast(1 as bigint) > cast(1 as bigint)) AS (CAST(1 AS BIGINT) > 1)#x] -- !query SELECT cast(1 as float) > '1' FROM t -- !query analysis -Project [(cast(1 as float) > cast(1 as float)) AS (CAST(1 AS FLOAT) > 1)#x] +Project [(cast(cast(1 as float) as double) > cast(1 as double)) AS (CAST(1 AS FLOAT) > 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3551,7 +3578,7 @@ Project [(cast(1 as double) > cast(1 as double)) AS (CAST(1 AS DOUBLE) > 1)#x] -- !query SELECT cast(1 as decimal(10, 0)) > '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) > cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) > 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) > cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) > 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3617,7 +3644,7 @@ Project [(cast(2017-12-11 09:30:00 as date) > cast(1 as date)) AS (CAST(2017-12- -- !query SELECT cast(1 as tinyint) >= '1' FROM t -- !query analysis -Project [(cast(1 as tinyint) >= cast(1 as tinyint)) AS (CAST(1 AS TINYINT) >= 1)#x] +Project [(cast(cast(1 as tinyint) as bigint) >= cast(1 as bigint)) AS (CAST(1 AS TINYINT) >= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3628,7 +3655,7 @@ Project [(cast(1 as tinyint) >= cast(1 as tinyint)) AS (CAST(1 AS TINYINT) >= 1) -- !query SELECT cast(1 as smallint) >= '1' FROM t -- !query analysis -Project [(cast(1 as smallint) >= cast(1 as smallint)) AS (CAST(1 AS SMALLINT) >= 1)#x] +Project [(cast(cast(1 as smallint) as bigint) >= cast(1 as bigint)) AS (CAST(1 AS SMALLINT) >= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3639,7 +3666,7 @@ Project [(cast(1 as smallint) >= cast(1 as smallint)) AS (CAST(1 AS SMALLINT) >= -- !query SELECT cast(1 as int) >= '1' FROM t -- !query analysis -Project [(cast(1 as int) >= cast(1 as int)) AS (CAST(1 AS INT) >= 1)#x] +Project [(cast(cast(1 as int) as bigint) >= cast(1 as bigint)) AS (CAST(1 AS INT) >= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3661,7 +3688,7 @@ Project [(cast(1 as bigint) >= cast(1 as bigint)) AS (CAST(1 AS BIGINT) >= 1)#x] -- !query SELECT cast(1 as float) >= '1' FROM t -- !query analysis -Project [(cast(1 as float) >= cast(1 as float)) AS (CAST(1 AS FLOAT) >= 1)#x] +Project [(cast(cast(1 as float) as double) >= cast(1 as double)) AS (CAST(1 AS FLOAT) >= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3683,7 +3710,7 @@ Project [(cast(1 as double) >= cast(1 as double)) AS (CAST(1 AS DOUBLE) >= 1)#x] -- !query SELECT cast(1 as decimal(10, 0)) >= '1' FROM t -- !query analysis -Project [(cast(1 as decimal(10,0)) >= cast(1 as decimal(10,0))) AS (CAST(1 AS DECIMAL(10,0)) >= 1)#x] +Project [(cast(cast(1 as decimal(10,0)) as double) >= cast(1 as double)) AS (CAST(1 AS DECIMAL(10,0)) >= 1)#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3749,7 +3776,7 @@ Project [(cast(2017-12-11 09:30:00 as date) >= cast(1 as date)) AS (CAST(2017-12 -- !query SELECT cast(1 as tinyint) <> '1' FROM t -- !query analysis -Project [NOT (cast(1 as tinyint) = 
cast(1 as tinyint)) AS (NOT (CAST(1 AS TINYINT) = 1))#x] +Project [NOT (cast(cast(1 as tinyint) as bigint) = cast(1 as bigint)) AS (NOT (CAST(1 AS TINYINT) = 1))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3760,7 +3787,7 @@ Project [NOT (cast(1 as tinyint) = cast(1 as tinyint)) AS (NOT (CAST(1 AS TINYIN -- !query SELECT cast(1 as smallint) <> '1' FROM t -- !query analysis -Project [NOT (cast(1 as smallint) = cast(1 as smallint)) AS (NOT (CAST(1 AS SMALLINT) = 1))#x] +Project [NOT (cast(cast(1 as smallint) as bigint) = cast(1 as bigint)) AS (NOT (CAST(1 AS SMALLINT) = 1))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3771,7 +3798,7 @@ Project [NOT (cast(1 as smallint) = cast(1 as smallint)) AS (NOT (CAST(1 AS SMAL -- !query SELECT cast(1 as int) <> '1' FROM t -- !query analysis -Project [NOT (cast(1 as int) = cast(1 as int)) AS (NOT (CAST(1 AS INT) = 1))#x] +Project [NOT (cast(cast(1 as int) as bigint) = cast(1 as bigint)) AS (NOT (CAST(1 AS INT) = 1))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3793,7 +3820,7 @@ Project [NOT (cast(1 as bigint) = cast(1 as bigint)) AS (NOT (CAST(1 AS BIGINT) -- !query SELECT cast(1 as float) <> '1' FROM t -- !query analysis -Project [NOT (cast(1 as float) = cast(1 as float)) AS (NOT (CAST(1 AS FLOAT) = 1))#x] +Project [NOT (cast(cast(1 as float) as double) = cast(1 as double)) AS (NOT (CAST(1 AS FLOAT) = 1))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] @@ -3815,7 +3842,7 @@ Project [NOT (cast(1 as double) = cast(1 as double)) AS (NOT (CAST(1 AS DOUBLE) -- !query SELECT cast(1 as decimal(10, 0)) <> '1' FROM t -- !query analysis -Project [NOT (cast(1 as decimal(10,0)) = cast(1 as decimal(10,0))) AS (NOT (CAST(1 AS DECIMAL(10,0)) = 1))#x] +Project [NOT (cast(cast(1 as decimal(10,0)) as double) = cast(1 as double)) AS (NOT (CAST(1 AS DECIMAL(10,0)) = 1))#x] +- SubqueryAlias t +- View (`t`, [1#x]) +- Project [cast(1#x as int) AS 1#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/stringCastAndExpressions.sql.out index 22e60d0606382..e57f803124ee3 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/stringCastAndExpressions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/stringCastAndExpressions.sql.out @@ -197,7 +197,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select to_timestamp(a) from t -- !query analysis -Project [to_timestamp(a#x, None, TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(a)#x] +Project [to_timestamp(a#x, None, TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(a)#x] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -208,7 +208,7 @@ Project [to_timestamp(a#x, None, TimestampType, Some(America/Los_Angeles), false -- !query select to_timestamp('2018-01-01', a) from t -- !query analysis -Project [to_timestamp(2018-01-01, Some(a#x), TimestampType, Some(America/Los_Angeles), false) AS to_timestamp(2018-01-01, a)#x] +Project [to_timestamp(2018-01-01, Some(a#x), TimestampType, Some(America/Los_Angeles), true) AS to_timestamp(2018-01-01, a)#x] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -219,7 +219,7 @@ Project [to_timestamp(2018-01-01, Some(a#x), 
TimestampType, Some(America/Los_Ang -- !query select to_unix_timestamp(a) from t -- !query analysis -Project [to_unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), false) AS to_unix_timestamp(a, yyyy-MM-dd HH:mm:ss)#xL] +Project [to_unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), true) AS to_unix_timestamp(a, yyyy-MM-dd HH:mm:ss)#xL] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -230,7 +230,7 @@ Project [to_unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), -- !query select to_unix_timestamp('2018-01-01', a) from t -- !query analysis -Project [to_unix_timestamp(2018-01-01, a#x, Some(America/Los_Angeles), false) AS to_unix_timestamp(2018-01-01, a)#xL] +Project [to_unix_timestamp(2018-01-01, a#x, Some(America/Los_Angeles), true) AS to_unix_timestamp(2018-01-01, a)#xL] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -241,7 +241,7 @@ Project [to_unix_timestamp(2018-01-01, a#x, Some(America/Los_Angeles), false) AS -- !query select unix_timestamp(a) from t -- !query analysis -Project [unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), false) AS unix_timestamp(a, yyyy-MM-dd HH:mm:ss)#xL] +Project [unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), true) AS unix_timestamp(a, yyyy-MM-dd HH:mm:ss)#xL] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -252,7 +252,7 @@ Project [unix_timestamp(a#x, yyyy-MM-dd HH:mm:ss, Some(America/Los_Angeles), fal -- !query select unix_timestamp('2018-01-01', a) from t -- !query analysis -Project [unix_timestamp(2018-01-01, a#x, Some(America/Los_Angeles), false) AS unix_timestamp(2018-01-01, a)#xL] +Project [unix_timestamp(2018-01-01, a#x, Some(America/Los_Angeles), true) AS unix_timestamp(2018-01-01, a)#xL] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -285,7 +285,7 @@ Project [from_unixtime(cast(2018-01-01 as bigint), a#x, Some(America/Los_Angeles -- !query select next_day(a, 'MO') from t -- !query analysis -Project [next_day(cast(a#x as date), MO, false) AS next_day(a, MO)#x] +Project [next_day(cast(a#x as date), MO, true) AS next_day(a, MO)#x] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] @@ -296,7 +296,7 @@ Project [next_day(cast(a#x as date), MO, false) AS next_day(a, MO)#x] -- !query select next_day('2018-01-01', a) from t -- !query analysis -Project [next_day(cast(2018-01-01 as date), a#x, false) AS next_day(2018-01-01, a)#x] +Project [next_day(cast(2018-01-01 as date), a#x, true) AS next_day(2018-01-01, a)#x] +- SubqueryAlias t +- View (`t`, [a#x]) +- Project [cast(a#x as string) AS a#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/widenSetOperationTypes.sql.out index 029ec4abb6faf..e855cdc14a921 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/widenSetOperationTypes.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/widenSetOperationTypes.sql.out @@ -91,19 +91,20 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as float) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS TINYINT)#x as float) AS CAST(1 AS TINYINT)#x] + :- Project [cast(CAST(1 AS TINYINT)#x as double) AS CAST(1 AS TINYINT)#x] : +- Project [cast(1 as tinyint) AS CAST(1 
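In stringCastAndExpressions.sql.out the only change is the trailing boolean on to_timestamp, to_unix_timestamp, unix_timestamp, and next_day flipping from false to true. Reading that flag as the failOnError/ANSI-evaluation switch — an interpretation, not something the diff states — the behavioral consequence would be:

-- Sketch, assuming the trailing flag is failOnError and t provides string column a:
SELECT to_timestamp(a) FROM t;    -- unparseable input would now raise an error
SELECT next_day(a, 'MO') FROM t;  -- instead of quietly returning NULL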
AS TINYINT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS FLOAT)#x as double) AS CAST(2 AS FLOAT)#x] + +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -151,19 +152,20 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS TINYINT)#x as string) AS CAST(1 AS TINYINT)#x] + :- Project [cast(CAST(1 AS TINYINT)#x as bigint) AS CAST(1 AS TINYINT)#xL] : +- Project [cast(1 as tinyint) AS CAST(1 AS TINYINT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as bigint) AS CAST(2 AS STRING)#xL] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -350,19 +352,20 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as float) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS SMALLINT)#x as float) AS CAST(1 AS SMALLINT)#x] + :- Project [cast(CAST(1 AS SMALLINT)#x as double) AS CAST(1 AS SMALLINT)#x] : +- Project [cast(1 as smallint) AS CAST(1 AS SMALLINT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS FLOAT)#x as double) AS CAST(2 AS FLOAT)#x] + +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -410,19 +413,20 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS SMALLINT)#x as string) AS CAST(1 AS SMALLINT)#x] + :- Project [cast(CAST(1 AS SMALLINT)#x as bigint) AS CAST(1 AS SMALLINT)#xL] : +- Project [cast(1 as smallint) AS CAST(1 AS SMALLINT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as bigint) AS CAST(2 AS STRING)#xL] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -609,19 +613,20 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as float) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS 
INT)#x as float) AS CAST(1 AS INT)#x] + :- Project [cast(CAST(1 AS INT)#x as double) AS CAST(1 AS INT)#x] : +- Project [cast(1 as int) AS CAST(1 AS INT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS FLOAT)#x as double) AS CAST(2 AS FLOAT)#x] + +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -669,19 +674,20 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS INT)#x as string) AS CAST(1 AS INT)#x] + :- Project [cast(CAST(1 AS INT)#x as bigint) AS CAST(1 AS INT)#xL] : +- Project [cast(1 as int) AS CAST(1 AS INT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as bigint) AS CAST(2 AS STRING)#xL] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -868,19 +874,20 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as float) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS BIGINT)#xL as float) AS CAST(1 AS BIGINT)#x] + :- Project [cast(CAST(1 AS BIGINT)#xL as double) AS CAST(1 AS BIGINT)#x] : +- Project [cast(1 as bigint) AS CAST(1 AS BIGINT)#xL] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS FLOAT)#x as double) AS CAST(2 AS FLOAT)#x] + +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -929,19 +936,19 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS BIGINT)#xL as string) AS CAST(1 AS BIGINT)#x] - : +- Project [cast(1 as bigint) AS CAST(1 AS BIGINT)#xL] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(1 as bigint) AS CAST(1 AS BIGINT)#xL] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as bigint) AS CAST(2 AS STRING)#xL] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + 
+- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1049,13 +1056,14 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as tinyint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS TINYINT)#x as float) AS CAST(2 AS TINYINT)#x] + :- Project [cast(CAST(1 AS FLOAT)#x as double) AS CAST(1 AS FLOAT)#x] + : +- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS TINYINT)#x as double) AS CAST(2 AS TINYINT)#x] +- Project [cast(2 as tinyint) AS CAST(2 AS TINYINT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1069,13 +1077,14 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as smallint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS SMALLINT)#x as float) AS CAST(2 AS SMALLINT)#x] + :- Project [cast(CAST(1 AS FLOAT)#x as double) AS CAST(1 AS FLOAT)#x] + : +- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS SMALLINT)#x as double) AS CAST(2 AS SMALLINT)#x] +- Project [cast(2 as smallint) AS CAST(2 AS SMALLINT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1089,13 +1098,14 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as int) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS INT)#x as float) AS CAST(2 AS INT)#x] + :- Project [cast(CAST(1 AS FLOAT)#x as double) AS CAST(1 AS FLOAT)#x] + : +- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS INT)#x as double) AS CAST(2 AS INT)#x] +- Project [cast(2 as int) AS CAST(2 AS INT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1109,13 +1119,14 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as bigint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS BIGINT)#xL as float) AS CAST(2 AS BIGINT)#x] + :- Project [cast(CAST(1 AS FLOAT)#x as double) AS CAST(1 AS FLOAT)#x] + : +- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS BIGINT)#xL as double) AS CAST(2 AS BIGINT)#x] +- Project [cast(2 as bigint) AS CAST(2 AS BIGINT)#xL] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1189,19 +1200,20 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, 
false - :- Project [cast(CAST(1 AS FLOAT)#x as string) AS CAST(1 AS FLOAT)#x] + :- Project [cast(CAST(1 AS FLOAT)#x as double) AS CAST(1 AS FLOAT)#x] : +- Project [cast(1 as float) AS CAST(1 AS FLOAT)#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as double) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1448,19 +1460,19 @@ SELECT cast(1 as double) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS DOUBLE)#x as string) AS CAST(1 AS DOUBLE)#x] - : +- Project [cast(1 as double) AS CAST(1 AS DOUBLE)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(1 as double) AS CAST(1 AS DOUBLE)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as double) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1709,19 +1721,20 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(1 AS DECIMAL(10,0))#x as string) AS CAST(1 AS DECIMAL(10,0))#x] + :- Project [cast(CAST(1 AS DECIMAL(10,0))#x as double) AS CAST(1 AS DECIMAL(10,0))#x] : +- Project [cast(1 as decimal(10,0)) AS CAST(1 AS DECIMAL(10,0))#x] : +- SubqueryAlias t : +- View (`t`, [1#x]) : +- Project [cast(1#x as int) AS 1#x] : +- Project [1 AS 1#x] : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as double) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1829,13 +1842,14 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as tinyint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS TINYINT)#x as string) AS CAST(2 AS TINYINT)#x] + :- Project [cast(CAST(1 AS STRING)#x as bigint) AS CAST(1 AS STRING)#xL] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS TINYINT)#x as bigint) AS 
CAST(2 AS TINYINT)#xL] +- Project [cast(2 as tinyint) AS CAST(2 AS TINYINT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1849,13 +1863,14 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as smallint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS SMALLINT)#x as string) AS CAST(2 AS SMALLINT)#x] + :- Project [cast(CAST(1 AS STRING)#x as bigint) AS CAST(1 AS STRING)#xL] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS SMALLINT)#x as bigint) AS CAST(2 AS SMALLINT)#xL] +- Project [cast(2 as smallint) AS CAST(2 AS SMALLINT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1869,13 +1884,14 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as int) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS INT)#x as string) AS CAST(2 AS INT)#x] + :- Project [cast(CAST(1 AS STRING)#x as bigint) AS CAST(1 AS STRING)#xL] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS INT)#x as bigint) AS CAST(2 AS INT)#xL] +- Project [cast(2 as int) AS CAST(2 AS INT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1889,19 +1905,19 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as bigint) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS BIGINT)#xL as string) AS CAST(2 AS BIGINT)#x] - +- Project [cast(2 as bigint) AS CAST(2 AS BIGINT)#xL] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(CAST(1 AS STRING)#x as bigint) AS CAST(1 AS STRING)#xL] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2 as bigint) AS CAST(2 AS BIGINT)#xL] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1909,13 +1925,14 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as float) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS FLOAT)#x as string) AS CAST(2 AS FLOAT)#x] + :- Project [cast(CAST(1 AS STRING)#x as double) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- 
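The widenSetOperationTypes.sql.out hunks invert the old rule for UNIONs that mix STRING with another type: previously the non-string branch was cast to STRING, whereas now the string branch is cast to the wider numeric type — BIGINT against integral types, DOUBLE against FLOAT, DOUBLE, and DECIMAL — so the extra Project moves to the string side (and the STRING vs. BINARY case further below even goes from an INCOMPATIBLE_COLUMN_TYPE error to a valid plan that casts the string to BINARY). Illustrative queries (not golden output), assuming view t:

SELECT cast(1 as string) FROM t UNION SELECT cast(2 as int) FROM t;     -- output column now BIGINT, was STRING
SELECT cast(1 as string) FROM t UNION SELECT cast(2 as double) FROM t;  -- output column now DOUBLE, was STRING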
Project [cast(CAST(2 AS FLOAT)#x as double) AS CAST(2 AS FLOAT)#x] +- Project [cast(2 as float) AS CAST(2 AS FLOAT)#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1929,19 +1946,19 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as double) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS DOUBLE)#x as string) AS CAST(2 AS DOUBLE)#x] - +- Project [cast(2 as double) AS CAST(2 AS DOUBLE)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(CAST(1 AS STRING)#x as double) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2 as double) AS CAST(2 AS DOUBLE)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -1949,13 +1966,14 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2 AS DECIMAL(10,0))#x as string) AS CAST(2 AS DECIMAL(10,0))#x] + :- Project [cast(CAST(1 AS STRING)#x as double) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS DECIMAL(10,0))#x as double) AS CAST(2 AS DECIMAL(10,0))#x] +- Project [cast(2 as decimal(10,0)) AS CAST(2 AS DECIMAL(10,0))#x] +- SubqueryAlias t +- View (`t`, [1#x]) @@ -1986,51 +2004,41 @@ Distinct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", - "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"BINARY\"", - "dataType2" : "\"STRING\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t" - } ] -} +Distinct ++- Union false, false + :- Project [cast(CAST(1 AS STRING)#x as binary) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2 as binary) AS CAST(2 AS BINARY)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", - "messageParameters" : { - "columnOrdinalNumber" : "first", - 
"dataType1" : "\"BOOLEAN\"", - "dataType2" : "\"STRING\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t" - } ] -} +Distinct ++- Union false, false + :- Project [cast(CAST(1 AS STRING)#x as boolean) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2 as boolean) AS CAST(2 AS BOOLEAN)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2038,19 +2046,19 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)#x as string) AS CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)#x] - +- Project [cast(2017-12-11 09:30:00.0 as timestamp) AS CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(CAST(1 AS STRING)#x as timestamp) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2017-12-11 09:30:00.0 as timestamp) AS CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2058,19 +2066,19 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) -- !query analysis Distinct +- Union false, false - :- Project [cast(1 as string) AS CAST(1 AS STRING)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(CAST(2017-12-11 09:30:00 AS DATE)#x as string) AS CAST(2017-12-11 09:30:00 AS DATE)#x] - +- Project [cast(2017-12-11 09:30:00 as date) AS CAST(2017-12-11 09:30:00 AS DATE)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(CAST(1 AS STRING)#x as date) AS CAST(1 AS STRING)#x] + : +- Project [cast(1 as string) AS CAST(1 AS STRING)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(2017-12-11 09:30:00 as date) AS CAST(2017-12-11 09:30:00 AS DATE)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2251,26 +2259,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", - "messageParameters" : { - 
"columnOrdinalNumber" : "first", - "dataType1" : "\"STRING\"", - "dataType2" : "\"BINARY\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t" - } ] -} +Distinct ++- Union false, false + :- Project [cast(1 as binary) AS CAST(1 AS BINARY)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as binary) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2545,26 +2548,21 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", - "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"STRING\"", - "dataType2" : "\"BOOLEAN\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t" - } ] -} +Distinct ++- Union false, false + :- Project [cast(1 as boolean) AS CAST(1 AS BOOLEAN)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as boolean) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -2841,19 +2839,19 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(2017-12-12 09:30:00.0 AS TIMESTAMP)#x as string) AS CAST(2017-12-12 09:30:00.0 AS TIMESTAMP)#x] - : +- Project [cast(2017-12-12 09:30:00.0 as timestamp) AS CAST(2017-12-12 09:30:00.0 AS TIMESTAMP)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(2017-12-12 09:30:00.0 as timestamp) AS CAST(2017-12-12 09:30:00.0 AS TIMESTAMP)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as timestamp) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query @@ -3125,19 +3123,19 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as string) -- !query analysis Distinct +- Union false, false - :- Project [cast(CAST(2017-12-12 09:30:00 AS DATE)#x as string) AS CAST(2017-12-12 09:30:00 AS DATE)#x] - : +- 
Project [cast(2017-12-12 09:30:00 as date) AS CAST(2017-12-12 09:30:00 AS DATE)#x] - : +- SubqueryAlias t - : +- View (`t`, [1#x]) - : +- Project [cast(1#x as int) AS 1#x] - : +- Project [1 AS 1#x] - : +- OneRowRelation - +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] - +- SubqueryAlias t - +- View (`t`, [1#x]) - +- Project [cast(1#x as int) AS 1#x] - +- Project [1 AS 1#x] - +- OneRowRelation + :- Project [cast(2017-12-12 09:30:00 as date) AS CAST(2017-12-12 09:30:00 AS DATE)#x] + : +- SubqueryAlias t + : +- View (`t`, [1#x]) + : +- Project [cast(1#x as int) AS 1#x] + : +- Project [1 AS 1#x] + : +- OneRowRelation + +- Project [cast(CAST(2 AS STRING)#x as date) AS CAST(2 AS STRING)#x] + +- Project [cast(2 as string) AS CAST(2 AS STRING)#x] + +- SubqueryAlias t + +- View (`t`, [1#x]) + +- Project [cast(1#x as int) AS 1#x] + +- Project [1 AS 1#x] + +- OneRowRelation -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/windowFrameCoercion.sql.out index 170e7dff38ac3..d516b82508de5 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/windowFrameCoercion.sql.out @@ -301,13 +301,12 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('1' as binary) DESC RANGE BET -- !query analysis org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE", + "errorClass" : "DATATYPE_MISMATCH.RANGE_FRAME_INVALID_TYPE", "sqlState" : "42K09", "messageParameters" : { - "expectedType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", - "exprType" : "\"BINARY\"", - "location" : "upper", - "sqlExpr" : "\"RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING\"" + "orderSpecType" : "\"BINARY\"", + "sqlExpr" : "\"(PARTITION BY 1 ORDER BY CAST(1 AS BINARY) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)\"", + "valueBoundaryType" : "\"INT\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-cross-join.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-cross-join.sql.out index c5ee1742f5d7c..391eb371d0d54 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-cross-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-cross-join.sql.out @@ -95,7 +95,7 @@ Project [k#x, v1#x, k#x, v2#x] SELECT * FROM nt1 cross join nt2 where udf(nt1.v1) = "1" and udf(nt2.v2) = "22" -- !query analysis Project [k#x, v1#x, k#x, v2#x] -+- Filter ((cast(udf(cast(v1#x as string)) as int) = cast(1 as int)) AND (cast(udf(cast(v2#x as string)) as int) = cast(22 as int))) ++- Filter ((cast(cast(udf(cast(v1#x as string)) as int) as bigint) = cast(1 as bigint)) AND (cast(cast(udf(cast(v2#x as string)) as int) as bigint) = cast(22 as bigint))) +- Join Cross :- SubqueryAlias nt1 : +- View (`nt1`, [k#x, v1#x]) diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out index 5811a4ff6566c..e016a8e4fab1e 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out @@ -619,25 
+619,8 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT udf(every("true")) -- !query analysis -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"true\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"BOOLEAN\"", - "sqlExpr" : "\"every(true)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 24, - "fragment" : "every(\"true\")" - } ] -} +Aggregate [cast(udf(cast(every(cast(true as boolean)) as string)) as boolean) AS udf(every(true))#x] ++- OneRowRelation -- !query diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-union.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-union.sql.out index a1436d0a77c83..df316707c7261 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-union.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-union.sql.out @@ -40,31 +40,32 @@ Project [cast(udf(cast(c1#x as string)) as int) AS c1#x, cast(udf(cast(c2#x as s -- !query SELECT udf(c1) as c1, udf(c2) as c2 -FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 +FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 WHERE c2 = 'a' UNION ALL SELECT udf(c1) as c1, udf(c2) as c2 FROM t2 UNION ALL SELECT udf(c1) as c1, udf(c2) as c2 FROM t2) -- !query analysis -Project [cast(udf(cast(c1#x as string)) as decimal(11,1)) AS c1#x, cast(udf(cast(c2#x as string)) as string) AS c2#x] +Project [cast(udf(cast(c1#x as string)) as decimal(11,1)) AS c1#x, cast(udf(cast(c2#xL as string)) as bigint) AS c2#xL] +- SubqueryAlias __auto_generated_subquery_name +- Union false, false :- Union false, false - : :- Project [cast(c1#x as decimal(11,1)) AS c1#x, c2#x] + : :- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] : : +- Project [cast(udf(cast(c1#x as string)) as int) AS c1#x, cast(udf(cast(c2#x as string)) as string) AS c2#x] - : : +- SubqueryAlias t1 - : : +- View (`t1`, [c1#x, c2#x]) - : : +- Project [cast(c1#x as int) AS c1#x, cast(c2#x as string) AS c2#x] - : : +- SubqueryAlias tbl - : : +- LocalRelation [c1#x, c2#x] - : +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as string) AS c2#x] + : : +- Filter (c2#x = a) + : : +- SubqueryAlias t1 + : : +- View (`t1`, [c1#x, c2#x]) + : : +- Project [cast(c1#x as int) AS c1#x, cast(c2#x as string) AS c2#x] + : : +- SubqueryAlias tbl + : : +- LocalRelation [c1#x, c2#x] + : +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] : +- Project [cast(udf(cast(c1#x as string)) as decimal(2,1)) AS c1#x, cast(udf(cast(c2#x as string)) as int) AS c2#x] : +- SubqueryAlias t2 : +- View (`t2`, [c1#x, c2#x]) : +- Project [cast(c1#x as decimal(2,1)) AS c1#x, cast(c2#x as int) AS c2#x] : +- SubqueryAlias tbl : +- LocalRelation [c1#x, c2#x] - +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as string) AS c2#x] + +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] +- Project [cast(udf(cast(c1#x as string)) as decimal(2,1)) AS c1#x, cast(udf(cast(c2#x as string)) as int) AS c2#x] +- SubqueryAlias t2 +- View (`t2`, [c1#x, c2#x]) @@ -159,10 +160,10 @@ UNION ALL SELECT map(1, 2, 3, NULL), udf(1) -- !query analysis Union false, false -:- Project [cast(map(1, 2)#x as map) AS map(1, 2)#x, str#x] +:- Project [cast(map(1, 2)#x as map) AS map(1, 2)#x, cast(str#x as bigint) AS 
str#xL] : +- Project [map(1, 2) AS map(1, 2)#x, cast(udf(cast(str as string)) as string) AS str#x] : +- OneRowRelation -+- Project [map(1, 2, 3, NULL)#x, cast(udf(1)#x as string) AS udf(1)#x] ++- Project [map(1, 2, 3, NULL)#x, cast(udf(1)#x as bigint) AS udf(1)#xL] +- Project [map(1, 2, 3, cast(null as int)) AS map(1, 2, 3, NULL)#x, cast(udf(cast(1 as string)) as int) AS udf(1)#x] +- OneRowRelation @@ -173,10 +174,10 @@ UNION ALL SELECT array(1, 2, 3, NULL), udf(1) -- !query analysis Union false, false -:- Project [cast(array(1, 2)#x as array) AS array(1, 2)#x, str#x] +:- Project [cast(array(1, 2)#x as array) AS array(1, 2)#x, cast(str#x as bigint) AS str#xL] : +- Project [array(1, 2) AS array(1, 2)#x, cast(udf(cast(str as string)) as string) AS str#x] : +- OneRowRelation -+- Project [array(1, 2, 3, NULL)#x, cast(udf(1)#x as string) AS udf(1)#x] ++- Project [array(1, 2, 3, NULL)#x, cast(udf(1)#x as bigint) AS udf(1)#xL] +- Project [array(1, 2, 3, cast(null as int)) AS array(1, 2, 3, NULL)#x, cast(udf(cast(1 as string)) as int) AS udf(1)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/union.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/union.sql.out index cafdd850e86d6..93456003254b8 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/union.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/union.sql.out @@ -46,25 +46,25 @@ FROM (SELECT * FROM t1 UNION ALL SELECT * FROM t2) -- !query analysis -Project [c1#x, c2#x] +Project [c1#x, c2#xL] +- SubqueryAlias __auto_generated_subquery_name +- Union false, false :- Union false, false - : :- Project [cast(c1#x as decimal(11,1)) AS c1#x, c2#x] + : :- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] : : +- Project [c1#x, c2#x] : : +- SubqueryAlias t1 : : +- View (`t1`, [c1#x, c2#x]) : : +- Project [cast(c1#x as int) AS c1#x, cast(c2#x as string) AS c2#x] : : +- SubqueryAlias tbl : : +- LocalRelation [c1#x, c2#x] - : +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as string) AS c2#x] + : +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] : +- Project [c1#x, c2#x] : +- SubqueryAlias t2 : +- View (`t2`, [c1#x, c2#x]) : +- Project [cast(c1#x as decimal(2,1)) AS c1#x, cast(c2#x as int) AS c2#x] : +- SubqueryAlias tbl : +- LocalRelation [c1#x, c2#x] - +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as string) AS c2#x] + +- Project [cast(c1#x as decimal(11,1)) AS c1#x, cast(c2#x as bigint) AS c2#xL] +- Project [c1#x, c2#x] +- SubqueryAlias t2 +- View (`t2`, [c1#x, c2#x]) @@ -159,10 +159,10 @@ UNION ALL SELECT map(1, 2, 3, NULL), 1 -- !query analysis Union false, false -:- Project [cast(map(1, 2)#x as map) AS map(1, 2)#x, str#x] +:- Project [cast(map(1, 2)#x as map) AS map(1, 2)#x, cast(str#x as bigint) AS str#xL] : +- Project [map(1, 2) AS map(1, 2)#x, str AS str#x] : +- OneRowRelation -+- Project [map(1, 2, 3, NULL)#x, cast(1#x as string) AS 1#x] ++- Project [map(1, 2, 3, NULL)#x, cast(1#x as bigint) AS 1#xL] +- Project [map(1, 2, 3, cast(null as int)) AS map(1, 2, 3, NULL)#x, 1 AS 1#x] +- OneRowRelation @@ -173,10 +173,10 @@ UNION ALL SELECT array(1, 2, 3, NULL), 1 -- !query analysis Union false, false -:- Project [cast(array(1, 2)#x as array) AS array(1, 2)#x, str#x] +:- Project [cast(array(1, 2)#x as array) AS array(1, 2)#x, cast(str#x as bigint) AS str#xL] : +- Project [array(1, 2) AS array(1, 2)#x, str AS str#x] : +- OneRowRelation -+- Project [array(1, 2, 3, NULL)#x, cast(1#x as 
string) AS 1#x] ++- Project [array(1, 2, 3, NULL)#x, cast(1#x as bigint) AS 1#xL] +- Project [array(1, 2, 3, cast(null as int)) AS array(1, 2, 3, NULL)#x, 1 AS 1#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/url-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/url-functions.sql.out index 9f213bcb8c914..bdf90f6a0ed14 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/url-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/url-functions.sql.out @@ -2,56 +2,56 @@ -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'HOST') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, HOST, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, HOST)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, HOST, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, HOST)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'PATH') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PATH, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PATH)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PATH, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PATH)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'QUERY') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, QUERY, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, QUERY)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, QUERY, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, QUERY)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'REF') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, REF, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, REF)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, REF, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, REF)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'PROTOCOL') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PROTOCOL, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PROTOCOL)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PROTOCOL, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, PROTOCOL)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'FILE') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, FILE, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, FILE)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, FILE, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, FILE)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'AUTHORITY') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, AUTHORITY, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, AUTHORITY)#x] +Project 
[parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, AUTHORITY, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, AUTHORITY)#x] +- OneRowRelation -- !query select parse_url('http://userinfo@spark.apache.org/path?query=1#Ref', 'USERINFO') -- !query analysis -Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, USERINFO, false) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, USERINFO)#x] +Project [parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, USERINFO, true) AS parse_url(http://userinfo@spark.apache.org/path?query=1#Ref, USERINFO)#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/window.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/window.sql.out index 12a90ce32cc68..367d5b0167014 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/window.sql.out @@ -268,11 +268,11 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY t RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData ORDER BY cate, to_timestamp_ntz(val_timestamp) -- !query analysis -Sort [cate#x ASC NULLS FIRST, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) ASC NULLS FIRST], true +Sort [cate#x ASC NULLS FIRST, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) ASC NULLS FIRST], true +- Project [val_timestamp#x, cate#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04' DAY TO HOUR FOLLOWING)#x] +- Project [val_timestamp#x, cate#x, _w0#x, _w1#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04' DAY TO HOUR FOLLOWING)#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04' DAY TO HOUR FOLLOWING)#x] +- Window [avg(_w0#x) windowspecdefinition(cate#x, _w1#x ASC NULLS FIRST, specifiedwindowframe(RangeFrame, currentrow$(), INTERVAL '23 04' DAY TO HOUR)) AS avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04' DAY TO HOUR FOLLOWING)#x], [cate#x], [_w1#x ASC NULLS FIRST] - +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) AS _w1#x] + +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) AS _w1#x] +- SubqueryAlias testdata +- View (`testData`, [val#x, val_long#xL, val_double#x, val_date#x, val_timestamp#x, cate#x]) +- Project [cast(val#x as int) AS val#x, cast(val_long#xL as bigint) AS val_long#xL, cast(val_double#x as double) AS val_double#x, cast(val_date#x as date) AS val_date#x, cast(val_timestamp#x as timestamp) AS val_timestamp#x, cast(cate#x as string) AS cate#x] @@ -304,11 +304,11 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY t RANGE BETWEEN CURRENT ROW AND interval '1-1' year to month FOLLOWING) FROM testData ORDER BY cate, to_timestamp_ntz(val_timestamp) -- !query analysis -Sort [cate#x ASC NULLS FIRST, 
to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) ASC NULLS FIRST], true +Sort [cate#x ASC NULLS FIRST, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) ASC NULLS FIRST], true +- Project [val_timestamp#x, cate#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1-1' YEAR TO MONTH FOLLOWING)#x] +- Project [val_timestamp#x, cate#x, _w0#x, _w1#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1-1' YEAR TO MONTH FOLLOWING)#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1-1' YEAR TO MONTH FOLLOWING)#x] +- Window [avg(_w0#x) windowspecdefinition(cate#x, _w1#x ASC NULLS FIRST, specifiedwindowframe(RangeFrame, currentrow$(), INTERVAL '1-1' YEAR TO MONTH)) AS avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1-1' YEAR TO MONTH FOLLOWING)#x], [cate#x], [_w1#x ASC NULLS FIRST] - +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) AS _w1#x] + +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) AS _w1#x] +- SubqueryAlias testdata +- View (`testData`, [val#x, val_long#xL, val_double#x, val_date#x, val_timestamp#x, cate#x]) +- Project [cast(val#x as int) AS val#x, cast(val_long#xL as bigint) AS val_long#xL, cast(val_double#x as double) AS val_double#x, cast(val_date#x as date) AS val_date#x, cast(val_timestamp#x as timestamp) AS val_timestamp#x, cast(cate#x as string) AS cate#x] @@ -340,11 +340,11 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY t RANGE BETWEEN CURRENT ROW AND interval '1 2:3:4.001' day to second FOLLOWING) FROM testData ORDER BY cate, to_timestamp_ntz(val_timestamp) -- !query analysis -Sort [cate#x ASC NULLS FIRST, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) ASC NULLS FIRST], true +Sort [cate#x ASC NULLS FIRST, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) ASC NULLS FIRST], true +- Project [val_timestamp#x, cate#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)#x] +- Project [val_timestamp#x, cate#x, _w0#x, _w1#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)#x, avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)#x] +- Window [avg(_w0#x) windowspecdefinition(cate#x, _w1#x ASC NULLS FIRST, specifiedwindowframe(RangeFrame, currentrow$(), INTERVAL '1 02:03:04.001' DAY TO SECOND)) AS avg(val_timestamp) OVER (PARTITION BY cate ORDER BY to_timestamp_ntz(val_timestamp) ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)#x], [cate#x], [_w1#x ASC NULLS FIRST] 
- +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), false) AS _w1#x] + +- Project [val_timestamp#x, cate#x, cast(val_timestamp#x as double) AS _w0#x, to_timestamp_ntz(val_timestamp#x, None, TimestampNTZType, Some(America/Los_Angeles), true) AS _w1#x] +- SubqueryAlias testdata +- View (`testData`, [val#x, val_long#xL, val_double#x, val_date#x, val_timestamp#x, cate#x]) +- Project [cast(val#x as int) AS val#x, cast(val_long#xL as bigint) AS val_long#xL, cast(val_double#x as double) AS val_double#x, cast(val_date#x as date) AS val_date#x, cast(val_timestamp#x as timestamp) AS val_timestamp#x, cast(cate#x as string) AS cate#x] diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out index de9fb2f395210..e0a249e4cf3fe 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out @@ -9,7 +9,7 @@ Project [to_xml((indent,), named_struct(a, 1, b, 2), Some(America/Los_Angeles)) -- !query select to_xml(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy', 'indent', '')) -- !query analysis -Project [to_xml((timestampFormat,dd/MM/yyyy), (indent,), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), false)), Some(America/Los_Angeles)) AS to_xml(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x] +Project [to_xml((timestampFormat,dd/MM/yyyy), (indent,), named_struct(time, to_timestamp(2015-08-26, Some(yyyy-MM-dd), TimestampType, Some(America/Los_Angeles), true)), Some(America/Los_Angeles)) AS to_xml(named_struct(time, to_timestamp(2015-08-26, yyyy-MM-dd)))#x] +- OneRowRelation diff --git a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql index 0775b9780332c..13bbd9d81b799 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql @@ -88,7 +88,7 @@ SELECT CAST(10 AS DECIMAL(10, 2)) div CAST(3 AS DECIMAL(5, 1)); set spark.sql.decimalOperations.allowPrecisionLoss=false; -- test decimal operations -select id, a+b, a-b, a*b, a/b from decimals_test order by id; +select /*+ COALESCE(1) */ id, a+b, a-b, a*b, a/b from decimals_test order by id; -- test operations between decimals and constants select id, a*10, b/10 from decimals_test order by id; diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/array.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/array.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/array.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/array.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/cast.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/cast.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/cast.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/cast.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/conditional-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/conditional-functions.sql similarity index 100% rename from 
sql/core/src/test/resources/sql-tests/inputs/ansi/conditional-functions.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/conditional-functions.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/date.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/date.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/date.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/date.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/datetime-parsing-invalid.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/datetime-parsing-invalid.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/datetime-parsing-invalid.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/datetime-parsing-invalid.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/datetime-special.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/datetime-special.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/datetime-special.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/datetime-special.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/decimalArithmeticOperations.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/decimalArithmeticOperations.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/decimalArithmeticOperations.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-disabled.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/double-quoted-identifiers-disabled.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-disabled.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/double-quoted-identifiers-disabled.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-enabled.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/double-quoted-identifiers-enabled.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-enabled.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/double-quoted-identifiers-enabled.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/higher-order-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/higher-order-functions.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/higher-order-functions.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/higher-order-functions.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/interval.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/interval.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/keywords.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/keywords.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/keywords.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/keywords.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/literals.sql 
b/sql/core/src/test/resources/sql-tests/inputs/nonansi/literals.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/literals.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/literals.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/map.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/map.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/map.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/map.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/math.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/math.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/math.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/math.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/parse-schema-string.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/parse-schema-string.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/parse-schema-string.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/parse-schema-string.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/string-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/string-functions.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/string-functions.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/string-functions.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/timestamp.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/timestamp.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/timestamp.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/timestamp.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/try_aggregates.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/try_aggregates.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/try_aggregates.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/try_aggregates.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/try_arithmetic.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/try_arithmetic.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/try_arithmetic.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/try_arithmetic.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/try_datetime_functions.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/try_datetime_functions.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/try_datetime_functions.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/try_datetime_functions.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/try_element_at.sql b/sql/core/src/test/resources/sql-tests/inputs/nonansi/try_element_at.sql similarity index 100% rename from sql/core/src/test/resources/sql-tests/inputs/ansi/try_element_at.sql rename to sql/core/src/test/resources/sql-tests/inputs/nonansi/try_element_at.sql diff --git a/sql/core/src/test/resources/sql-tests/inputs/pipe-operators.sql b/sql/core/src/test/resources/sql-tests/inputs/pipe-operators.sql index 8bca7144c0a98..6261bc93b1856 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/pipe-operators.sql +++ 
b/sql/core/src/test/resources/sql-tests/inputs/pipe-operators.sql @@ -548,7 +548,8 @@ values (0, 'abc') tab(x, y) -- Union distinct with a VALUES list. values (0, 1) tab(x, y) -|> union table t; +|> union table t +|> where x = 0; -- Union all with a table subquery on both the source and target sides. (select * from t) diff --git a/sql/core/src/test/resources/sql-tests/inputs/udf/udf-union.sql b/sql/core/src/test/resources/sql-tests/inputs/udf/udf-union.sql index 883369705c3cb..207bf557acb0c 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/udf/udf-union.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/udf/udf-union.sql @@ -11,7 +11,7 @@ FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 -- Type Coerced Union SELECT udf(c1) as c1, udf(c2) as c2 -FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 +FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 WHERE c2 = 'a' UNION ALL SELECT udf(c1) as c1, udf(c2) as c2 FROM t2 UNION ALL diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out deleted file mode 100644 index 0dbdf1d9975c9..0000000000000 --- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out +++ /dev/null @@ -1,1999 +0,0 @@ --- Automatically generated by SQLQueryTestSuite --- !query -SELECT CAST('1.23' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1.23'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 26, - "fragment" : "CAST('1.23' AS int)" - } ] -} - - --- !query -SELECT CAST('1.23' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1.23'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "CAST('1.23' AS long)" - } ] -} - - --- !query -SELECT CAST('-4.56' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'-4.56'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "CAST('-4.56' AS int)" - } ] -} - - --- !query -SELECT CAST('-4.56' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'-4.56'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "CAST('-4.56' AS long)" - } ] -} - - --- !query -SELECT CAST('abc' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - 
"objectName" : "", - "startIndex" : 8, - "stopIndex" : 25, - "fragment" : "CAST('abc' AS int)" - } ] -} - - --- !query -SELECT CAST('abc' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 26, - "fragment" : "CAST('abc' AS long)" - } ] -} - - --- !query -SELECT CAST('abc' AS float) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"FLOAT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "CAST('abc' AS float)" - } ] -} - - --- !query -SELECT CAST('abc' AS double) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "CAST('abc' AS double)" - } ] -} - - --- !query -SELECT CAST('1234567890123' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1234567890123'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "CAST('1234567890123' AS int)" - } ] -} - - --- !query -SELECT CAST('12345678901234567890123' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'12345678901234567890123'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 46, - "fragment" : "CAST('12345678901234567890123' AS long)" - } ] -} - - --- !query -SELECT CAST('' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "''", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 22, - "fragment" : "CAST('' AS int)" - } ] -} - - --- !query -SELECT CAST('' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "''", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 23, - "fragment" : "CAST('' AS long)" - } ] -} - - --- !query -SELECT CAST('' AS float) --- !query schema -struct<> --- !query output 
-org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "''", - "sourceType" : "\"STRING\"", - "targetType" : "\"FLOAT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "CAST('' AS float)" - } ] -} - - --- !query -SELECT CAST('' AS double) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "''", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 25, - "fragment" : "CAST('' AS double)" - } ] -} - - --- !query -SELECT CAST(NULL AS int) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT CAST(NULL AS long) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT CAST('123.a' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'123.a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "CAST('123.a' AS int)" - } ] -} - - --- !query -SELECT CAST('123.a' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'123.a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "CAST('123.a' AS long)" - } ] -} - - --- !query -SELECT CAST('123.a' AS float) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'123.a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"FLOAT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 29, - "fragment" : "CAST('123.a' AS float)" - } ] -} - - --- !query -SELECT CAST('123.a' AS double) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'123.a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "CAST('123.a' AS double)" - } ] -} - - --- !query -SELECT CAST('-2147483648' AS int) --- !query schema -struct --- !query output --2147483648 - - --- !query -SELECT CAST('-2147483649' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'-2147483649'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 33, - "fragment" : "CAST('-2147483649' AS int)" - } ] -} - - --- !query -SELECT 
CAST('2147483647' AS int) --- !query schema -struct --- !query output -2147483647 - - --- !query -SELECT CAST('2147483648' AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'2147483648'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 32, - "fragment" : "CAST('2147483648' AS int)" - } ] -} - - --- !query -SELECT CAST('-9223372036854775808' AS long) --- !query schema -struct --- !query output --9223372036854775808 - - --- !query -SELECT CAST('-9223372036854775809' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'-9223372036854775809'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "CAST('-9223372036854775809' AS long)" - } ] -} - - --- !query -SELECT CAST('9223372036854775807' AS long) --- !query schema -struct --- !query output -9223372036854775807 - - --- !query -SELECT CAST('9223372036854775808' AS long) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'9223372036854775808'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 42, - "fragment" : "CAST('9223372036854775808' AS long)" - } ] -} - - --- !query -SELECT HEX(CAST('abc' AS binary)) --- !query schema -struct --- !query output -616263 - - --- !query -SELECT HEX(CAST(CAST(123 AS byte) AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"", - "srcType" : "\"TINYINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 44, - "fragment" : "CAST(CAST(123 AS byte) AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(CAST(-123 AS TINYINT) AS BINARY)\"", - "srcType" : "\"TINYINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 45, - "fragment" : "CAST(CAST(-123 AS byte) AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(123S AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - 
"configVal" : "'false'", - "sqlExpr" : "\"CAST(123 AS BINARY)\"", - "srcType" : "\"SMALLINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 31, - "fragment" : "CAST(123S AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(-123S AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(-123 AS BINARY)\"", - "srcType" : "\"SMALLINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 32, - "fragment" : "CAST(-123S AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(123 AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(123 AS BINARY)\"", - "srcType" : "\"INT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 30, - "fragment" : "CAST(123 AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(-123 AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(-123 AS BINARY)\"", - "srcType" : "\"INT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 31, - "fragment" : "CAST(-123 AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(123L AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(123 AS BINARY)\"", - "srcType" : "\"BIGINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 31, - "fragment" : "CAST(123L AS binary)" - } ] -} - - --- !query -SELECT HEX(CAST(-123L AS binary)) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(-123 AS BINARY)\"", - "srcType" : "\"BIGINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 32, - "fragment" : "CAST(-123L AS binary)" - } ] -} - - --- !query -DESC FUNCTION boolean --- !query schema -struct --- !query output -Class: org.apache.spark.sql.catalyst.expressions.Cast -Function: boolean -Usage: boolean(expr) - Casts the value `expr` to the target data type `boolean`. 
- - --- !query -DESC FUNCTION EXTENDED boolean --- !query schema -struct --- !query output -Class: org.apache.spark.sql.catalyst.expressions.Cast -Extended Usage: - No example/argument for boolean. - - Since: 2.0.1 - -Function: boolean -Usage: boolean(expr) - Casts the value `expr` to the target data type `boolean`. - - --- !query -SELECT CAST('interval 3 month 1 hour' AS interval) --- !query schema -struct --- !query output -3 months 1 hours - - --- !query -SELECT CAST("interval '3-1' year to month" AS interval year to month) --- !query schema -struct --- !query output -3-1 - - --- !query -SELECT CAST("interval '3 00:00:01' day to second" AS interval day to second) --- !query schema -struct --- !query output -3 00:00:01.000000000 - - --- !query -SELECT CAST(interval 3 month 1 hour AS string) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "_LEGACY_ERROR_TEMP_0029", - "messageParameters" : { - "literal" : "interval 3 month 1 hour" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 35, - "fragment" : "interval 3 month 1 hour" - } ] -} - - --- !query -SELECT CAST(interval 3 year 1 month AS string) --- !query schema -struct --- !query output -INTERVAL '3-1' YEAR TO MONTH - - --- !query -SELECT CAST(interval 3 day 1 second AS string) --- !query schema -struct --- !query output -INTERVAL '3 00:00:01' DAY TO SECOND - - --- !query -select cast(' 1' as tinyint) --- !query schema -struct --- !query output -1 - - --- !query -select cast(' 1\t' as tinyint) --- !query schema -struct --- !query output -1 - - --- !query -select cast(' 1' as smallint) --- !query schema -struct --- !query output -1 - - --- !query -select cast(' 1' as INT) --- !query schema -struct --- !query output -1 - - --- !query -select cast(' 1' as bigint) --- !query schema -struct --- !query output -1 - - --- !query -select cast(' 1' as float) --- !query schema -struct --- !query output -1.0 - - --- !query -select cast(' 1 ' as DOUBLE) --- !query schema -struct --- !query output -1.0 - - --- !query -select cast('1.0 ' as DEC) --- !query schema -struct --- !query output -1 - - --- !query -select cast('1中文' as tinyint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1中文'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TINYINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 29, - "fragment" : "cast('1中文' as tinyint)" - } ] -} - - --- !query -select cast('1中文' as smallint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1中文'", - "sourceType" : "\"STRING\"", - "targetType" : "\"SMALLINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "cast('1中文' as smallint)" - } ] -} - - --- !query -select cast('1中文' as INT) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1中文'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - 
"stopIndex" : 25, - "fragment" : "cast('1中文' as INT)" - } ] -} - - --- !query -select cast('中文1' as bigint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'中文1'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "cast('中文1' as bigint)" - } ] -} - - --- !query -select cast('1中文' as bigint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1中文'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "cast('1中文' as bigint)" - } ] -} - - --- !query -select cast('\t\t true \n\r ' as boolean) --- !query schema -struct --- !query output -true - - --- !query -select cast('\t\n false \t\r' as boolean) --- !query schema -struct --- !query output -false - - --- !query -select cast('\t\n xyz \t\r' as boolean) --- !query schema -struct<> --- !query output -org.apache.spark.SparkRuntimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'\t\n xyz \t\r'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BOOLEAN\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "cast('\\t\\n xyz \\t\\r' as boolean)" - } ] -} - - --- !query -select cast('23.45' as decimal(4, 2)) --- !query schema -struct --- !query output -23.45 - - --- !query -select cast('123.45' as decimal(4, 2)) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "4", - "scale" : "2", - "value" : "123.45" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "cast('123.45' as decimal(4, 2))" - } ] -} - - --- !query -select cast('xyz' as decimal(4, 2)) --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'xyz'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DECIMAL(4,2)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "cast('xyz' as decimal(4, 2))" - } ] -} - - --- !query -select cast('2022-01-01' as date) --- !query schema -struct --- !query output -2022-01-01 - - --- !query -select cast('a' as date) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DATE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "cast('a' as date)" - } ] -} - - --- !query -select cast('2022-01-01 00:00:00' as timestamp) --- !query schema -struct --- !query output -2022-01-01 00:00:00 - - 
--- !query -select cast('a' as timestamp) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 29, - "fragment" : "cast('a' as timestamp)" - } ] -} - - --- !query -select cast('2022-01-01 00:00:00' as timestamp_ntz) --- !query schema -struct --- !query output -2022-01-01 00:00:00 - - --- !query -select cast('a' as timestamp_ntz) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP_NTZ\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 33, - "fragment" : "cast('a' as timestamp_ntz)" - } ] -} - - --- !query -select cast(cast('inf' as double) as timestamp) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "Infinity", - "sourceType" : "\"DOUBLE\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 47, - "fragment" : "cast(cast('inf' as double) as timestamp)" - } ] -} - - --- !query -select cast(cast('inf' as float) as timestamp) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "Infinity", - "sourceType" : "\"DOUBLE\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 46, - "fragment" : "cast(cast('inf' as float) as timestamp)" - } ] -} - - --- !query -select cast(interval '1' year as tinyint) --- !query schema -struct --- !query output -1 - - --- !query -select cast(interval '-10-2' year to month as smallint) --- !query schema -struct --- !query output --122 - - --- !query -select cast(interval '1000' month as int) --- !query schema -struct --- !query output -1000 - - --- !query -select cast(interval -'10.123456' second as tinyint) --- !query schema -struct --- !query output --10 - - --- !query -select cast(interval '23:59:59' hour to second as smallint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"INTERVAL HOUR TO SECOND\"", - "targetType" : "\"SMALLINT\"", - "value" : "INTERVAL '23:59:59' HOUR TO SECOND" - } -} - - --- !query -select cast(interval -'1 02:03:04.123' day to second as int) --- !query schema -struct --- !query output --93784 - - --- !query -select cast(interval '10' day as bigint) --- !query schema -struct --- !query output -10 - - --- !query -select cast(interval '-1000' month as tinyint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"INTERVAL MONTH\"", - "targetType" : "\"TINYINT\"", - "value" : "INTERVAL '-1000' MONTH" - } -} - - --- !query 
-select cast(interval '1000000' second as smallint) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"INTERVAL SECOND\"", - "targetType" : "\"SMALLINT\"", - "value" : "INTERVAL '1000000' SECOND" - } -} - - --- !query -select cast(1Y as interval year) --- !query schema -struct --- !query output -1-0 - - --- !query -select cast(-122S as interval year to month) --- !query schema -struct --- !query output --10-2 - - --- !query -select cast(ym as interval year to month) from values(-122S) as t(ym) --- !query schema -struct --- !query output --10-2 - - --- !query -select cast(1000 as interval month) --- !query schema -struct --- !query output -83-4 - - --- !query -select cast(-10L as interval second) --- !query schema -struct --- !query output --0 00:00:10.000000000 - - --- !query -select cast(100Y as interval hour to second) --- !query schema -struct --- !query output -0 00:01:40.000000000 - - --- !query -select cast(dt as interval hour to second) from values(100Y) as t(dt) --- !query schema -struct --- !query output -0 00:01:40.000000000 - - --- !query -select cast(-1000S as interval day to second) --- !query schema -struct --- !query output --0 00:16:40.000000000 - - --- !query -select cast(10 as interval day) --- !query schema -struct --- !query output -10 00:00:00.000000000 - - --- !query -select cast(2147483647 as interval year) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"INT\"", - "targetType" : "\"INTERVAL YEAR\"", - "value" : "2147483647" - } -} - - --- !query -select cast(-9223372036854775808L as interval day) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"BIGINT\"", - "targetType" : "\"INTERVAL DAY\"", - "value" : "-9223372036854775808L" - } -} - - --- !query -select cast(interval '-1' year as decimal(10, 0)) --- !query schema -struct --- !query output --1 - - --- !query -select cast(interval '1.000001' second as decimal(10, 6)) --- !query schema -struct --- !query output -1.000001 - - --- !query -select cast(interval '08:11:10.001' hour to second as decimal(10, 4)) --- !query schema -struct --- !query output -29470.0010 - - --- !query -select cast(interval '1 01:02:03.1' day to second as decimal(8, 1)) --- !query schema -struct --- !query output -90123.1 - - --- !query -select cast(interval '10.123' second as decimal(4, 2)) --- !query schema -struct --- !query output -10.12 - - --- !query -select cast(interval '10.005' second as decimal(4, 2)) --- !query schema -struct --- !query output -10.01 - - --- !query -select cast(interval '10.123' second as decimal(5, 2)) --- !query schema -struct --- !query output -10.12 - - --- !query -select cast(interval '10.123' second as decimal(1, 0)) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "1", - "scale" : "0", - "value" : "10.123000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "cast(interval '10.123' second as 
decimal(1, 0))" - } ] -} - - --- !query -select cast(10.123456BD as interval day to second) --- !query schema -struct --- !query output -0 00:00:10.123456000 - - --- !query -select cast(80.654321BD as interval hour to minute) --- !query schema -struct --- !query output -0 01:20:00.000000000 - - --- !query -select cast(-10.123456BD as interval year to month) --- !query schema -struct --- !query output --0-10 - - --- !query -select cast(10.654321BD as interval month) --- !query schema -struct --- !query output -0-11 - - --- !query -SELECT '1.23' :: int --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1.23'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 20, - "fragment" : "'1.23' :: int" - } ] -} - - --- !query -SELECT 'abc' :: int --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 19, - "fragment" : "'abc' :: int" - } ] -} - - --- !query -SELECT '12345678901234567890123' :: long --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'12345678901234567890123'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 40, - "fragment" : "'12345678901234567890123' :: long" - } ] -} - - --- !query -SELECT '' :: int --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "''", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 16, - "fragment" : "'' :: int" - } ] -} - - --- !query -SELECT NULL :: int --- !query schema -struct --- !query output -NULL - - --- !query -SELECT '123.a' :: int --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'123.a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 21, - "fragment" : "'123.a' :: int" - } ] -} - - --- !query -SELECT '-2147483648' :: int --- !query schema -struct --- !query output --2147483648 - - --- !query -SELECT HEX('abc' :: binary) --- !query schema -struct --- !query output -616263 - - --- !query -SELECT HEX((123 :: byte) :: binary) --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", - "sqlState" : "42K09", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "configVal" : "'false'", - "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"", - "srcType" : 
"\"TINYINT\"", - "targetType" : "\"BINARY\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 12, - "stopIndex" : 34, - "fragment" : "(123 :: byte) :: binary" - } ] -} - - --- !query -SELECT 'interval 3 month 1 hour' :: interval --- !query schema -struct --- !query output -3 months 1 hours - - --- !query -SELECT interval 3 day 1 second :: string --- !query schema -struct --- !query output -INTERVAL '3 00:00:01' DAY TO SECOND - - --- !query -select ' 1 ' :: DOUBLE --- !query schema -struct --- !query output -1.0 - - --- !query -select '1.0 ' :: DEC --- !query schema -struct --- !query output -1 - - --- !query -select '\t\t true \n\r ' :: boolean --- !query schema -struct --- !query output -true - - --- !query -select '2022-01-01 00:00:00' :: timestamp --- !query schema -struct --- !query output -2022-01-01 00:00:00 - - --- !query -select interval '-10-2' year to month :: smallint --- !query schema -struct --- !query output --122 - - --- !query -select -10L :: interval second --- !query schema -struct --- !query output --0 00:00:10.000000000 - - --- !query -select interval '08:11:10.001' hour to second :: decimal(10, 4) --- !query schema -struct --- !query output -29470.0010 - - --- !query -select 10.123456BD :: interval day to second --- !query schema -struct --- !query output -0 00:00:10.123456000 - - --- !query -SELECT '1.23' :: int :: long --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1.23'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 20, - "fragment" : "'1.23' :: int" - } ] -} - - --- !query -SELECT '2147483648' :: long :: int --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"BIGINT\"", - "targetType" : "\"INT\"", - "value" : "2147483648L" - } -} - - --- !query -SELECT CAST('2147483648' :: long AS int) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "CAST_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "sourceType" : "\"BIGINT\"", - "targetType" : "\"INT\"", - "value" : "2147483648L" - } -} - - --- !query -SELECT map(1, '123', 2, '456')[1] :: int --- !query schema -struct --- !query output -123 - - --- !query -SELECT '2147483648' :: BINT --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "UNSUPPORTED_DATATYPE", - "sqlState" : "0A000", - "messageParameters" : { - "typeName" : "\"BINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 24, - "stopIndex" : 27, - "fragment" : "BINT" - } ] -} - - --- !query -SELECT '2147483648' :: SELECT --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "UNSUPPORTED_DATATYPE", - "sqlState" : "0A000", - "messageParameters" : { - "typeName" : "\"SELECT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 24, - "stopIndex" : 29, - "fragment" : "SELECT" - } ] -} - - --- !query -SELECT FALSE IS NOT NULL :: string --- !query schema -struct<> --- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" 
: "PARSE_SYNTAX_ERROR", - "sqlState" : "42601", - "messageParameters" : { - "error" : "'::'", - "hint" : "" - } -} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out deleted file mode 100644 index 0708a523900ff..0000000000000 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ /dev/null @@ -1,465 +0,0 @@ --- Automatically generated by SQLQueryTestSuite --- !query -select to_timestamp('294248', 'y') --- !query schema -struct<> --- !query output -java.lang.ArithmeticException -long overflow - - --- !query -select to_timestamp('1', 'yy') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '1' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('-12', 'yy') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '-12' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('123', 'yy') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '123' could not be parsed, unparsed text found at index 2" - } -} - - --- !query -select to_timestamp('1', 'yyy') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '1' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('1234567', 'yyyyyyy') --- !query schema -struct<> --- !query output -org.apache.spark.SparkUpgradeException -{ - "errorClass" : "INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION", - "sqlState" : "42K0B", - "messageParameters" : { - "config" : "\"spark.sql.legacy.timeParserPolicy\"", - "docroot" : "https://spark.apache.org/docs/latest", - "pattern" : "'yyyyyyy'" - } -} - - --- !query -select to_timestamp('366', 'D') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year" - } -} - - --- !query -select to_timestamp('9', 'DD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '9' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('366', 'DD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year" - } -} - - --- !query 
-select to_timestamp('9', 'DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '9' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('99', 'DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '99' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp('30-365', 'dd-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31." - } -} - - --- !query -select to_timestamp('11-365', 'MM-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31." - } -} - - --- !query -select to_timestamp('2019-366', 'yyyy-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year" - } -} - - --- !query -select to_timestamp('12-30-365', 'MM-dd-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31." 
- } -} - - --- !query -select to_timestamp('2020-01-365', 'yyyy-dd-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30" - } -} - - --- !query -select to_timestamp('2020-10-350', 'yyyy-MM-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15" - } -} - - --- !query -select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'" - } -} - - --- !query -select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) --- !query schema -struct> --- !query output -{"date":null} - - --- !query -select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" - } -} - - --- !query -select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text 'Unparseable' could not be parsed at index 0" - } -} - - --- !query -select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" - } -} - - --- !query -select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text 'Unparseable' could not be parsed at index 0" - } -} - - --- !query -select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" - } -} - - --- !query -select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output 
-org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text 'Unparseable' could not be parsed at index 0" - } -} - - --- !query -select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" - } -} - - --- !query -select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text 'Unparseable' could not be parsed at index 0" - } -} - - --- !query -select cast("Unparseable" as timestamp) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'Unparseable'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "cast(\"Unparseable\" as timestamp)" - } ] -} - - --- !query -select cast("Unparseable" as date) --- !query schema -struct<> --- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'Unparseable'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DATE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 34, - "fragment" : "cast(\"Unparseable\" as date)" - } ] -} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out deleted file mode 100644 index fd30ecf203028..0000000000000 --- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out +++ /dev/null @@ -1,365 +0,0 @@ --- Automatically generated by SQLQueryTestSuite --- !query -create table decimals_test(id int, a decimal(38,18), b decimal(38,18)) using parquet --- !query schema -struct<> --- !query output - - - --- !query -insert into decimals_test values(1, 100.0, 999.0), (2, 12345.123, 12345.123), - (3, 0.1234567891011, 1234.1), (4, 123456789123456789.0, 1.123456789123456789) --- !query schema -struct<> --- !query output - - - --- !query -select id, a*10, b/10 from decimals_test order by id --- !query schema -struct --- !query output -1 1000.000000000000000 99.900000000000000000 -2 123451.230000000000000 1234.512300000000000000 -3 1.234567891011000 123.410000000000000000 -4 1234567891234567890.000000000000000 0.112345678912345679 - - --- !query -select 10.3 * 3.0 --- !query schema -struct<(10.3 * 3.0):decimal(6,2)> --- !query output -30.90 - - --- !query -select 10.3000 * 3.0 --- !query schema -struct<(10.3000 * 3.0):decimal(9,5)> --- !query output -30.90000 - - --- !query -select 10.30000 * 30.0 --- !query schema -struct<(10.30000 * 30.0):decimal(11,6)> --- !query output -309.000000 - - --- !query -select 
10.300000000000000000 * 3.000000000000000000 --- !query schema -struct<(10.300000000000000000 * 3.000000000000000000):decimal(38,34)> --- !query output -30.9000000000000000000000000000000000 - - --- !query -select 10.300000000000000000 * 3.0000000000000000000 --- !query schema -struct<(10.300000000000000000 * 3.0000000000000000000):decimal(38,34)> --- !query output -30.9000000000000000000000000000000000 - - --- !query -select (5e36BD + 0.1) + 5e36BD --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "1", - "value" : "10000000000000000000000000000000000000.1" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "(5e36BD + 0.1) + 5e36BD" - } ] -} - - --- !query -select (-4e36BD - 0.1) - 7e36BD --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "1", - "value" : "-11000000000000000000000000000000000000.1" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 31, - "fragment" : "(-4e36BD - 0.1) - 7e36BD" - } ] -} - - --- !query -select 12345678901234567890.0 * 12345678901234567890.0 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "2", - "value" : "152415787532388367501905199875019052100" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "12345678901234567890.0 * 12345678901234567890.0" - } ] -} - - --- !query -select 1e35BD / 0.1 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "1000000000000000000000000000000000000.000000000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 19, - "fragment" : "1e35BD / 0.1" - } ] -} - - --- !query -select 123456789123456789.1234567890 * 1.123456789123456789 --- !query schema -struct<(123456789123456789.1234567890 * 1.123456789123456789):decimal(38,18)> --- !query output -138698367904130467.654320988515622621 - - --- !query -select 123456789123456789.1234567890 * 1.123456789123456789 --- !query schema -struct<(123456789123456789.1234567890 * 1.123456789123456789):decimal(38,18)> --- !query output -138698367904130467.654320988515622621 - - --- !query -select 12345678912345.123456789123 / 0.000000012345678 --- !query schema -struct<(12345678912345.123456789123 / 1.2345678E-8):decimal(38,9)> --- !query output -1000000073899961059796.725866332 - - --- !query -select 1.0123456789012345678901234567890123456e36BD / 0.1 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : 
"NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "10123456789012345678901234567890123456.000000000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e36BD / 0.1" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e35BD / 1.0 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "101234567890123456789012345678901234.560000000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e35BD / 1.0" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e34BD / 1.0 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "10123456789012345678901234567890123.456000000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e34BD / 1.0" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e33BD / 1.0 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "1012345678901234567890123456789012.345600000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e33BD / 1.0" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e32BD / 1.0 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "38", - "scale" : "6", - "value" : "101234567890123456789012345678901.234560000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e32BD / 1.0" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e31BD / 1.0 --- !query schema -struct<(10123456789012345678901234567890.123456 / 1.0):decimal(38,6)> --- !query output -10123456789012345678901234567890.123456 - - --- !query -select 1.0123456789012345678901234567890123456e31BD / 0.1 --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - 
"precision" : "38", - "scale" : "6", - "value" : "101234567890123456789012345678901.234560000000000000000000000000000000000" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 57, - "fragment" : "1.0123456789012345678901234567890123456e31BD / 0.1" - } ] -} - - --- !query -select 1.0123456789012345678901234567890123456e31BD / 10.0 --- !query schema -struct<(10123456789012345678901234567890.123456 / 10.0):decimal(38,6)> --- !query output -1012345678901234567890123456789.012346 - - --- !query -drop table decimals_test --- !query schema -struct<> --- !query output - diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out deleted file mode 100644 index fb60a920040e6..0000000000000 --- a/sql/core/src/test/resources/sql-tests/results/ansi/math.sql.out +++ /dev/null @@ -1,903 +0,0 @@ --- Automatically generated by SQLQueryTestSuite --- !query -SELECT round(25y, 1) --- !query schema -struct --- !query output -25 - - --- !query -SELECT round(25y, 0) --- !query schema -struct --- !query output -25 - - --- !query -SELECT round(25y, -1) --- !query schema -struct --- !query output -30 - - --- !query -SELECT round(25y, -2) --- !query schema -struct --- !query output -0 - - --- !query -SELECT round(25y, -3) --- !query schema -struct --- !query output -0 - - --- !query -SELECT round(127y, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 22, - "fragment" : "round(127y, -1)" - } ] -} - - --- !query -SELECT round(-128y, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 23, - "fragment" : "round(-128y, -1)" - } ] -} - - --- !query -SELECT round(525s, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525s, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525s, -1) --- !query schema -struct --- !query output -530 - - --- !query -SELECT round(525s, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT round(525s, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT round(32767s, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "round(32767s, -1)" - } ] -} - - --- !query -SELECT round(-32768s, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - 
"queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 25, - "fragment" : "round(-32768s, -1)" - } ] -} - - --- !query -SELECT round(525, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525, -1) --- !query schema -struct --- !query output -530 - - --- !query -SELECT round(525, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT round(525, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT round(2147483647, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "round(2147483647, -1)" - } ] -} - - --- !query -SELECT round(-2147483647, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 29, - "fragment" : "round(-2147483647, -1)" - } ] -} - - --- !query -SELECT round(525L, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525L, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT round(525L, -1) --- !query schema -struct --- !query output -530 - - --- !query -SELECT round(525L, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT round(525L, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT round(9223372036854775807L, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "round(9223372036854775807L, -1)" - } ] -} - - --- !query -SELECT round(-9223372036854775808L, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "round(-9223372036854775808L, -1)" - } ] -} - - --- !query -SELECT bround(25y, 1) --- !query schema -struct --- !query output -25 - - --- !query -SELECT bround(25y, 0) --- !query schema -struct --- !query output -25 - - --- !query -SELECT bround(25y, -1) --- !query schema -struct --- !query output -20 - - --- !query -SELECT bround(25y, -2) --- !query schema -struct --- !query output -0 - - --- !query -SELECT bround(25y, -3) --- !query schema -struct --- !query output -0 - - --- !query -SELECT bround(127y, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : 
"ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 23, - "fragment" : "bround(127y, -1)" - } ] -} - - --- !query -SELECT bround(-128y, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "bround(-128y, -1)" - } ] -} - - --- !query -SELECT bround(525s, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525s, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525s, -1) --- !query schema -struct --- !query output -520 - - --- !query -SELECT bround(525s, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT bround(525s, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT bround(32767s, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 25, - "fragment" : "bround(32767s, -1)" - } ] -} - - --- !query -SELECT bround(-32768s, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 26, - "fragment" : "bround(-32768s, -1)" - } ] -} - - --- !query -SELECT bround(525, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525, -1) --- !query schema -struct --- !query output -520 - - --- !query -SELECT bround(525, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT bround(525, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT bround(2147483647, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 29, - "fragment" : "bround(2147483647, -1)" - } ] -} - - --- !query -SELECT bround(-2147483647, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "bround(-2147483647, -1)" - 
} ] -} - - --- !query -SELECT bround(525L, 1) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525L, 0) --- !query schema -struct --- !query output -525 - - --- !query -SELECT bround(525L, -1) --- !query schema -struct --- !query output -520 - - --- !query -SELECT bround(525L, -2) --- !query schema -struct --- !query output -500 - - --- !query -SELECT bround(525L, -3) --- !query schema -struct --- !query output -1000 - - --- !query -SELECT bround(9223372036854775807L, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "bround(9223372036854775807L, -1)" - } ] -} - - --- !query -SELECT bround(-9223372036854775808L, -1) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 40, - "fragment" : "bround(-9223372036854775808L, -1)" - } ] -} - - --- !query -SELECT conv('100', 2, 10) --- !query schema -struct --- !query output -4 - - --- !query -SELECT conv(-10, 16, -10) --- !query schema -struct --- !query output --16 - - --- !query -SELECT conv('9223372036854775808', 10, 16) --- !query schema -struct --- !query output -8000000000000000 - - --- !query -SELECT conv('92233720368547758070', 10, 16) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow in function conv()" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "conv('92233720368547758070', 10, 16)" - } ] -} - - --- !query -SELECT conv('9223372036854775807', 36, 10) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow in function conv()" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 42, - "fragment" : "conv('9223372036854775807', 36, 10)" - } ] -} - - --- !query -SELECT conv('-9223372036854775807', 36, 10) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : "", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "Overflow in function conv()" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "conv('-9223372036854775807', 36, 10)" - } ] -} - - --- !query -SELECT BIN(0) --- !query schema -struct --- !query output -0 - - --- !query -SELECT BIN(25) --- !query schema -struct --- !query output -11001 - - --- !query -SELECT BIN(25L) --- !query schema -struct --- !query output 
-11001 - - --- !query -SELECT BIN(25.5) --- !query schema -struct --- !query output -11001 - - --- !query -SELECT POSITIVE(0Y) --- !query schema -struct<(+ 0):tinyint> --- !query output -0 - - --- !query -SELECT POSITIVE(25) --- !query schema -struct<(+ 25):int> --- !query output -25 - - --- !query -SELECT POSITIVE(-25L) --- !query schema -struct<(+ -25):bigint> --- !query output --25 - - --- !query -SELECT POSITIVE(25.5) --- !query schema -struct<(+ 25.5):decimal(3,1)> --- !query output -25.5 - - --- !query -SELECT POSITIVE("25.5") --- !query schema -struct<(+ 25.5):double> --- !query output -25.5 - - --- !query -SELECT POSITIVE("invalid") --- !query schema -struct<> --- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'invalid'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 26, - "fragment" : "POSITIVE(\"invalid\")" - } ] -} - - --- !query -SELECT POSITIVE(null) --- !query schema -struct<(+ NULL):double> --- !query output -NULL diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out.java21 b/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out.java21 deleted file mode 100644 index 9d3c97baecabd..0000000000000 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out.java21 +++ /dev/null @@ -1,415 +0,0 @@ --- Automatically generated by SQLQueryTestSuite --- !query -SELECT try_sum(col) FROM VALUES (5), (10), (15) AS tab(col) --- !query schema -struct --- !query output -30 - - --- !query -SELECT try_sum(col) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) --- !query schema -struct --- !query output -30.0 - - --- !query -SELECT try_sum(col) FROM VALUES (NULL), (10), (15) AS tab(col) --- !query schema -struct --- !query output -25 - - --- !query -SELECT try_sum(col) FROM VALUES (NULL), (NULL) AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_sum(col) FROM VALUES (9223372036854775807L), (1L) AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_sum(col) FROM VALUES (98765432109876543210987654321098765432BD), (98765432109876543210987654321098765432BD) AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_sum(col) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col) --- !query schema -struct --- !query output -0-2 - - --- !query -SELECT try_sum(col) FROM VALUES (interval '2147483647 months'), (interval '1 months') AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_sum(col) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col) --- !query schema -struct --- !query output -0 00:00:02.000000000 - - --- !query -SELECT try_sum(col) FROM VALUES (interval '106751991 DAYS'), (interval '1 DAYS') AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_sum(col / 0) FROM VALUES (5), (10), (15) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT 
try_sum(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_sum(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_sum(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 23, - "fragment" : "col + 1L" - } ] -} - - --- !query -SELECT try_sum(col / 0) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "INTERVAL_DIVIDED_BY_ZERO", - "sqlState" : "22012", - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_sum(col / 0) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "INTERVAL_DIVIDED_BY_ZERO", - "sqlState" : "22012", - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_avg(col) FROM VALUES (5), (10), (15) AS tab(col) --- !query schema -struct --- !query output -10.0 - - --- !query -SELECT try_avg(col) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) --- !query schema -struct --- !query output -10.00000 - - --- !query -SELECT try_avg(col) FROM VALUES (NULL), (10), (15) AS tab(col) --- !query schema -struct --- !query output -12.5 - - --- !query -SELECT try_avg(col) FROM VALUES (NULL), (NULL) AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_avg(col) FROM VALUES (9223372036854775807L), (1L) AS tab(col) --- !query schema -struct --- !query output -4.611686018427388E18 - - --- !query -SELECT try_avg(col) FROM VALUES (98765432109876543210987654321098765432BD), (98765432109876543210987654321098765432BD) AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_avg(col) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col) --- !query schema -struct --- !query output -0-1 - - --- !query -SELECT try_avg(col) FROM VALUES (interval '2147483647 months'), (interval '1 months') AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_avg(col) FROM VALUES (interval '1 seconds'), (interval '1 seconds') 
AS tab(col) --- !query schema -struct --- !query output -0 00:00:01.000000000 - - --- !query -SELECT try_avg(col) FROM VALUES (interval '106751991 DAYS'), (interval '1 DAYS') AS tab(col) --- !query schema -struct --- !query output -NULL - - --- !query -SELECT try_avg(col / 0) FROM VALUES (5), (10), (15) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_avg(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_avg(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_avg(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 23, - "fragment" : "col + 1L" - } ] -} - - --- !query -SELECT try_avg(col / 0) FROM VALUES (interval '1 months'), (interval '1 months') AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "INTERVAL_DIVIDED_BY_ZERO", - "sqlState" : "22012", - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} - - --- !query -SELECT try_avg(col / 0) FROM VALUES (interval '1 seconds'), (interval '1 seconds') AS tab(col) --- !query schema -struct<> --- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "INTERVAL_DIVIDED_BY_ZERO", - "sqlState" : "22012", - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out b/sql/core/src/test/resources/sql-tests/results/array.sql.out index c1330c620acfb..7394e428091c7 100644 --- a/sql/core/src/test/resources/sql-tests/results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out @@ -178,17 +178,49 @@ struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : 
"\"spark.sql.ansi.enabled\"", + "arraySize" : "3", + "indexValue" : "5" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "element_at(array(1, 2, 3), 5)" + } ] +} -- !query select element_at(array(1, 2, 3), -5) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "3", + "indexValue" : "-5" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 37, + "fragment" : "element_at(array(1, 2, 3), -5)" + } ] +} -- !query @@ -199,32 +231,87 @@ struct<> org.apache.spark.SparkRuntimeException { "errorClass" : "INVALID_INDEX_OF_ZERO", - "sqlState" : "22003" + "sqlState" : "22003", + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "element_at(array(1, 2, 3), 0)" + } ] } -- !query select elt(4, '123', '456') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "2", + "indexValue" : "4" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "elt(4, '123', '456')" + } ] +} -- !query select elt(0, '123', '456') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "2", + "indexValue" : "0" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "elt(0, '123', '456')" + } ] +} -- !query select elt(-1, '123', '456') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "2", + "indexValue" : "-1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "elt(-1, '123', '456')" + } ] +} -- !query @@ -262,17 +349,49 @@ NULL -- !query select array(1, 2, 3)[5] -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "3", + "indexValue" : "5" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "array(1, 2, 3)[5]" + } ] +} -- !query select array(1, 2, 3)[-1] -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArrayIndexOutOfBoundsException +{ + "errorClass" : "INVALID_ARRAY_INDEX", + "sqlState" : "22003", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "arraySize" : "3", + "indexValue" : "-1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "array(1, 2, 
3)[-1]" + } ] +} -- !query @@ -354,7 +473,7 @@ select size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10))) -- !query schema struct -- !query output --1 +NULL -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index 738697c638832..0dbdf1d9975c9 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -2,113 +2,337 @@ -- !query SELECT CAST('1.23' AS int) -- !query schema -struct +struct<> -- !query output -1 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.23'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 26, + "fragment" : "CAST('1.23' AS int)" + } ] +} -- !query SELECT CAST('1.23' AS long) -- !query schema -struct +struct<> -- !query output -1 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.23'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "CAST('1.23' AS long)" + } ] +} -- !query SELECT CAST('-4.56' AS int) -- !query schema -struct +struct<> -- !query output --4 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'-4.56'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "CAST('-4.56' AS int)" + } ] +} -- !query SELECT CAST('-4.56' AS long) -- !query schema -struct +struct<> -- !query output --4 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'-4.56'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "CAST('-4.56' AS long)" + } ] +} -- !query SELECT CAST('abc' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "CAST('abc' AS int)" + } ] +} -- !query SELECT CAST('abc' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 26, + "fragment" : "CAST('abc' AS long)" + } ] +} -- !query SELECT CAST('abc' AS float) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : 
"22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"FLOAT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "CAST('abc' AS float)" + } ] +} -- !query SELECT CAST('abc' AS double) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "CAST('abc' AS double)" + } ] +} -- !query SELECT CAST('1234567890123' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1234567890123'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "CAST('1234567890123' AS int)" + } ] +} -- !query SELECT CAST('12345678901234567890123' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'12345678901234567890123'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "CAST('12345678901234567890123' AS long)" + } ] +} -- !query SELECT CAST('' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "''", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 22, + "fragment" : "CAST('' AS int)" + } ] +} -- !query SELECT CAST('' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "''", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "CAST('' AS long)" + } ] +} -- !query SELECT CAST('' AS float) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "''", + "sourceType" : "\"STRING\"", + "targetType" : "\"FLOAT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "CAST('' AS float)" + } ] +} -- !query SELECT CAST('' AS double) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "''", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + 
"objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "CAST('' AS double)" + } ] +} -- !query @@ -130,33 +354,97 @@ NULL -- !query SELECT CAST('123.a' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'123.a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "CAST('123.a' AS int)" + } ] +} -- !query SELECT CAST('123.a' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'123.a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "CAST('123.a' AS long)" + } ] +} -- !query SELECT CAST('123.a' AS float) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'123.a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"FLOAT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "CAST('123.a' AS float)" + } ] +} -- !query SELECT CAST('123.a' AS double) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'123.a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "CAST('123.a' AS double)" + } ] +} -- !query @@ -170,9 +458,25 @@ struct -- !query SELECT CAST('-2147483649' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'-2147483649'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "CAST('-2147483649' AS int)" + } ] +} -- !query @@ -186,9 +490,25 @@ struct -- !query SELECT CAST('2147483648' AS int) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2147483648'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "CAST('2147483648' AS int)" + } ] +} -- !query @@ -202,9 +522,25 @@ struct -- !query SELECT CAST('-9223372036854775809' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'-9223372036854775809'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + 
"objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "CAST('-9223372036854775809' AS long)" + } ] +} -- !query @@ -218,9 +554,25 @@ struct -- !query SELECT CAST('9223372036854775808' AS long) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'9223372036854775808'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "CAST('9223372036854775808' AS long)" + } ] +} -- !query @@ -234,65 +586,209 @@ struct -- !query SELECT HEX(CAST(CAST(123 AS byte) AS binary)) -- !query schema -struct +struct<> -- !query output -7B +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"", + "srcType" : "\"TINYINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 44, + "fragment" : "CAST(CAST(123 AS byte) AS binary)" + } ] +} -- !query SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) -- !query schema -struct +struct<> -- !query output -85 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(CAST(-123 AS TINYINT) AS BINARY)\"", + "srcType" : "\"TINYINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 45, + "fragment" : "CAST(CAST(-123 AS byte) AS binary)" + } ] +} -- !query SELECT HEX(CAST(123S AS binary)) -- !query schema -struct +struct<> -- !query output -007B +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(123 AS BINARY)\"", + "srcType" : "\"SMALLINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 31, + "fragment" : "CAST(123S AS binary)" + } ] +} -- !query SELECT HEX(CAST(-123S AS binary)) -- !query schema -struct +struct<> -- !query output -FF85 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(-123 AS BINARY)\"", + "srcType" : "\"SMALLINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 32, + "fragment" : "CAST(-123S AS binary)" + } ] +} -- !query SELECT HEX(CAST(123 AS binary)) -- !query schema -struct +struct<> -- !query output -0000007B +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : 
"\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(123 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 30, + "fragment" : "CAST(123 AS binary)" + } ] +} -- !query SELECT HEX(CAST(-123 AS binary)) -- !query schema -struct +struct<> -- !query output -FFFFFF85 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(-123 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 31, + "fragment" : "CAST(-123 AS binary)" + } ] +} -- !query SELECT HEX(CAST(123L AS binary)) -- !query schema -struct +struct<> -- !query output -000000000000007B +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(123 AS BINARY)\"", + "srcType" : "\"BIGINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 31, + "fragment" : "CAST(123L AS binary)" + } ] +} -- !query SELECT HEX(CAST(-123L AS binary)) -- !query schema -struct +struct<> -- !query output -FFFFFFFFFFFFFF85 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(-123 AS BINARY)\"", + "srcType" : "\"BIGINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 32, + "fragment" : "CAST(-123L AS binary)" + } ] +} -- !query @@ -448,41 +944,121 @@ struct -- !query select cast('1中文' as tinyint) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1中文'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TINYINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "cast('1中文' as tinyint)" + } ] +} -- !query select cast('1中文' as smallint) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1中文'", + "sourceType" : "\"STRING\"", + "targetType" : "\"SMALLINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "cast('1中文' as smallint)" + } ] +} -- !query select cast('1中文' as INT) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1中文'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 
8, + "stopIndex" : 25, + "fragment" : "cast('1中文' as INT)" + } ] +} -- !query select cast('中文1' as bigint) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'中文1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "cast('中文1' as bigint)" + } ] +} -- !query select cast('1中文' as bigint) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1中文'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "cast('1中文' as bigint)" + } ] +} -- !query @@ -506,10 +1082,25 @@ false -- !query select cast('\t\n xyz \t\r' as boolean) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkRuntimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'\t\n xyz \t\r'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BOOLEAN\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "cast('\\t\\n xyz \\t\\r' as boolean)" + } ] +} -- !query @@ -523,17 +1114,50 @@ struct -- !query select cast('123.45' as decimal(4, 2)) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "4", + "scale" : "2", + "value" : "123.45" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 38, + "fragment" : "cast('123.45' as decimal(4, 2))" + } ] +} -- !query select cast('xyz' as decimal(4, 2)) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'xyz'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DECIMAL(4,2)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast('xyz' as decimal(4, 2))" + } ] +} -- !query @@ -547,9 +1171,25 @@ struct -- !query select cast('a' as date) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast('a' as date)" + } ] +} -- !query @@ -563,9 +1203,25 @@ struct -- !query select cast('a' as timestamp) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" 
: "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "cast('a' as timestamp)" + } ] +} -- !query @@ -579,25 +1235,73 @@ struct -- !query select cast('a' as timestamp_ntz) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP_NTZ\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast('a' as timestamp_ntz)" + } ] +} -- !query select cast(cast('inf' as double) as timestamp) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "Infinity", + "sourceType" : "\"DOUBLE\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast(cast('inf' as double) as timestamp)" + } ] +} -- !query select cast(cast('inf' as float) as timestamp) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "Infinity", + "sourceType" : "\"DOUBLE\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(cast('inf' as float) as timestamp)" + } ] +} -- !query @@ -875,7 +1579,14 @@ org.apache.spark.SparkArithmeticException "precision" : "1", "scale" : "0", "value" : "10.123000" - } + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(interval '10.123' second as decimal(1, 0))" + } ] } @@ -914,33 +1625,97 @@ struct -- !query SELECT '1.23' :: int -- !query schema -struct +struct<> -- !query output -1 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.23'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 20, + "fragment" : "'1.23' :: int" + } ] +} -- !query SELECT 'abc' :: int -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 19, + "fragment" : "'abc' :: int" + } ] +} -- !query SELECT '12345678901234567890123' :: long -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'12345678901234567890123'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "'12345678901234567890123' :: long" + } ] +} -- !query SELECT '' :: int -- !query schema -struct +struct<> -- !query output -NULL 
+org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "''", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'' :: int" + } ] +} -- !query @@ -954,9 +1729,25 @@ NULL -- !query SELECT '123.a' :: int -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'123.a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 21, + "fragment" : "'123.a' :: int" + } ] +} -- !query @@ -978,9 +1769,27 @@ struct -- !query SELECT HEX((123 :: byte) :: binary) -- !query schema -struct +struct<> -- !query output -7B +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(CAST(123 AS TINYINT) AS BINARY)\"", + "srcType" : "\"TINYINT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 12, + "stopIndex" : 34, + "fragment" : "(123 :: byte) :: binary" + } ] +} -- !query @@ -1067,25 +1876,59 @@ struct -- !query SELECT '1.23' :: int :: long -- !query schema -struct +struct<> -- !query output -1 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.23'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 20, + "fragment" : "'1.23' :: int" + } ] +} -- !query SELECT '2147483648' :: long :: int -- !query schema -struct +struct<> -- !query output --2147483648 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"BIGINT\"", + "targetType" : "\"INT\"", + "value" : "2147483648L" + } +} -- !query SELECT CAST('2147483648' :: long AS int) -- !query schema -struct +struct<> -- !query output --2147483648 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"BIGINT\"", + "targetType" : "\"INT\"", + "value" : "2147483648L" + } +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index c46c200ff026f..aa283d3249617 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -49,17 +49,33 @@ struct -- !query select make_date(2000, 13, 1) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13" + } +} -- !query select make_date(2000, 1, 33) -- !query schema -struct +struct<> -- !query output -NULL 
+org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33" + } +} -- !query @@ -184,9 +200,17 @@ struct -- !query select to_date("02-29", "MM-dd") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Invalid date 'February 29' as '1970' is not a leap year" + } +} -- !query @@ -242,9 +266,16 @@ struct -- !query select next_day("2015-07-23", "xx") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkIllegalArgumentException +{ + "errorClass" : "ILLEGAL_DAY_OF_WEEK", + "sqlState" : "22009", + "messageParameters" : { + "string" : "xx" + } +} -- !query @@ -274,9 +305,25 @@ struct -- !query select next_day("xx", "Mon") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'xx'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "next_day(\"xx\", \"Mon\")" + } ] +} -- !query @@ -418,13 +465,22 @@ select date_add('2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.SparkNumberFormatException { - "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", - "sqlState" : "22023", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "functionName" : "date_add" - } + "expression" : "'1.2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "date_add('2011-11-11', '1.2')" + } ] } @@ -583,13 +639,22 @@ select date_sub(date'2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.SparkNumberFormatException { - "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", - "sqlState" : "22023", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "functionName" : "date_sub" - } + "expression" : "'1.2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "date_sub(date'2011-11-11', '1.2')" + } ] } @@ -628,53 +693,17 @@ struct -- !query select date_add('2011-11-11', int_str) from date_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"int_str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "date_add('2011-11-11', int_str)" - } ] -} +2011-11-12 -- !query select date_sub('2011-11-11', int_str) from 
date_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"int_str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "date_sub('2011-11-11', int_str)" - } ] -} +2011-11-10 -- !query @@ -754,27 +783,9 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day> -- !query select date '2001-10-01' - '2001-09-28' -- !query schema -struct<> +struct<(DATE '2001-10-01' - 2001-09-28):interval day> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2001-09-28\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "date '2001-10-01' - '2001-09-28'" - } ] -} +3 00:00:00.000000000 -- !query @@ -812,27 +823,9 @@ struct<(date_str - DATE '2001-09-28'):interval day> -- !query select date '2001-09-28' - date_str from date_view -- !query schema -struct<> +struct<(DATE '2001-09-28' - date_str):interval day> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"date_str\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "date '2001-09-28' - date_str" - } ] -} +-3696 00:00:00.000000000 -- !query @@ -846,7 +839,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" @@ -871,11 +864,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputSql" : "\"DATE '2011-11-11'\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" + "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 7353df600dd4f..5635196efc2e5 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -49,17 +49,33 @@ struct -- !query select make_date(2000, 13, 1) -- !query schema 
-struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13" + } +} -- !query select make_date(2000, 1, 33) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33" + } +} -- !query @@ -184,9 +200,17 @@ struct -- !query select to_date("02-29", "MM-dd") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"02-29\"" + } +} -- !query @@ -242,9 +266,16 @@ struct -- !query select next_day("2015-07-23", "xx") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkIllegalArgumentException +{ + "errorClass" : "ILLEGAL_DAY_OF_WEEK", + "sqlState" : "22009", + "messageParameters" : { + "string" : "xx" + } +} -- !query @@ -274,9 +305,25 @@ struct -- !query select next_day("xx", "Mon") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'xx'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "next_day(\"xx\", \"Mon\")" + } ] +} -- !query @@ -418,13 +465,22 @@ select date_add('2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.SparkNumberFormatException { - "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", - "sqlState" : "22023", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "functionName" : "date_add" - } + "expression" : "'1.2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "date_add('2011-11-11', '1.2')" + } ] } @@ -583,13 +639,22 @@ select date_sub(date'2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.SparkNumberFormatException { - "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", - "sqlState" : "22023", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "functionName" : "date_sub" - } + "expression" : "'1.2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "date_sub(date'2011-11-11', '1.2')" + } ] } @@ -628,53 +693,17 @@ struct -- !query select date_add('2011-11-11', int_str) from date_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"int_str\"", - "inputType" : 
"\"STRING\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "date_add('2011-11-11', int_str)" - } ] -} +2011-11-12 -- !query select date_sub('2011-11-11', int_str) from date_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"int_str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 38, - "fragment" : "date_sub('2011-11-11', int_str)" - } ] -} +2011-11-10 -- !query @@ -754,27 +783,9 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day> -- !query select date '2001-10-01' - '2001-09-28' -- !query schema -struct<> +struct<(DATE '2001-10-01' - 2001-09-28):interval day> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2001-09-28\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 39, - "fragment" : "date '2001-10-01' - '2001-09-28'" - } ] -} +3 00:00:00.000000000 -- !query @@ -812,27 +823,9 @@ struct<(date_str - DATE '2001-09-28'):interval day> -- !query select date '2001-09-28' - date_str from date_view -- !query schema -struct<> +struct<(DATE '2001-09-28' - date_str):interval day> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"date_str\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "date '2001-09-28' - date_str" - } ] -} +-3696 00:00:00.000000000 -- !query @@ -846,7 +839,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" @@ -871,11 +864,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputSql" : "\"DATE '2011-11-11'\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" + "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\"" }, "queryContext" : [ { "objectType" : "", @@ -1316,9 +1309,16 @@ struct -- !query SELECT 
make_timestamp(2021, 07, 11, 6, 30, 60.007) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "INVALID_FRACTION_OF_SECOND", + "sqlState" : "22023", + "messageParameters" : { + "secAndMicros" : "60.007" + } +} -- !query @@ -1340,9 +1340,17 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 61) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61" + } +} -- !query @@ -1364,17 +1372,33 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99" + } +} -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999" + } +} -- !query @@ -1554,169 +1578,337 @@ struct -- !query select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.0\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.1\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.12\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 
10:11:12.123UTC\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.12345CST\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.123456PST\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.1234567PST\"" + } +} -- !query select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"123456 2019-10-06 10:11:12.123456PST\"" + } +} -- !query select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"223456 2019-10-06 10:11:12.123456PST\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.1234\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.123\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : 
"CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12\"" + } +} -- !query select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11:12.12\"" + } +} -- !query select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 10:11\"" + } +} -- !query select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06S10:11:12.12345\"" + } +} -- !query select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"12.12342019-10-06S10:11\"" + } +} -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"12.1232019-10-06S10:11\"" + } +} -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"12.1232019-10-06S10:11\"" + } +} -- !query select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"12.1234019-10-06S10:11\"" + } +} -- !query @@ -1778,9 +1970,17 @@ struct -- !query select to_timestamp("02-29", "MM-dd") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"02-29\"" + } +} -- !query @@ -1826,53 +2026,17 @@ struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day -- !query select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' -- !query schema -struct<> 
+struct<(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:10\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'" - } ] -} +0 00:00:01.000000000 -- !query select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' -- !query schema -struct<> +struct<(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10'):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:11\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'" - } ] -} +0 00:00:01.000000000 -- !query @@ -1902,53 +2066,17 @@ struct<> -- !query select str - timestamp'2011-11-11 11:11:11' from ts_view -- !query schema -struct<> +struct<(str - TIMESTAMP '2011-11-11 11:11:11'):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "str - timestamp'2011-11-11 11:11:11'" - } ] -} +0 00:00:00.000000000 -- !query select timestamp'2011-11-11 11:11:11' - str from ts_view -- !query schema -struct<> +struct<(TIMESTAMP '2011-11-11 11:11:11' - str):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "timestamp'2011-11-11 11:11:11' - str" - } ] -} +0 00:00:00.000000000 -- !query @@ -1958,11 +2086,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO 
SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\"" }, "queryContext" : [ { @@ -1982,11 +2110,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\"" }, "queryContext" : [ { @@ -2073,9 +2201,17 @@ struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Unparseable date: \"2019-10-06 A\"" + } +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out index fffbb2a4e017f..0708a523900ff 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out @@ -11,33 +11,65 @@ long overflow -- !query select to_timestamp('1', 'yy') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '1' could not be parsed at index 0" + } +} -- !query select to_timestamp('-12', 'yy') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '-12' could not be parsed at index 0" + } +} -- !query select to_timestamp('123', 'yy') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '123' could not be parsed, unparsed text found at index 2" + } +} -- !query select to_timestamp('1', 'yyy') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '1' could not be parsed at index 0" + } +} -- !query @@ -60,97 +92,193 @@ org.apache.spark.SparkUpgradeException -- !query select to_timestamp('366', 'D') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year" + } +} -- !query select to_timestamp('9', 'DD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : 
"\"spark.sql.ansi.enabled\"", + "message" : "Text '9' could not be parsed at index 0" + } +} -- !query select to_timestamp('366', 'DD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Invalid date 'DayOfYear 366' as '1970' is not a leap year" + } +} -- !query select to_timestamp('9', 'DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '9' could not be parsed at index 0" + } +} -- !query select to_timestamp('99', 'DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '99' could not be parsed at index 0" + } +} -- !query select to_timestamp('30-365', 'dd-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31." + } +} -- !query select to_timestamp('11-365', 'MM-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31." + } +} -- !query select to_timestamp('2019-366', 'yyyy-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year" + } +} -- !query select to_timestamp('12-30-365', 'MM-dd-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31." 
+ } +} -- !query select to_timestamp('2020-01-365', 'yyyy-dd-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30" + } +} -- !query select to_timestamp('2020-10-350', 'yyyy-MM-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15" + } +} -- !query select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'" + } +} -- !query @@ -164,78 +292,174 @@ struct> -- !query select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" + } +} -- !query select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'Unparseable' could not be parsed at index 0" + } +} -- !query select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" + } +} -- !query select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'Unparseable' could not be parsed at index 0" + } +} -- !query select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" + } +} -- !query select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : 
"CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'Unparseable' could not be parsed at index 0" + } +} -- !query select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2020-01-27T20:06:11.847' could not be parsed at index 10" + } +} -- !query select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'Unparseable' could not be parsed at index 0" + } +} -- !query select cast("Unparseable" as timestamp) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'Unparseable'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "cast(\"Unparseable\" as timestamp)" + } ] +} -- !query select cast("Unparseable" as date) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'Unparseable'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(\"Unparseable\" as date)" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out index 42e603981848e..cb52778c420ae 100644 --- a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out @@ -10,25 +10,67 @@ struct<> -- !query select a / b from t -- !query schema -struct<(a / b):decimal(8,6)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 12, + "fragment" : "a / b" + } ] +} -- !query select a % b from t -- !query schema -struct<(a % b):decimal(1,1)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 12, + "fragment" : "a % b" + } ] +} -- !query select pmod(a, b) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + 
}, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "pmod(a, b)" + } ] +} -- !query @@ -121,41 +163,126 @@ struct<(2.35E10 * 1.0):double> -- !query select (5e36BD + 0.1) + 5e36BD -- !query schema -struct<((5000000000000000000000000000000000000 + 0.1) + 5000000000000000000000000000000000000):decimal(38,1)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "1", + "value" : "10000000000000000000000000000000000000.1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "(5e36BD + 0.1) + 5e36BD" + } ] +} -- !query select (-4e36BD - 0.1) - 7e36BD -- !query schema -struct<((-4000000000000000000000000000000000000 - 0.1) - 7000000000000000000000000000000000000):decimal(38,1)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "1", + "value" : "-11000000000000000000000000000000000000.1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "(-4e36BD - 0.1) - 7e36BD" + } ] +} -- !query select 12345678901234567890.0 * 12345678901234567890.0 -- !query schema -struct<(12345678901234567890.0 * 12345678901234567890.0):decimal(38,2)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "2", + "value" : "152415787532388367501905199875019052100" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "12345678901234567890.0 * 12345678901234567890.0" + } ] +} -- !query select 1e35BD / 0.1 -- !query schema -struct<(100000000000000000000000000000000000 / 0.1):decimal(38,6)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "6", + "value" : "1000000000000000000000000000000000000.000000000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 19, + "fragment" : "1e35BD / 0.1" + } ] +} -- !query select 1.2345678901234567890E30BD * 1.2345678901234567890E25BD -- !query schema -struct<(1234567890123456789000000000000 * 12345678901234567890000000):decimal(38,0)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "0", + "value" : "15241578753238836750190519987501905210000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "1.2345678901234567890E30BD * 1.2345678901234567890E25BD" + } ] +} -- !query @@ 
-268,14 +395,28 @@ spark.sql.decimalOperations.allowPrecisionLoss false -- !query -select id, a+b, a-b, a*b, a/b from decimals_test order by id +select /*+ COALESCE(1) */ id, a+b, a-b, a*b, a/b from decimals_test order by id -- !query schema -struct +struct<> -- !query output -1 1099.000000000000000000 -899.000000000000000000 NULL 0.100100100100100100 -2 24690.246000000000000000 0.000000000000000000 NULL 1.000000000000000000 -3 1234.223456789101100000 -1233.976543210898900000 NULL 0.000100037913541123 -4 123456789123456790.123456789123456789 123456789123456787.876543210876543211 NULL 109890109097814272.043109406191131436 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "36", + "value" : "152.358023429667510000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 41, + "stopIndex" : 43, + "fragment" : "a*b" + } ] +} -- !query @@ -324,9 +465,26 @@ struct<(10.300000000000000000 * 3.000000000000000000):decimal(38,36)> -- !query select 10.300000000000000000 * 3.0000000000000000000 -- !query schema -struct<(10.300000000000000000 * 3.0000000000000000000):decimal(38,37)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "37", + "value" : "30.9000000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 52, + "fragment" : "10.300000000000000000 * 3.0000000000000000000" + } ] +} -- !query @@ -340,81 +498,251 @@ struct<(2.35E10 * 1.0):double> -- !query select (5e36BD + 0.1) + 5e36BD -- !query schema -struct<((5000000000000000000000000000000000000 + 0.1) + 5000000000000000000000000000000000000):decimal(38,1)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "1", + "value" : "10000000000000000000000000000000000000.1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "(5e36BD + 0.1) + 5e36BD" + } ] +} -- !query select (-4e36BD - 0.1) - 7e36BD -- !query schema -struct<((-4000000000000000000000000000000000000 - 0.1) - 7000000000000000000000000000000000000):decimal(38,1)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "1", + "value" : "-11000000000000000000000000000000000000.1" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "(-4e36BD - 0.1) - 7e36BD" + } ] +} -- !query select 12345678901234567890.0 * 12345678901234567890.0 -- !query schema -struct<(12345678901234567890.0 * 12345678901234567890.0):decimal(38,2)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + 
"messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "2", + "value" : "152415787532388367501905199875019052100" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "12345678901234567890.0 * 12345678901234567890.0" + } ] +} -- !query select 1e35BD / 0.1 -- !query schema -struct<(100000000000000000000000000000000000 / 0.1):decimal(38,3)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "3", + "value" : "1000000000000000000000000000000000000.000000000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 19, + "fragment" : "1e35BD / 0.1" + } ] +} -- !query select 1.2345678901234567890E30BD * 1.2345678901234567890E25BD -- !query schema -struct<(1234567890123456789000000000000 * 12345678901234567890000000):decimal(38,0)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "0", + "value" : "15241578753238836750190519987501905210000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "1.2345678901234567890E30BD * 1.2345678901234567890E25BD" + } ] +} -- !query select 12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345 -- !query schema -struct<(12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345):decimal(38,7)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "7", + "value" : "10012345678912345678912345678911.2469067" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 84, + "fragment" : "12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345" + } ] +} -- !query select 123456789123456789.1234567890 * 1.123456789123456789 -- !query schema -struct<(123456789123456789.1234567890 * 1.123456789123456789):decimal(38,28)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "28", + "value" : "138698367904130467.654320988515622620750" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "123456789123456789.1234567890 * 1.123456789123456789" + } ] +} -- !query select 12345678912345.123456789123 / 0.000000012345678 -- !query schema -struct<(12345678912345.123456789123 / 1.2345678E-8):decimal(38,18)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : 
"18", + "value" : "1000000073899961059796.725866331521039184725213147467478092333" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "12345678912345.123456789123 / 0.000000012345678" + } ] +} -- !query select 1.0123456789012345678901234567890123456e36BD / 0.1 -- !query schema -struct<(1012345678901234567890123456789012345.6 / 0.1):decimal(38,2)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "2", + "value" : "10123456789012345678901234567890123456.000000000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "1.0123456789012345678901234567890123456e36BD / 0.1" + } ] +} -- !query select 1.0123456789012345678901234567890123456e35BD / 1.0 -- !query schema -struct<(101234567890123456789012345678901234.56 / 1.0):decimal(38,3)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "3", + "value" : "101234567890123456789012345678901234.560000000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "1.0123456789012345678901234567890123456e35BD / 1.0" + } ] +} -- !query @@ -452,9 +780,26 @@ struct<(10123456789012345678901234567890.123456 / 1.0):decimal(38,6)> -- !query select 1.0123456789012345678901234567890123456e31BD / 0.1 -- !query schema -struct<(10123456789012345678901234567890.123456 / 0.1):decimal(38,6)> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "38", + "scale" : "6", + "value" : "101234567890123456789012345678901.234560000000000000000000000000000000000" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "1.0123456789012345678901234567890123456e31BD / 0.1" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-all-mosha.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-all-mosha.sql.out index 9bc5fe0cc379e..d6735f620a638 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-all-mosha.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-all-mosha.sql.out @@ -44,11 +44,25 @@ struct SELECT i + 1, f / i, substring(s, 2, 3), extract(year from t), d / 2, size(a) FROM stuff GROUP BY ALL ORDER BY 1, 3, 4, 5, 6, 2 -- !query schema -struct<(i + 1):int,(f / i):decimal(17,15),substring(s, 2, 3):string,extract(year FROM t):int,(d / 2):double,size(a):int> +struct<> -- !query output -43 0.232142857142857 ell 1970 6.685 3 -43 0.318333333333333 est 1970 6.17283945E8 3 -1338 0.000923335826477 h n 2000 21.0 3 +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'42.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" 
: [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 64, + "stopIndex" : 68, + "fragment" : "d / 2" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index d8a9f4c2e11f5..633133ad7e4d6 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -626,27 +626,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT every("true") -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"true\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"BOOLEAN\"", - "sqlExpr" : "\"every(true)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 20, - "fragment" : "every(\"true\")" - } ] -} +true -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index a8a0423bdb3e0..4e012df792dea 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -123,33 +123,97 @@ struct<(INTERVAL '2' YEAR / 2):interval year to month> -- !query select interval 2 second * 'a' -- !query schema -struct<(INTERVAL '02' SECOND * a):interval day to second> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval 2 second * 'a'" + } ] +} -- !query select interval 2 second / 'a' -- !query schema -struct<(INTERVAL '02' SECOND / a):interval day to second> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval 2 second / 'a'" + } ] +} -- !query select interval 2 year * 'a' -- !query schema -struct<(INTERVAL '2' YEAR * a):interval year to month> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "interval 2 year * 'a'" + } ] +} -- !query select interval 2 year / 'a' -- !query schema -struct<(INTERVAL '2' YEAR / a):interval year to month> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + 
"stopIndex" : 28, + "fragment" : "interval 2 year / 'a'" + } ] +} -- !query @@ -171,17 +235,49 @@ struct<(INTERVAL '2' YEAR * 2):interval year to month> -- !query select 'a' * interval 2 second -- !query schema -struct<(INTERVAL '02' SECOND * a):interval day to second> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "'a' * interval 2 second" + } ] +} -- !query select 'a' * interval 2 year -- !query schema -struct<(INTERVAL '2' YEAR * a):interval year to month> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "'a' * interval 2 year" + } ] +} -- !query @@ -773,9 +869,26 @@ struct -- !query select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "18", + "scale" : "6", + "value" : "1234567890123456789" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)" + } ] +} -- !query @@ -1877,17 +1990,49 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select '4 11:11' - interval '4 22:12' day to minute -- !query schema -struct<4 11:11 - INTERVAL '4 22:12' DAY TO MINUTE:string> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'4 11:11'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "'4 11:11' - interval '4 22:12' day to minute" + } ] +} -- !query select '4 12:12:12' + interval '4 22:12' day to minute -- !query schema -struct<4 12:12:12 + INTERVAL '4 22:12' DAY TO MINUTE:string> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'4 12:12:12'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "'4 12:12:12' + interval '4 22:12' day to minute" + } ] +} -- !query @@ -1949,17 +2094,49 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select str - interval '4 22:12' day to minute from interval_view -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + 
"queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "str - interval '4 22:12' day to minute" + } ] +} -- !query select str + interval '4 22:12' day to minute from interval_view -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "str + interval '4 22:12' day to minute" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/math.sql.out b/sql/core/src/test/resources/sql-tests/results/math.sql.out index 09f4383933288..fb60a920040e6 100644 --- a/sql/core/src/test/resources/sql-tests/results/math.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/math.sql.out @@ -42,17 +42,49 @@ struct -- !query SELECT round(127y, -1) -- !query schema -struct --- !query output --126 +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 22, + "fragment" : "round(127y, -1)" + } ] +} -- !query SELECT round(-128y, -1) -- !query schema -struct +struct<> -- !query output -126 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "round(-128y, -1)" + } ] +} -- !query @@ -98,17 +130,49 @@ struct -- !query SELECT round(32767s, -1) -- !query schema -struct +struct<> -- !query output --32766 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "round(32767s, -1)" + } ] +} -- !query SELECT round(-32768s, -1) -- !query schema -struct +struct<> -- !query output -32766 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "round(-32768s, -1)" + } ] +} -- !query @@ -154,17 +218,49 @@ struct -- !query SELECT round(2147483647, -1) -- !query schema -struct +struct<> -- !query output --2147483646 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "round(2147483647, -1)" + } ] +} -- !query SELECT round(-2147483647, -1) 
-- !query schema -struct +struct<> -- !query output -2147483646 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "round(-2147483647, -1)" + } ] +} -- !query @@ -210,17 +306,49 @@ struct -- !query SELECT round(9223372036854775807L, -1) -- !query schema -struct +struct<> -- !query output --9223372036854775806 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 38, + "fragment" : "round(9223372036854775807L, -1)" + } ] +} -- !query SELECT round(-9223372036854775808L, -1) -- !query schema -struct +struct<> -- !query output -9223372036854775806 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "round(-9223372036854775808L, -1)" + } ] +} -- !query @@ -266,17 +394,49 @@ struct -- !query SELECT bround(127y, -1) -- !query schema -struct +struct<> -- !query output --126 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "bround(127y, -1)" + } ] +} -- !query SELECT bround(-128y, -1) -- !query schema -struct +struct<> -- !query output -126 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "bround(-128y, -1)" + } ] +} -- !query @@ -322,17 +482,49 @@ struct -- !query SELECT bround(32767s, -1) -- !query schema -struct +struct<> -- !query output --32766 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "bround(32767s, -1)" + } ] +} -- !query SELECT bround(-32768s, -1) -- !query schema -struct +struct<> -- !query output -32766 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 26, + "fragment" : "bround(-32768s, -1)" + } ] +} -- !query @@ -378,17 +570,49 @@ struct -- !query SELECT 
bround(2147483647, -1) -- !query schema -struct +struct<> -- !query output --2147483646 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "bround(2147483647, -1)" + } ] +} -- !query SELECT bround(-2147483647, -1) -- !query schema -struct +struct<> -- !query output -2147483646 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "bround(-2147483647, -1)" + } ] +} -- !query @@ -434,17 +658,49 @@ struct -- !query SELECT bround(9223372036854775807L, -1) -- !query schema -struct +struct<> -- !query output --9223372036854775806 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "bround(9223372036854775807L, -1)" + } ] +} -- !query SELECT bround(-9223372036854775808L, -1) -- !query schema -struct +struct<> -- !query output -9223372036854775806 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "bround(-9223372036854775808L, -1)" + } ] +} -- !query @@ -474,25 +730,73 @@ struct -- !query SELECT conv('92233720368547758070', 10, 16) -- !query schema -struct +struct<> -- !query output -FFFFFFFFFFFFFFFF +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow in function conv()" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "conv('92233720368547758070', 10, 16)" + } ] +} -- !query SELECT conv('9223372036854775807', 36, 10) -- !query schema -struct +struct<> -- !query output -18446744073709551615 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow in function conv()" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "conv('9223372036854775807', 36, 10)" + } ] +} -- !query SELECT conv('-9223372036854775807', 36, 10) -- !query schema -struct +struct<> -- !query output -18446744073709551615 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : "", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "Overflow in function conv()" + }, + 
"queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "conv('-9223372036854775807', 36, 10)" + } ] +} -- !query @@ -570,9 +874,25 @@ struct<(+ 25.5):double> -- !query SELECT POSITIVE("invalid") -- !query schema -struct<(+ invalid):double> --- !query output -NULL +struct<> +-- !query output +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'invalid'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 26, + "fragment" : "POSITIVE(\"invalid\")" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/array.sql.out similarity index 85% rename from sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/array.sql.out index 7394e428091c7..c1330c620acfb 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/array.sql.out @@ -178,49 +178,17 @@ struct +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "3", - "indexValue" : "5" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 36, - "fragment" : "element_at(array(1, 2, 3), 5)" - } ] -} +NULL -- !query select element_at(array(1, 2, 3), -5) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "3", - "indexValue" : "-5" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 37, - "fragment" : "element_at(array(1, 2, 3), -5)" - } ] -} +NULL -- !query @@ -231,87 +199,32 @@ struct<> org.apache.spark.SparkRuntimeException { "errorClass" : "INVALID_INDEX_OF_ZERO", - "sqlState" : "22003", - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 36, - "fragment" : "element_at(array(1, 2, 3), 0)" - } ] + "sqlState" : "22003" } -- !query select elt(4, '123', '456') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "2", - "indexValue" : "4" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "elt(4, '123', '456')" - } ] -} +NULL -- !query select elt(0, '123', '456') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "2", - "indexValue" : "0" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 27, - "fragment" : "elt(0, '123', '456')" 
- } ] -} +NULL -- !query select elt(-1, '123', '456') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "2", - "indexValue" : "-1" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "elt(-1, '123', '456')" - } ] -} +NULL -- !query @@ -349,49 +262,17 @@ NULL -- !query select array(1, 2, 3)[5] -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "3", - "indexValue" : "5" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "array(1, 2, 3)[5]" - } ] -} +NULL -- !query select array(1, 2, 3)[-1] -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArrayIndexOutOfBoundsException -{ - "errorClass" : "INVALID_ARRAY_INDEX", - "sqlState" : "22003", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "arraySize" : "3", - "indexValue" : "-1" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 25, - "fragment" : "array(1, 2, 3)[-1]" - } ] -} +NULL -- !query @@ -473,7 +354,7 @@ select size(arrays_zip(array(1, 2, 3), array(4), null, array(7, 8, 9, 10))) -- !query schema struct -- !query output -NULL +-1 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/nonansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/cast.sql.out new file mode 100644 index 0000000000000..738697c638832 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/cast.sql.out @@ -0,0 +1,1156 @@ +-- Automatically generated by SQLQueryTestSuite +-- !query +SELECT CAST('1.23' AS int) +-- !query schema +struct +-- !query output +1 + + +-- !query +SELECT CAST('1.23' AS long) +-- !query schema +struct +-- !query output +1 + + +-- !query +SELECT CAST('-4.56' AS int) +-- !query schema +struct +-- !query output +-4 + + +-- !query +SELECT CAST('-4.56' AS long) +-- !query schema +struct +-- !query output +-4 + + +-- !query +SELECT CAST('abc' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('abc' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('abc' AS float) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('abc' AS double) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('1234567890123' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('12345678901234567890123' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('' AS float) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('' AS double) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST(NULL AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST(NULL AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query 
+SELECT CAST('123.a' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('123.a' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('123.a' AS float) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('123.a' AS double) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('-2147483648' AS int) +-- !query schema +struct +-- !query output +-2147483648 + + +-- !query +SELECT CAST('-2147483649' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('2147483647' AS int) +-- !query schema +struct +-- !query output +2147483647 + + +-- !query +SELECT CAST('2147483648' AS int) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('-9223372036854775808' AS long) +-- !query schema +struct +-- !query output +-9223372036854775808 + + +-- !query +SELECT CAST('-9223372036854775809' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT CAST('9223372036854775807' AS long) +-- !query schema +struct +-- !query output +9223372036854775807 + + +-- !query +SELECT CAST('9223372036854775808' AS long) +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT HEX(CAST('abc' AS binary)) +-- !query schema +struct +-- !query output +616263 + + +-- !query +SELECT HEX(CAST(CAST(123 AS byte) AS binary)) +-- !query schema +struct +-- !query output +7B + + +-- !query +SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) +-- !query schema +struct +-- !query output +85 + + +-- !query +SELECT HEX(CAST(123S AS binary)) +-- !query schema +struct +-- !query output +007B + + +-- !query +SELECT HEX(CAST(-123S AS binary)) +-- !query schema +struct +-- !query output +FF85 + + +-- !query +SELECT HEX(CAST(123 AS binary)) +-- !query schema +struct +-- !query output +0000007B + + +-- !query +SELECT HEX(CAST(-123 AS binary)) +-- !query schema +struct +-- !query output +FFFFFF85 + + +-- !query +SELECT HEX(CAST(123L AS binary)) +-- !query schema +struct +-- !query output +000000000000007B + + +-- !query +SELECT HEX(CAST(-123L AS binary)) +-- !query schema +struct +-- !query output +FFFFFFFFFFFFFF85 + + +-- !query +DESC FUNCTION boolean +-- !query schema +struct +-- !query output +Class: org.apache.spark.sql.catalyst.expressions.Cast +Function: boolean +Usage: boolean(expr) - Casts the value `expr` to the target data type `boolean`. + + +-- !query +DESC FUNCTION EXTENDED boolean +-- !query schema +struct +-- !query output +Class: org.apache.spark.sql.catalyst.expressions.Cast +Extended Usage: + No example/argument for boolean. + + Since: 2.0.1 + +Function: boolean +Usage: boolean(expr) - Casts the value `expr` to the target data type `boolean`. 
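-- Sketch, same session assumed: the HEX values above follow from the binary cast
-- emitting the two's-complement bytes of the source integral type, so the width
-- tracks the type (tinyint 1 byte, smallint 2, int 4, bigint 8):
SELECT HEX(CAST(-1 AS binary));   -- FFFFFFFF: -1 as a 4-byte int
SELECT HEX(CAST(-1L AS binary));  -- FFFFFFFFFFFFFFFF: the same value as an 8-byte bigint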
+ + +-- !query +SELECT CAST('interval 3 month 1 hour' AS interval) +-- !query schema +struct +-- !query output +3 months 1 hours + + +-- !query +SELECT CAST("interval '3-1' year to month" AS interval year to month) +-- !query schema +struct +-- !query output +3-1 + + +-- !query +SELECT CAST("interval '3 00:00:01' day to second" AS interval day to second) +-- !query schema +struct +-- !query output +3 00:00:01.000000000 + + +-- !query +SELECT CAST(interval 3 month 1 hour AS string) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "_LEGACY_ERROR_TEMP_0029", + "messageParameters" : { + "literal" : "interval 3 month 1 hour" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 13, + "stopIndex" : 35, + "fragment" : "interval 3 month 1 hour" + } ] +} + + +-- !query +SELECT CAST(interval 3 year 1 month AS string) +-- !query schema +struct +-- !query output +INTERVAL '3-1' YEAR TO MONTH + + +-- !query +SELECT CAST(interval 3 day 1 second AS string) +-- !query schema +struct +-- !query output +INTERVAL '3 00:00:01' DAY TO SECOND + + +-- !query +select cast(' 1' as tinyint) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(' 1\t' as tinyint) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(' 1' as smallint) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(' 1' as INT) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(' 1' as bigint) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(' 1' as float) +-- !query schema +struct +-- !query output +1.0 + + +-- !query +select cast(' 1 ' as DOUBLE) +-- !query schema +struct +-- !query output +1.0 + + +-- !query +select cast('1.0 ' as DEC) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast('1中文' as tinyint) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('1中文' as smallint) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('1中文' as INT) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('中文1' as bigint) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('1中文' as bigint) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('\t\t true \n\r ' as boolean) +-- !query schema +struct +-- !query output +true + + +-- !query +select cast('\t\n false \t\r' as boolean) +-- !query schema +struct +-- !query output +false + + +-- !query +select cast('\t\n xyz \t\r' as boolean) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('23.45' as decimal(4, 2)) +-- !query schema +struct +-- !query output +23.45 + + +-- !query +select cast('123.45' as decimal(4, 2)) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('xyz' as decimal(4, 2)) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('2022-01-01' as date) +-- !query schema +struct +-- !query output +2022-01-01 + + +-- !query +select cast('a' as date) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('2022-01-01 00:00:00' as timestamp) +-- !query schema +struct +-- !query output +2022-01-01 00:00:00 + + +-- !query +select cast('a' as timestamp) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast('2022-01-01 00:00:00' as timestamp_ntz) +-- !query schema +struct +-- !query output +2022-01-01 00:00:00 + + +-- !query 
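-- Sketch, same non-ANSI session assumed: datetime text that cannot be parsed
-- degrades to NULL just like the numeric casts above; ANSI mode would raise
-- CAST_INVALID_INPUT for the same inputs:
SELECT CAST('2022-13-01' AS date);  -- NULL: month 13 is out of range
SELECT CAST('noon' AS timestamp);   -- NULL: unparseable timestamp text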
+select cast('a' as timestamp_ntz) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast(cast('inf' as double) as timestamp) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast(cast('inf' as float) as timestamp) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast(interval '1' year as tinyint) +-- !query schema +struct +-- !query output +1 + + +-- !query +select cast(interval '-10-2' year to month as smallint) +-- !query schema +struct +-- !query output +-122 + + +-- !query +select cast(interval '1000' month as int) +-- !query schema +struct +-- !query output +1000 + + +-- !query +select cast(interval -'10.123456' second as tinyint) +-- !query schema +struct +-- !query output +-10 + + +-- !query +select cast(interval '23:59:59' hour to second as smallint) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"INTERVAL HOUR TO SECOND\"", + "targetType" : "\"SMALLINT\"", + "value" : "INTERVAL '23:59:59' HOUR TO SECOND" + } +} + + +-- !query +select cast(interval -'1 02:03:04.123' day to second as int) +-- !query schema +struct +-- !query output +-93784 + + +-- !query +select cast(interval '10' day as bigint) +-- !query schema +struct +-- !query output +10 + + +-- !query +select cast(interval '-1000' month as tinyint) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"INTERVAL MONTH\"", + "targetType" : "\"TINYINT\"", + "value" : "INTERVAL '-1000' MONTH" + } +} + + +-- !query +select cast(interval '1000000' second as smallint) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"INTERVAL SECOND\"", + "targetType" : "\"SMALLINT\"", + "value" : "INTERVAL '1000000' SECOND" + } +} + + +-- !query +select cast(1Y as interval year) +-- !query schema +struct +-- !query output +1-0 + + +-- !query +select cast(-122S as interval year to month) +-- !query schema +struct +-- !query output +-10-2 + + +-- !query +select cast(ym as interval year to month) from values(-122S) as t(ym) +-- !query schema +struct +-- !query output +-10-2 + + +-- !query +select cast(1000 as interval month) +-- !query schema +struct +-- !query output +83-4 + + +-- !query +select cast(-10L as interval second) +-- !query schema +struct +-- !query output +-0 00:00:10.000000000 + + +-- !query +select cast(100Y as interval hour to second) +-- !query schema +struct +-- !query output +0 00:01:40.000000000 + + +-- !query +select cast(dt as interval hour to second) from values(100Y) as t(dt) +-- !query schema +struct +-- !query output +0 00:01:40.000000000 + + +-- !query +select cast(-1000S as interval day to second) +-- !query schema +struct +-- !query output +-0 00:16:40.000000000 + + +-- !query +select cast(10 as interval day) +-- !query schema +struct +-- !query output +10 00:00:00.000000000 + + +-- !query +select cast(2147483647 as interval year) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"INT\"", + "targetType" : "\"INTERVAL YEAR\"", + "value" : "2147483647" + } +} + + +-- !query +select 
cast(-9223372036854775808L as interval day) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "CAST_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "sourceType" : "\"BIGINT\"", + "targetType" : "\"INTERVAL DAY\"", + "value" : "-9223372036854775808L" + } +} + + +-- !query +select cast(interval '-1' year as decimal(10, 0)) +-- !query schema +struct +-- !query output +-1 + + +-- !query +select cast(interval '1.000001' second as decimal(10, 6)) +-- !query schema +struct +-- !query output +1.000001 + + +-- !query +select cast(interval '08:11:10.001' hour to second as decimal(10, 4)) +-- !query schema +struct +-- !query output +29470.0010 + + +-- !query +select cast(interval '1 01:02:03.1' day to second as decimal(8, 1)) +-- !query schema +struct +-- !query output +90123.1 + + +-- !query +select cast(interval '10.123' second as decimal(4, 2)) +-- !query schema +struct +-- !query output +10.12 + + +-- !query +select cast(interval '10.005' second as decimal(4, 2)) +-- !query schema +struct +-- !query output +10.01 + + +-- !query +select cast(interval '10.123' second as decimal(5, 2)) +-- !query schema +struct +-- !query output +10.12 + + +-- !query +select cast(interval '10.123' second as decimal(1, 0)) +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", + "sqlState" : "22003", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "precision" : "1", + "scale" : "0", + "value" : "10.123000" + } +} + + +-- !query +select cast(10.123456BD as interval day to second) +-- !query schema +struct +-- !query output +0 00:00:10.123456000 + + +-- !query +select cast(80.654321BD as interval hour to minute) +-- !query schema +struct +-- !query output +0 01:20:00.000000000 + + +-- !query +select cast(-10.123456BD as interval year to month) +-- !query schema +struct +-- !query output +-0-10 + + +-- !query +select cast(10.654321BD as interval month) +-- !query schema +struct +-- !query output +0-11 + + +-- !query +SELECT '1.23' :: int +-- !query schema +struct +-- !query output +1 + + +-- !query +SELECT 'abc' :: int +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT '12345678901234567890123' :: long +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT '' :: int +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT NULL :: int +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT '123.a' :: int +-- !query schema +struct +-- !query output +NULL + + +-- !query +SELECT '-2147483648' :: int +-- !query schema +struct +-- !query output +-2147483648 + + +-- !query +SELECT HEX('abc' :: binary) +-- !query schema +struct +-- !query output +616263 + + +-- !query +SELECT HEX((123 :: byte) :: binary) +-- !query schema +struct +-- !query output +7B + + +-- !query +SELECT 'interval 3 month 1 hour' :: interval +-- !query schema +struct +-- !query output +3 months 1 hours + + +-- !query +SELECT interval 3 day 1 second :: string +-- !query schema +struct +-- !query output +INTERVAL '3 00:00:01' DAY TO SECOND + + +-- !query +select ' 1 ' :: DOUBLE +-- !query schema +struct +-- !query output +1.0 + + +-- !query +select '1.0 ' :: DEC +-- !query schema +struct +-- !query output +1 + + +-- !query +select '\t\t true \n\r ' :: boolean +-- !query schema +struct +-- !query output +true + + +-- !query +select '2022-01-01 00:00:00' :: timestamp +-- !query schema +struct +-- 
!query output +2022-01-01 00:00:00 + + +-- !query +select interval '-10-2' year to month :: smallint +-- !query schema +struct +-- !query output +-122 + + +-- !query +select -10L :: interval second +-- !query schema +struct +-- !query output +-0 00:00:10.000000000 + + +-- !query +select interval '08:11:10.001' hour to second :: decimal(10, 4) +-- !query schema +struct +-- !query output +29470.0010 + + +-- !query +select 10.123456BD :: interval day to second +-- !query schema +struct +-- !query output +0 00:00:10.123456000 + + +-- !query +SELECT '1.23' :: int :: long +-- !query schema +struct +-- !query output +1 + + +-- !query +SELECT '2147483648' :: long :: int +-- !query schema +struct +-- !query output +-2147483648 + + +-- !query +SELECT CAST('2147483648' :: long AS int) +-- !query schema +struct +-- !query output +-2147483648 + + +-- !query +SELECT map(1, '123', 2, '456')[1] :: int +-- !query schema +struct +-- !query output +123 + + +-- !query +SELECT '2147483648' :: BINT +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "UNSUPPORTED_DATATYPE", + "sqlState" : "0A000", + "messageParameters" : { + "typeName" : "\"BINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 24, + "stopIndex" : 27, + "fragment" : "BINT" + } ] +} + + +-- !query +SELECT '2147483648' :: SELECT +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "UNSUPPORTED_DATATYPE", + "sqlState" : "0A000", + "messageParameters" : { + "typeName" : "\"SELECT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 24, + "stopIndex" : 29, + "fragment" : "SELECT" + } ] +} + + +-- !query +SELECT FALSE IS NOT NULL :: string +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'::'", + "hint" : "" + } +} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/conditional-functions.sql.out similarity index 78% rename from sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/conditional-functions.sql.out index aa8a600f87560..33882561f518a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/conditional-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/conditional-functions.sql.out @@ -138,25 +138,9 @@ NULL NULL NULL NULL NULL 1 NULL -- !query SELECT nullifzero('abc') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 24, - "fragment" : "nullifzero('abc')" - } ] -} +abc -- !query @@ -173,25 +157,9 @@ struct +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'abc'", - "sourceType" : "\"STRING\"", - "targetType" : "\"BIGINT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, 
- "stopIndex" : 24, - "fragment" : "zeroifnull('abc')" - } ] -} +abc -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/date.sql.out similarity index 91% rename from sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/date.sql.out index aa283d3249617..c46c200ff026f 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/date.sql.out @@ -49,33 +49,17 @@ struct -- !query select make_date(2000, 13, 1) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", - "sqlState" : "22023", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13" - } -} +NULL -- !query select make_date(2000, 1, 33) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", - "sqlState" : "22023", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33" - } -} +NULL -- !query @@ -200,17 +184,9 @@ struct -- !query select to_date("02-29", "MM-dd") -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Invalid date 'February 29' as '1970' is not a leap year" - } -} +NULL -- !query @@ -266,16 +242,9 @@ struct -- !query select next_day("2015-07-23", "xx") -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkIllegalArgumentException -{ - "errorClass" : "ILLEGAL_DAY_OF_WEEK", - "sqlState" : "22009", - "messageParameters" : { - "string" : "xx" - } -} +NULL -- !query @@ -305,25 +274,9 @@ struct -- !query select next_day("xx", "Mon") -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'xx'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DATE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "next_day(\"xx\", \"Mon\")" - } ] -} +NULL -- !query @@ -465,22 +418,13 @@ select date_add('2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.SparkNumberFormatException +org.apache.spark.sql.AnalysisException { - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", + "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", + "sqlState" : "22023", "messageParameters" : { - "expression" : "'1.2'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 36, - "fragment" : "date_add('2011-11-11', '1.2')" - } ] + "functionName" : "date_add" + } } @@ -639,22 +583,13 @@ select date_sub(date'2011-11-11', '1.2') -- !query schema struct<> -- !query output -org.apache.spark.SparkNumberFormatException +org.apache.spark.sql.AnalysisException { - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", + "errorClass" : "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", + 
"sqlState" : "22023", "messageParameters" : { - "expression" : "'1.2'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 40, - "fragment" : "date_sub(date'2011-11-11', '1.2')" - } ] + "functionName" : "date_sub" + } } @@ -693,17 +628,53 @@ struct -- !query select date_add('2011-11-11', int_str) from date_view -- !query schema -struct +struct<> -- !query output -2011-11-12 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"int_str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", + "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 38, + "fragment" : "date_add('2011-11-11', int_str)" + } ] +} -- !query select date_sub('2011-11-11', int_str) from date_view -- !query schema -struct +struct<> -- !query output -2011-11-10 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"int_str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", + "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 38, + "fragment" : "date_sub('2011-11-11', int_str)" + } ] +} -- !query @@ -783,9 +754,27 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day> -- !query select date '2001-10-01' - '2001-09-28' -- !query schema -struct<(DATE '2001-10-01' - 2001-09-28):interval day> +struct<> -- !query output -3 00:00:00.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"2001-09-28\"", + "inputType" : "\"DOUBLE\"", + "paramIndex" : "second", + "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", + "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "date '2001-10-01' - '2001-09-28'" + } ] +} -- !query @@ -823,9 +812,27 @@ struct<(date_str - DATE '2001-09-28'):interval day> -- !query select date '2001-09-28' - date_str from date_view -- !query schema -struct<(DATE '2001-09-28' - date_str):interval day> +struct<> -- !query output --3696 00:00:00.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"date_str\"", + "inputType" : "\"DOUBLE\"", + "paramIndex" : "second", + "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", + "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "date '2001-09-28' - date_str" + } ] +} -- !query @@ -839,7 +846,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "inputSql" : "\"1\"", - "inputType" : "\"DATE\"", + "inputType" : "\"DOUBLE\"", "paramIndex" : 
"second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" @@ -864,11 +871,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputSql" : "\"DATE '2011-11-11'\"", - "inputType" : "\"DATE\"", + "inputSql" : "\"1\"", + "inputType" : "\"DOUBLE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\"" + "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/nonansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/datetime-parsing-invalid.sql.out new file mode 100644 index 0000000000000..fffbb2a4e017f --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/datetime-parsing-invalid.sql.out @@ -0,0 +1,241 @@ +-- Automatically generated by SQLQueryTestSuite +-- !query +select to_timestamp('294248', 'y') +-- !query schema +struct<> +-- !query output +java.lang.ArithmeticException +long overflow + + +-- !query +select to_timestamp('1', 'yy') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('-12', 'yy') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('123', 'yy') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('1', 'yyy') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('1234567', 'yyyyyyy') +-- !query schema +struct<> +-- !query output +org.apache.spark.SparkUpgradeException +{ + "errorClass" : "INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION", + "sqlState" : "42K0B", + "messageParameters" : { + "config" : "\"spark.sql.legacy.timeParserPolicy\"", + "docroot" : "https://spark.apache.org/docs/latest", + "pattern" : "'yyyyyyy'" + } +} + + +-- !query +select to_timestamp('366', 'D') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('9', 'DD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('366', 'DD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('9', 'DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('99', 'DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('30-365', 'dd-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('11-365', 'MM-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2019-366', 'yyyy-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('12-30-365', 'MM-dd-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2020-01-365', 'yyyy-dd-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2020-10-350', 'yyyy-MM-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) +-- !query schema +struct> +-- !query output +{"date":null} + + +-- !query +select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query 
schema +struct +-- !query output +NULL + + +-- !query +select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast("Unparseable" as timestamp) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select cast("Unparseable" as date) +-- !query schema +struct +-- !query output +NULL diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-special.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/datetime-special.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/datetime-special.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/datetime-special.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/nonansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/decimalArithmeticOperations.sql.out new file mode 100644 index 0000000000000..8074a7bf2ac6d --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/decimalArithmeticOperations.sql.out @@ -0,0 +1,195 @@ +-- Automatically generated by SQLQueryTestSuite +-- !query +create table decimals_test(id int, a decimal(38,18), b decimal(38,18)) using parquet +-- !query schema +struct<> +-- !query output + + + +-- !query +insert into decimals_test values(1, 100.0, 999.0), (2, 12345.123, 12345.123), + (3, 0.1234567891011, 1234.1), (4, 123456789123456789.0, 1.123456789123456789) +-- !query schema +struct<> +-- !query output + + + +-- !query +select id, a*10, b/10 from decimals_test order by id +-- !query schema +struct +-- !query output +1 1000.000000000000000 99.900000000000000000 +2 123451.230000000000000 1234.512300000000000000 +3 1.234567891011000 123.410000000000000000 +4 1234567891234567890.000000000000000 0.112345678912345679 + + +-- !query +select 10.3 * 3.0 +-- !query schema +struct<(10.3 * 3.0):decimal(6,2)> +-- !query output +30.90 + + +-- !query +select 10.3000 * 3.0 +-- !query schema +struct<(10.3000 * 3.0):decimal(9,5)> +-- !query output +30.90000 + + +-- !query +select 10.30000 * 30.0 +-- !query schema +struct<(10.30000 * 30.0):decimal(11,6)> +-- !query output +309.000000 + + +-- !query +select 10.300000000000000000 * 3.000000000000000000 +-- !query schema +struct<(10.300000000000000000 * 3.000000000000000000):decimal(38,34)> +-- !query output +30.9000000000000000000000000000000000 + + +-- !query +select 10.300000000000000000 * 3.0000000000000000000 +-- !query schema +struct<(10.300000000000000000 * 3.0000000000000000000):decimal(38,34)> +-- !query output +30.9000000000000000000000000000000000 + + +-- !query +select (5e36BD + 0.1) + 5e36BD +-- !query schema 
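-- Sketch, non-ANSI session assumed: decimal arithmetic whose exact result needs
-- more than 38 digits of precision returns NULL here instead of raising
-- NUMERIC_VALUE_OUT_OF_RANGE, as the surrounding queries show; for example:
SELECT 5e37BD + 5e37BD;  -- NULL: 1e38 needs 39 digits, over the decimal(38,0) cap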
+struct<((5000000000000000000000000000000000000 + 0.1) + 5000000000000000000000000000000000000):decimal(38,1)> +-- !query output +NULL + + +-- !query +select (-4e36BD - 0.1) - 7e36BD +-- !query schema +struct<((-4000000000000000000000000000000000000 - 0.1) - 7000000000000000000000000000000000000):decimal(38,1)> +-- !query output +NULL + + +-- !query +select 12345678901234567890.0 * 12345678901234567890.0 +-- !query schema +struct<(12345678901234567890.0 * 12345678901234567890.0):decimal(38,2)> +-- !query output +NULL + + +-- !query +select 1e35BD / 0.1 +-- !query schema +struct<(100000000000000000000000000000000000 / 0.1):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 123456789123456789.1234567890 * 1.123456789123456789 +-- !query schema +struct<(123456789123456789.1234567890 * 1.123456789123456789):decimal(38,18)> +-- !query output +138698367904130467.654320988515622621 + + +-- !query +select 123456789123456789.1234567890 * 1.123456789123456789 +-- !query schema +struct<(123456789123456789.1234567890 * 1.123456789123456789):decimal(38,18)> +-- !query output +138698367904130467.654320988515622621 + + +-- !query +select 12345678912345.123456789123 / 0.000000012345678 +-- !query schema +struct<(12345678912345.123456789123 / 1.2345678E-8):decimal(38,9)> +-- !query output +1000000073899961059796.725866332 + + +-- !query +select 1.0123456789012345678901234567890123456e36BD / 0.1 +-- !query schema +struct<(1012345678901234567890123456789012345.6 / 0.1):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e35BD / 1.0 +-- !query schema +struct<(101234567890123456789012345678901234.56 / 1.0):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e34BD / 1.0 +-- !query schema +struct<(10123456789012345678901234567890123.456 / 1.0):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e33BD / 1.0 +-- !query schema +struct<(1012345678901234567890123456789012.3456 / 1.0):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e32BD / 1.0 +-- !query schema +struct<(101234567890123456789012345678901.23456 / 1.0):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e31BD / 1.0 +-- !query schema +struct<(10123456789012345678901234567890.123456 / 1.0):decimal(38,6)> +-- !query output +10123456789012345678901234567890.123456 + + +-- !query +select 1.0123456789012345678901234567890123456e31BD / 0.1 +-- !query schema +struct<(10123456789012345678901234567890.123456 / 0.1):decimal(38,6)> +-- !query output +NULL + + +-- !query +select 1.0123456789012345678901234567890123456e31BD / 10.0 +-- !query schema +struct<(10123456789012345678901234567890.123456 / 10.0):decimal(38,6)> +-- !query output +1012345678901234567890123456789.012346 + + +-- !query +drop table decimals_test +-- !query schema +struct<> +-- !query output + diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/double-quoted-identifiers-disabled.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/double-quoted-identifiers-disabled.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out 
b/sql/core/src/test/resources/sql-tests/results/nonansi/double-quoted-identifiers-enabled.sql.out similarity index 66% rename from sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/double-quoted-identifiers-enabled.sql.out index 2444c399a87ec..81a98a60590f0 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/double-quoted-identifiers-enabled.sql.out @@ -4,20 +4,14 @@ SELECT 1 FROM "not_exist" -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 15, - "stopIndex" : 25, - "fragment" : "\"not_exist\"" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -26,12 +20,13 @@ USE SCHEMA "not_exist" -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "SCHEMA_NOT_FOUND", - "sqlState" : "42704", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "schemaName" : "`spark_catalog`.`not_exist`" + "error" : "'\"not_exist\"'", + "hint" : "" } } @@ -41,20 +36,14 @@ ALTER TABLE "not_exist" ADD COLUMN not_exist int -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 23, - "fragment" : "\"not_exist\"" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -63,20 +52,14 @@ ALTER TABLE not_exist ADD COLUMN "not_exist" int -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 21, - "fragment" : "not_exist" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -85,20 +68,14 @@ SELECT 1 AS "not_exist" FROM not_exist -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 30, - "stopIndex" : 38, - "fragment" : "not_exist" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -107,20 +84,14 @@ SELECT 1 FROM not_exist AS X("hello") -- !query schema struct<> -- !query output 
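-- Sketch, assuming the non-ANSI defaults where double quotes delimit string
-- literals rather than identifiers (spark.sql.ansi.doubleQuotedIdentifiers=false):
-- quoted names in identifier positions become parse errors, and quoted "columns"
-- evaluate as plain strings, which is why the errors below change as they do:
SELECT "hello";     -- returns the literal string 'hello', no column resolution
SELECT 1 FROM "t";  -- PARSE_SYNTAX_ERROR: a string literal cannot name a relation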
-org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "relationName" : "`not_exist`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 15, - "stopIndex" : 23, - "fragment" : "not_exist" - } ] + "error" : "'\"hello\"'", + "hint" : "" + } } @@ -129,21 +100,14 @@ SELECT "not_exist"() -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "UNRESOLVED_ROUTINE", - "sqlState" : "42883", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "routineName" : "`not_exist`", - "searchPath" : "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 20, - "fragment" : "\"not_exist\"()" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -152,21 +116,14 @@ SELECT "not_exist".not_exist() -- !query schema struct<> -- !query output -org.apache.spark.sql.AnalysisException +org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "UNRESOLVED_ROUTINE", - "sqlState" : "42883", + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", "messageParameters" : { - "routineName" : "`not_exist`.`not_exist`", - "searchPath" : "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "\"not_exist\".not_exist()" - } ] + "error" : "'\"not_exist\"'", + "hint" : "" + } } @@ -344,23 +301,9 @@ org.apache.spark.sql.AnalysisException -- !query SELECT "hello" -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION", - "sqlState" : "42703", - "messageParameters" : { - "objectName" : "`hello`" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 14, - "fragment" : "\"hello\"" - } ] -} +hello -- !query @@ -368,15 +311,7 @@ CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1 -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "PARSE_SYNTAX_ERROR", - "sqlState" : "42601", - "messageParameters" : { - "error" : "'\"hello\"'", - "hint" : "" - } -} + -- !query @@ -384,30 +319,15 @@ DROP VIEW v -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException -{ - "errorClass" : "TABLE_OR_VIEW_NOT_FOUND", - "sqlState" : "42P01", - "messageParameters" : { - "relationName" : "`spark_catalog`.`default`.`v`" - } -} + -- !query SELECT INTERVAL "1" YEAR -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "PARSE_SYNTAX_ERROR", - "sqlState" : "42601", - "messageParameters" : { - "error" : "'\"1\"'", - "hint" : "" - } -} +1-0 -- !query @@ -447,7 +367,15 @@ CREATE SCHEMA "myschema" -- !query schema struct<> -- !query output - +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myschema\"'", + "hint" : "" + } +} -- !query @@ -456,15 +384,31 @@ 
CREATE TEMPORARY VIEW "myview"("c1") AS -- !query schema struct<> -- !query output - +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myview\"'", + "hint" : "" + } +} -- !query SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1") -- !query schema -struct +struct<> -- !query output -1 +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"a2\"'", + "hint" : "" + } +} -- !query @@ -472,7 +416,15 @@ DROP TABLE "myview" -- !query schema struct<> -- !query output - +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myview\"'", + "hint" : "" + } +} -- !query @@ -480,4 +432,12 @@ DROP SCHEMA "myschema" -- !query schema struct<> -- !query output - +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "PARSE_SYNTAX_ERROR", + "sqlState" : "42601", + "messageParameters" : { + "error" : "'\"myschema\"'", + "hint" : "" + } +} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/higher-order-functions.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/higher-order-functions.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/interval.sql.out similarity index 94% rename from sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/interval.sql.out index 4e012df792dea..a8a0423bdb3e0 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/interval.sql.out @@ -123,97 +123,33 @@ struct<(INTERVAL '2' YEAR / 2):interval year to month> -- !query select interval 2 second * 'a' -- !query schema -struct<> +struct<(INTERVAL '02' SECOND * a):interval day to second> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "interval 2 second * 'a'" - } ] -} +NULL -- !query select interval 2 second / 'a' -- !query schema -struct<> +struct<(INTERVAL '02' SECOND / a):interval day to second> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "interval 2 second / 'a'" - } ] -} +NULL -- !query select interval 2 year * 'a' -- !query schema -struct<> +struct<(INTERVAL '2' YEAR * a):interval year to month> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : 
"\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "interval 2 year * 'a'" - } ] -} +NULL -- !query select interval 2 year / 'a' -- !query schema -struct<> +struct<(INTERVAL '2' YEAR / a):interval year to month> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "interval 2 year / 'a'" - } ] -} +NULL -- !query @@ -235,49 +171,17 @@ struct<(INTERVAL '2' YEAR * 2):interval year to month> -- !query select 'a' * interval 2 second -- !query schema -struct<> +struct<(INTERVAL '02' SECOND * a):interval day to second> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 30, - "fragment" : "'a' * interval 2 second" - } ] -} +NULL -- !query select 'a' * interval 2 year -- !query schema -struct<> +struct<(INTERVAL '2' YEAR * a):interval year to month> -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"DOUBLE\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 28, - "fragment" : "'a' * interval 2 year" - } ] -} +NULL -- !query @@ -869,26 +773,9 @@ struct -- !query select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION", - "sqlState" : "22003", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"", - "precision" : "18", - "scale" : "6", - "value" : "1234567890123456789" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 59, - "fragment" : "make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)" - } ] -} +NULL -- !query @@ -1990,49 +1877,17 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select '4 11:11' - interval '4 22:12' day to minute -- !query schema -struct<> +struct<4 11:11 - INTERVAL '4 22:12' DAY TO MINUTE:string> -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'4 11:11'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 51, - "fragment" : "'4 11:11' - interval '4 22:12' day to minute" - } ] -} +NULL -- !query select '4 12:12:12' + interval '4 22:12' day to minute -- !query schema -struct<> +struct<4 12:12:12 + INTERVAL '4 22:12' DAY TO MINUTE:string> -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'4 12:12:12'", - "sourceType" : "\"STRING\"", - 
"targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "'4 12:12:12' + interval '4 22:12' day to minute" - } ] -} +NULL -- !query @@ -2094,49 +1949,17 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select str - interval '4 22:12' day to minute from interval_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 45, - "fragment" : "str - interval '4 22:12' day to minute" - } ] -} +NULL -- !query select str + interval '4 22:12' day to minute from interval_view -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'1'", - "sourceType" : "\"STRING\"", - "targetType" : "\"TIMESTAMP\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 45, - "fragment" : "str + interval '4 22:12' day to minute" - } ] -} +NULL -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out similarity index 75% rename from sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out index b2331ec4ab804..a885525028623 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out @@ -7,51 +7,51 @@ struct ADD false AFTER false AGGREGATE false -ALL true +ALL false ALTER false ALWAYS false ANALYZE false -AND true +AND false ANTI false -ANY true +ANY false ANY_VALUE false ARCHIVE false ARRAY false -AS true +AS false ASC false AT false -AUTHORIZATION true +AUTHORIZATION false BEGIN false BETWEEN false BIGINT false BINARY false BINDING false BOOLEAN false -BOTH true +BOTH false BUCKET false BUCKETS false BY false BYTE false CACHE false -CALL true +CALL false CALLED false CASCADE false -CASE true -CAST true +CASE false +CAST false CATALOG false CATALOGS false CHANGE false CHAR false CHARACTER false -CHECK true +CHECK false CLEAR false CLUSTER false CLUSTERED false CODEGEN false -COLLATE true -COLLATION true +COLLATE false +COLLATION false COLLECTION false -COLUMN true +COLUMN false COLUMNS false COMMENT false COMMIT false @@ -60,17 +60,17 @@ COMPACTIONS false COMPENSATION false COMPUTE false CONCATENATE false -CONSTRAINT true +CONSTRAINT false CONTAINS false COST false -CREATE true -CROSS true +CREATE false +CROSS false CUBE false CURRENT false -CURRENT_DATE true -CURRENT_TIME true -CURRENT_TIMESTAMP true -CURRENT_USER true +CURRENT_DATE false +CURRENT_TIME false +CURRENT_TIMESTAMP false +CURRENT_USER false DATA false DATABASE false DATABASES false @@ -97,49 +97,49 @@ DETERMINISTIC false DFS false DIRECTORIES false DIRECTORY false -DISTINCT true +DISTINCT false DISTRIBUTE false DIV false DO false DOUBLE false DROP false -ELSE true -END true -ESCAPE true +ELSE false +END false +ESCAPE false ESCAPED false EVOLUTION false -EXCEPT true +EXCEPT false EXCHANGE false EXCLUDE false -EXECUTE true +EXECUTE 
false EXISTS false EXPLAIN false EXPORT false EXTENDED false EXTERNAL false EXTRACT false -FALSE true -FETCH true +FALSE false +FETCH false FIELDS false FILEFORMAT false -FILTER true +FILTER false FIRST false FLOAT false FOLLOWING false -FOR true -FOREIGN true +FOR false +FOREIGN false FORMAT false FORMATTED false -FROM true -FULL true +FROM false +FULL false FUNCTION false FUNCTIONS false GENERATED false GLOBAL false -GRANT true -GROUP true +GRANT false +GROUP false GROUPING false -HAVING true +HAVING false HOUR false HOURS false IDENTIFIER false @@ -149,34 +149,34 @@ IGNORE false ILIKE false IMMEDIATE false IMPORT false -IN true +IN false INCLUDE false INCREMENT false INDEX false INDEXES false -INNER true +INNER false INPATH false INPUT false INPUTFORMAT false INSERT false INT false INTEGER false -INTERSECT true +INTERSECT false INTERVAL false -INTO true +INTO false INVOKER false -IS true +IS false ITEMS false ITERATE false -JOIN true +JOIN false KEYS false LANGUAGE false LAST false -LATERAL true +LATERAL false LAZY false -LEADING true +LEADING false LEAVE false -LEFT true +LEFT false LIKE false LIMIT false LINES false @@ -209,26 +209,26 @@ NAMESPACE false NAMESPACES false NANOSECOND false NANOSECONDS false -NATURAL true +NATURAL false NO false NONE false -NOT true -NULL true +NOT false +NULL false NULLS false NUMERIC false OF false -OFFSET true -ON true -ONLY true +OFFSET false +ON false +ONLY false OPTION false OPTIONS false -OR true -ORDER true +OR false +ORDER false OUT false -OUTER true +OUTER false OUTPUTFORMAT false OVER false -OVERLAPS true +OVERLAPS false OVERLAY false OVERWRITE false PARTITION false @@ -239,7 +239,7 @@ PIVOT false PLACING false POSITION false PRECEDING false -PRIMARY true +PRIMARY false PRINCIPALS false PROPERTIES false PURGE false @@ -252,7 +252,7 @@ RECORDREADER false RECORDWRITER false RECOVER false REDUCE false -REFERENCES true +REFERENCES false REFRESH false RENAME false REPAIR false @@ -265,7 +265,7 @@ RESTRICT false RETURN false RETURNS false REVOKE false -RIGHT true +RIGHT false ROLE false ROLES false ROLLBACK false @@ -277,12 +277,12 @@ SCHEMAS false SECOND false SECONDS false SECURITY false -SELECT true +SELECT false SEMI false SEPARATED false SERDE false SERDEPROPERTIES false -SESSION_USER true +SESSION_USER false SET false SETS false SHORT false @@ -290,12 +290,12 @@ SHOW false SINGLE false SKEWED false SMALLINT false -SOME true +SOME false SORT false SORTED false SOURCE false SPECIFIC false -SQL true +SQL false START false STATISTICS false STORED false @@ -307,14 +307,14 @@ SUBSTRING false SYNC false SYSTEM_TIME false SYSTEM_VERSION false -TABLE true +TABLE false TABLES false TABLESAMPLE false TARGET false TBLPROPERTIES false TERMINATED false -THEN true -TIME true +THEN false +TIME false TIMEDIFF false TIMESTAMP false TIMESTAMPADD false @@ -322,9 +322,9 @@ TIMESTAMPDIFF false TIMESTAMP_LTZ false TIMESTAMP_NTZ false TINYINT false -TO true +TO false TOUCH false -TRAILING true +TRAILING false TRANSACTION false TRANSACTIONS false TRANSFORM false @@ -336,17 +336,17 @@ TYPE false UNARCHIVE false UNBOUNDED false UNCACHE false -UNION true -UNIQUE true -UNKNOWN true +UNION false +UNIQUE false +UNKNOWN false UNLOCK false UNPIVOT false UNSET false UNTIL false UPDATE false USE false -USER true -USING true +USER false +USING false VALUES false VAR false VARCHAR false @@ -358,12 +358,12 @@ VIEWS false VOID false WEEK false WEEKS false -WHEN true -WHERE true +WHEN false +WHERE false WHILE false WINDOW false -WITH true -WITHIN true +WITH false +WITHIN false X 
false YEAR false YEARS false @@ -375,79 +375,4 @@ SELECT keyword from SQL_KEYWORDS() WHERE reserved -- !query schema struct -- !query output -ALL -AND -ANY -AS -AUTHORIZATION -BOTH -CALL -CASE -CAST -CHECK -COLLATE -COLLATION -COLUMN -CONSTRAINT -CREATE -CROSS -CURRENT_DATE -CURRENT_TIME -CURRENT_TIMESTAMP -CURRENT_USER -DISTINCT -ELSE -END -ESCAPE -EXCEPT -EXECUTE -FALSE -FETCH -FILTER -FOR -FOREIGN -FROM -FULL -GRANT -GROUP -HAVING -IN -INNER -INTERSECT -INTO -IS -JOIN -LATERAL -LEADING -LEFT -NATURAL -NOT -NULL -OFFSET -ON -ONLY -OR -ORDER -OUTER -OVERLAPS -PRIMARY -REFERENCES -RIGHT -SELECT -SESSION_USER -SOME -SQL -TABLE -THEN -TIME -TO -TRAILING -UNION -UNIQUE -UNKNOWN -USER -USING -WHEN -WHERE -WITH -WITHIN + diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/literals.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/literals.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/map.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/map.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/nonansi/math.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/math.sql.out new file mode 100644 index 0000000000000..09f4383933288 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/math.sql.out @@ -0,0 +1,583 @@ +-- Automatically generated by SQLQueryTestSuite +-- !query +SELECT round(25y, 1) +-- !query schema +struct +-- !query output +25 + + +-- !query +SELECT round(25y, 0) +-- !query schema +struct +-- !query output +25 + + +-- !query +SELECT round(25y, -1) +-- !query schema +struct +-- !query output +30 + + +-- !query +SELECT round(25y, -2) +-- !query schema +struct +-- !query output +0 + + +-- !query +SELECT round(25y, -3) +-- !query schema +struct +-- !query output +0 + + +-- !query +SELECT round(127y, -1) +-- !query schema +struct +-- !query output +-126 + + +-- !query +SELECT round(-128y, -1) +-- !query schema +struct +-- !query output +126 + + +-- !query +SELECT round(525s, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525s, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525s, -1) +-- !query schema +struct +-- !query output +530 + + +-- !query +SELECT round(525s, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT round(525s, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT round(32767s, -1) +-- !query schema +struct +-- !query output +-32766 + + +-- !query +SELECT round(-32768s, -1) +-- !query schema +struct +-- !query output +32766 + + +-- !query +SELECT round(525, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525, -1) +-- !query schema +struct +-- !query output +530 + + +-- !query +SELECT round(525, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT round(525, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT round(2147483647, -1) +-- !query schema +struct +-- !query output +-2147483646 + + +-- !query +SELECT round(-2147483647, -1) +-- 
!query schema +struct +-- !query output +2147483646 + + +-- !query +SELECT round(525L, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525L, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT round(525L, -1) +-- !query schema +struct +-- !query output +530 + + +-- !query +SELECT round(525L, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT round(525L, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT round(9223372036854775807L, -1) +-- !query schema +struct +-- !query output +-9223372036854775806 + + +-- !query +SELECT round(-9223372036854775808L, -1) +-- !query schema +struct +-- !query output +9223372036854775806 + + +-- !query +SELECT bround(25y, 1) +-- !query schema +struct +-- !query output +25 + + +-- !query +SELECT bround(25y, 0) +-- !query schema +struct +-- !query output +25 + + +-- !query +SELECT bround(25y, -1) +-- !query schema +struct +-- !query output +20 + + +-- !query +SELECT bround(25y, -2) +-- !query schema +struct +-- !query output +0 + + +-- !query +SELECT bround(25y, -3) +-- !query schema +struct +-- !query output +0 + + +-- !query +SELECT bround(127y, -1) +-- !query schema +struct +-- !query output +-126 + + +-- !query +SELECT bround(-128y, -1) +-- !query schema +struct +-- !query output +126 + + +-- !query +SELECT bround(525s, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525s, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525s, -1) +-- !query schema +struct +-- !query output +520 + + +-- !query +SELECT bround(525s, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT bround(525s, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT bround(32767s, -1) +-- !query schema +struct +-- !query output +-32766 + + +-- !query +SELECT bround(-32768s, -1) +-- !query schema +struct +-- !query output +32766 + + +-- !query +SELECT bround(525, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525, -1) +-- !query schema +struct +-- !query output +520 + + +-- !query +SELECT bround(525, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT bround(525, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT bround(2147483647, -1) +-- !query schema +struct +-- !query output +-2147483646 + + +-- !query +SELECT bround(-2147483647, -1) +-- !query schema +struct +-- !query output +2147483646 + + +-- !query +SELECT bround(525L, 1) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525L, 0) +-- !query schema +struct +-- !query output +525 + + +-- !query +SELECT bround(525L, -1) +-- !query schema +struct +-- !query output +520 + + +-- !query +SELECT bround(525L, -2) +-- !query schema +struct +-- !query output +500 + + +-- !query +SELECT bround(525L, -3) +-- !query schema +struct +-- !query output +1000 + + +-- !query +SELECT bround(9223372036854775807L, -1) +-- !query schema +struct +-- !query output +-9223372036854775806 + + +-- !query +SELECT bround(-9223372036854775808L, -1) +-- !query schema +struct +-- !query output +9223372036854775806 + + +-- !query +SELECT conv('100', 2, 10) +-- !query schema +struct +-- !query output +4 + + +-- !query +SELECT conv(-10, 16, -10) +-- !query schema +struct +-- !query output +-16 + + +-- !query +SELECT conv('9223372036854775808', 10, 16) +-- 
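The round()/bround() hunks above differ only at exact halfway points: round() uses HALF_UP while bround() uses HALF_EVEN (banker's rounding), and with ANSI off a rounded value that no longer fits the input type silently wraps. A plain JVM sketch of the two rounding modes, with no Spark dependency:

import java.math.{BigDecimal => JBigDecimal, RoundingMode}

val v = new JBigDecimal(25)
println(v.setScale(-1, RoundingMode.HALF_UP).longValueExact())   // 30, as in round(25y, -1)
println(v.setScale(-1, RoundingMode.HALF_EVEN).longValueExact()) // 20, as in bround(25y, -1)
// The wraparound rows follow from the cast back to the input type: round(127y, -1)
// produces 130, which overflows tinyint and wraps to -126.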
!query schema +struct +-- !query output +8000000000000000 + + +-- !query +SELECT conv('92233720368547758070', 10, 16) +-- !query schema +struct +-- !query output +FFFFFFFFFFFFFFFF + + +-- !query +SELECT conv('9223372036854775807', 36, 10) +-- !query schema +struct +-- !query output +18446744073709551615 + + +-- !query +SELECT conv('-9223372036854775807', 36, 10) +-- !query schema +struct +-- !query output +18446744073709551615 + + +-- !query +SELECT BIN(0) +-- !query schema +struct +-- !query output +0 + + +-- !query +SELECT BIN(25) +-- !query schema +struct +-- !query output +11001 + + +-- !query +SELECT BIN(25L) +-- !query schema +struct +-- !query output +11001 + + +-- !query +SELECT BIN(25.5) +-- !query schema +struct +-- !query output +11001 + + +-- !query +SELECT POSITIVE(0Y) +-- !query schema +struct<(+ 0):tinyint> +-- !query output +0 + + +-- !query +SELECT POSITIVE(25) +-- !query schema +struct<(+ 25):int> +-- !query output +25 + + +-- !query +SELECT POSITIVE(-25L) +-- !query schema +struct<(+ -25):bigint> +-- !query output +-25 + + +-- !query +SELECT POSITIVE(25.5) +-- !query schema +struct<(+ 25.5):decimal(3,1)> +-- !query output +25.5 + + +-- !query +SELECT POSITIVE("25.5") +-- !query schema +struct<(+ 25.5):double> +-- !query output +25.5 + + +-- !query +SELECT POSITIVE("invalid") +-- !query schema +struct<(+ invalid):double> +-- !query output +NULL + + +-- !query +SELECT POSITIVE(null) +-- !query schema +struct<(+ NULL):double> +-- !query output +NULL diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/parse-schema-string.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/parse-schema-string.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/parse-schema-string.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/parse-schema-string.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/string-functions.sql.out similarity index 96% rename from sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/string-functions.sql.out index 706673606625b..3f9f24f817f2c 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/string-functions.sql.out @@ -94,25 +94,9 @@ NULL -- !query select left("abcd", -2), left("abcd", 0), left("abcd", 'a') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 43, - "stopIndex" : 59, - "fragment" : "left(\"abcd\", 'a')" - } ] -} + NULL -- !query @@ -134,25 +118,9 @@ NULL -- !query select right("abcd", -2), right("abcd", 0), right("abcd", 'a') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'a'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 45, - "stopIndex" : 62, - "fragment" : "right(\"abcd\", 'a')" - } ] -} + NULL -- !query @@ -498,49 +466,17 @@ 
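The conv() results above reflect unsigned 64-bit arithmetic with ANSI off: a value one past Long.MaxValue wraps into the unsigned range instead of failing, and inputs beyond even the unsigned range clamp to FFFFFFFFFFFFFFFF. A small JVM-only sketch of the wrap case:

val n = java.lang.Long.parseUnsignedLong("9223372036854775808")
println(java.lang.Long.toHexString(n).toUpperCase)
// 8000000000000000, matching conv('9223372036854775808', 10, 16)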
bar -- !query SELECT lpad('hi', 'invalid_length') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'invalid_length'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "lpad('hi', 'invalid_length')" - } ] -} +NULL -- !query SELECT rpad('hi', 'invalid_length') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkNumberFormatException -{ - "errorClass" : "CAST_INVALID_INPUT", - "sqlState" : "22018", - "messageParameters" : { - "expression" : "'invalid_length'", - "sourceType" : "\"STRING\"", - "targetType" : "\"INT\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 35, - "fragment" : "rpad('hi', 'invalid_length')" - } ] -} +NULL -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/timestamp.sql.out similarity index 89% rename from sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/timestamp.sql.out index e3cf1a1549228..0e0b014a3b161 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/timestamp.sql.out @@ -119,16 +119,9 @@ struct -- !query SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "INVALID_FRACTION_OF_SECOND", - "sqlState" : "22023", - "messageParameters" : { - "secAndMicros" : "60.007" - } -} +NULL -- !query @@ -150,17 +143,9 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 61) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", - "sqlState" : "22023", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61" - } -} +NULL -- !query @@ -182,33 +167,17 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", - "sqlState" : "22023", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99" - } -} +NULL -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", - "sqlState" : "22023", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999" - } -} +NULL -- !query @@ -388,17 +357,9 @@ struct -- !query select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2019-10-06 10:11:12.' 
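The left/right/lpad/rpad hunks above show the same pattern throughout: with ANSI off, an invalid string-to-integer cast degrades to NULL, while the ANSI golden files (restored further down) keep the CAST_INVALID_INPUT error. A spark-shell sketch of the toggle, assuming a session named `spark`; the exact exception text may vary by Spark version.

import scala.util.Try

spark.conf.set("spark.sql.ansi.enabled", "false")
spark.sql("SELECT lpad('hi', 'invalid_length')").show()  // NULL: the bad cast is swallowed
spark.conf.set("spark.sql.ansi.enabled", "true")
println(Try(spark.sql("SELECT lpad('hi', 'invalid_length')").collect()))
// Failure(org.apache.spark.SparkNumberFormatException ... CAST_INVALID_INPUT)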
could not be parsed at index 20" - } -} +NULL -- !query @@ -460,17 +421,9 @@ struct +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26" - } -} +NULL -- !query @@ -484,17 +437,9 @@ struct +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27" - } -} +NULL -- !query @@ -556,33 +501,17 @@ struct +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7" - } -} +NULL -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9" - } -} +NULL -- !query @@ -652,17 +581,9 @@ struct -- !query select to_timestamp("02-29", "MM-dd") -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkDateTimeException -{ - "errorClass" : "CANNOT_PARSE_TIMESTAMP", - "sqlState" : "22007", - "messageParameters" : { - "ansiConfig" : "\"spark.sql.ansi.enabled\"", - "message" : "Invalid date 'February 29' as '1970' is not a leap year" - } -} +NULL -- !query @@ -708,17 +629,53 @@ struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day -- !query select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' -- !query schema -struct<(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10):interval day to second> +struct<> -- !query output -0 00:00:01.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"2011-11-11 11:11:10\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'" + } ] +} -- !query select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' -- !query schema -struct<(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10'):interval day to second> +struct<> -- !query output -0 00:00:01.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"2011-11-11 11:11:11\"", + "inputType" : "\"STRING\"", + "paramIndex" : "first", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" + }, + "queryContext" : [ { + "objectType" : "", + 
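The to_timestamp hunks follow the same rule for parse failures: with ANSI off an unparsable input yields NULL, while ANSI raises CANNOT_PARSE_TIMESTAMP. A sketch using the leap-year case from the goldens, spark-shell session assumed:

import scala.util.Try

spark.conf.set("spark.sql.ansi.enabled", "false")
spark.sql("SELECT to_timestamp('02-29', 'MM-dd')").show()  // NULL: 1970 is not a leap year
spark.conf.set("spark.sql.ansi.enabled", "true")
println(Try(spark.sql("SELECT to_timestamp('02-29', 'MM-dd')").collect()))
// Failure(... CANNOT_PARSE_TIMESTAMP: Invalid date 'February 29' ...)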
"objectName" : "", + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'" + } ] +} -- !query @@ -748,17 +705,53 @@ struct<> -- !query select str - timestamp'2011-11-11 11:11:11' from ts_view -- !query schema -struct<(str - TIMESTAMP '2011-11-11 11:11:11'):interval day to second> +struct<> -- !query output -0 00:00:00.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "first", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "str - timestamp'2011-11-11 11:11:11'" + } ] +} -- !query select timestamp'2011-11-11 11:11:11' - str from ts_view -- !query schema -struct<(TIMESTAMP '2011-11-11 11:11:11' - str):interval day to second> +struct<> -- !query output -0 00:00:00.000000000 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "inputSql" : "\"str\"", + "inputType" : "\"STRING\"", + "paramIndex" : "second", + "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", + "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "timestamp'2011-11-11 11:11:11' - str" + } ] +} -- !query @@ -768,11 +761,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "actualDataType" : "\"TIMESTAMP\"", - "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "left" : "\"TIMESTAMP\"", + "right" : "\"DOUBLE\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\"" }, "queryContext" : [ { @@ -792,11 +785,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", "sqlState" : "42K09", "messageParameters" : { - "actualDataType" : "\"TIMESTAMP\"", - "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "left" : "\"DOUBLE\"", + "right" : "\"TIMESTAMP\"", "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\"" }, "queryContext" : [ { diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/try_aggregates.sql.out similarity index 67% rename from sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/try_aggregates.sql.out index 94048ac8897bb..df1fe996781ad 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/try_aggregates.sql.out @@ -82,91 +82,33 @@ NULL -- !query SELECT try_sum(col / 0) FROM VALUES (5), (10), (15) AS tab(col) -- !query schema -struct<> +struct -- !query output 
-org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_sum(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_sum(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_sum(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 23, - "fragment" : "col + 1L" - } ] -} +-9223372036854775806 -- !query @@ -290,91 +232,33 @@ NULL -- !query SELECT try_avg(col / 0) FROM VALUES (5), (10), (15) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_avg(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_avg(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 22, - "fragment" : "col / 0" - } ] -} +NULL -- !query SELECT try_avg(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate 
overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 16, - "stopIndex" : 23, - "fragment" : "col + 1L" - } ] -} +-4.6116860184273879E18 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/try_arithmetic.sql.out similarity index 66% rename from sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/try_arithmetic.sql.out index acf6e70a50dea..b12680c2a6751 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/nonansi/try_arithmetic.sql.out @@ -26,9 +26,9 @@ struct -- !query SELECT try_add(2147483647, "1") -- !query schema -struct +struct -- !query output -2147483648 +2.147483648E9 -- !query @@ -58,71 +58,25 @@ NULL -- !query SELECT try_add(1, (2147483647 + 1)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "integer overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 20, - "stopIndex" : 33, - "fragment" : "2147483647 + 1" - } ] -} +-2147483647 -- !query SELECT try_add(1L, (9223372036854775807L + 1L)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 21, - "stopIndex" : 45, - "fragment" : "9223372036854775807L + 1L" - } ] -} +-9223372036854775807 -- !query SELECT try_add(1, 1.0 / 0.0) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 19, - "stopIndex" : 27, - "fragment" : "1.0 / 0.0" - } ] -} +NULL -- !query @@ -290,71 +244,25 @@ NULL -- !query SELECT try_divide(1, (2147483647 + 1)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "integer overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 23, - "stopIndex" : 36, - "fragment" : "2147483647 + 1" - } ] -} +-4.6566128730773926E-10 -- !query SELECT try_divide(1L, (9223372036854775807L + 1L)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : 
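The try_sum/try_avg hunks above illustrate that try_* aggregates only guard the aggregation step, not the evaluation of their input expression: under ANSI, col / 0 throws before the aggregate ever runs, whereas with ANSI off the division yields NULL and the aggregate returns NULL. The overflow rows are different again, because with ANSI off long addition wraps silently. A plain JVM sketch of the wrapped average:

// Non-ANSI evaluation of try_avg(col + 1L) over (Long.MaxValue, 1L):
val wrapped = Seq(Long.MaxValue + 1L, 1L + 1L)  // Seq(-9223372036854775808, 2)
val sum = wrapped.sum                            // -9223372036854775806
println(sum / 2)  // -4611686018427387903, which the golden file renders as the
                  // double -4.6116860184273879E18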
"\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 24, - "stopIndex" : 48, - "fragment" : "9223372036854775807L + 1L" - } ] -} +-1.0842021724855044E-19 -- !query SELECT try_divide(1, 1.0 / 0.0) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 22, - "stopIndex" : 30, - "fragment" : "1.0 / 0.0" - } ] -} +NULL -- !query @@ -448,9 +356,9 @@ struct -- !query SELECT try_subtract(2147483647, "-1") -- !query schema -struct +struct -- !query output -2147483648 +2.147483648E9 -- !query @@ -480,71 +388,25 @@ NULL -- !query SELECT try_subtract(1, (2147483647 + 1)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "integer overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 25, - "stopIndex" : 38, - "fragment" : "2147483647 + 1" - } ] -} +NULL -- !query SELECT try_subtract(1L, (9223372036854775807L + 1L)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 50, - "fragment" : "9223372036854775807L + 1L" - } ] -} +NULL -- !query SELECT try_subtract(1, 1.0 / 0.0) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 24, - "stopIndex" : 32, - "fragment" : "1.0 / 0.0" - } ] -} +NULL -- !query @@ -606,9 +468,9 @@ struct -- !query SELECT try_multiply(2147483647, "-2") -- !query schema -struct +struct -- !query output --4294967294 +-4.294967294E9 -- !query @@ -638,71 +500,25 @@ NULL -- !query SELECT try_multiply(1, (2147483647 + 1)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "\"spark.sql.ansi.enabled\"", - "message" : "integer overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 25, - "stopIndex" : 38, - "fragment" : "2147483647 + 1" - } ] -} +-2147483648 -- !query SELECT try_multiply(1L, (9223372036854775807L + 1L)) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "ARITHMETIC_OVERFLOW", - "sqlState" : "22003", - "messageParameters" : { - "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : 
"\"spark.sql.ansi.enabled\"", - "message" : "long overflow" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 26, - "stopIndex" : 50, - "fragment" : "9223372036854775807L + 1L" - } ] -} +-9223372036854775808 -- !query SELECT try_multiply(1, 1.0 / 0.0) -- !query schema -struct<> +struct -- !query output -org.apache.spark.SparkArithmeticException -{ - "errorClass" : "DIVIDE_BY_ZERO", - "sqlState" : "22012", - "messageParameters" : { - "config" : "\"spark.sql.ansi.enabled\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 24, - "stopIndex" : 32, - "fragment" : "1.0 / 0.0" - } ] -} +NULL -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/try_datetime_functions.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/try_datetime_functions.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/try_element_at.sql.out similarity index 100% rename from sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out rename to sql/core/src/test/resources/sql-tests/results/nonansi/try_element_at.sql.out diff --git a/sql/core/src/test/resources/sql-tests/results/operators.sql.out b/sql/core/src/test/resources/sql-tests/results/operators.sql.out index 93ac6b49b0168..356e5eca5feb2 100644 --- a/sql/core/src/test/resources/sql-tests/results/operators.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/operators.sql.out @@ -130,9 +130,23 @@ struct<(5 / 2):double> -- !query select 5 / 0 -- !query schema -struct<(5 / 0):double> --- !query output -NULL +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 12, + "fragment" : "5 / 0" + } ] +} -- !query @@ -162,9 +176,23 @@ struct<(5 div 2):bigint> -- !query select 5 div 0 -- !query schema -struct<(5 div 0):bigint> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 14, + "fragment" : "5 div 0" + } ] +} -- !query @@ -194,9 +222,23 @@ struct<(CAST(51 AS DECIMAL(10,0)) div CAST(2 AS DECIMAL(2,0))):bigint> -- !query select cast(5 as decimal(1, 0)) div cast(0 as decimal(2, 0)) -- !query schema -struct<(CAST(5 AS DECIMAL(1,0)) div CAST(0 AS DECIMAL(2,0))):bigint> +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 60, + "fragment" : "cast(5 as decimal(1, 0)) div cast(0 as decimal(2, 0))" + } ] +} -- !query @@ -450,9 +492,23 @@ true -- !query select mod(7, 2), mod(7, 0), mod(0, 2), mod(7, null), mod(null, 2), mod(null, null) -- !query schema -struct +struct<> -- !query output -1 NULL 0 
NULL NULL NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 19, + "stopIndex" : 27, + "fragment" : "mod(7, 0)" + } ] +} -- !query @@ -506,17 +562,45 @@ struct<(+ -1.11):double,(+ -1.11):decimal(3,2),negative(-1.11):double,negative(- -- !query select pmod(-7, 2), pmod(0, 2), pmod(7, 0), pmod(7, null), pmod(null, 2), pmod(null, null) -- !query schema -struct +struct<> -- !query output -1 0 NULL NULL NULL NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 33, + "stopIndex" : 42, + "fragment" : "pmod(7, 0)" + } ] +} -- !query select pmod(cast(3.13 as decimal), cast(0 as decimal)), pmod(cast(2 as smallint), cast(0 as smallint)) -- !query schema -struct --- !query output -NULL NULL +struct<> +-- !query output +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "pmod(cast(3.13 as decimal), cast(0 as decimal))" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/pipe-operators.sql.out b/sql/core/src/test/resources/sql-tests/results/pipe-operators.sql.out index aae68dddbaab3..a365e759b7c14 100644 --- a/sql/core/src/test/resources/sql-tests/results/pipe-operators.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pipe-operators.sql.out @@ -1581,12 +1581,27 @@ struct -- !query values (0, 1) tab(x, y) |> union table t +|> where x = 0 -- !query schema -struct +struct<> -- !query output -0 1 -0 abc -1 def +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'abc'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 1, + "stopIndex" : 55, + "fragment" : "values (0, 1) tab(x, y)\n|> union table t\n|> where x = 0" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/predicate-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/predicate-functions.sql.out index 5b97f2a27b8ed..e2d0563a0c451 100644 --- a/sql/core/src/test/resources/sql-tests/results/predicate-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/predicate-functions.sql.out @@ -210,25 +210,73 @@ false -- !query select 2 > '1.0' -- !query schema -struct<(2 > 1.0):boolean> --- !query output -true +struct<> +-- !query output +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "2 > '1.0'" + } ] +} -- !query select 2 > '2.0' -- !query schema -struct<(2 > 2.0):boolean> +struct<> -- !query output -false +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + 
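The operators.sql.out hunks above record the default run moving to ANSI semantics for integral division and modulus: 5 / 0, 5 div 0, mod(7, 0) and pmod(7, 0) all returned NULL before and now raise DIVIDE_BY_ZERO. A sketch of the toggle, spark-shell session assumed:

import scala.util.Try

spark.conf.set("spark.sql.ansi.enabled", "false")
spark.sql("SELECT 5 / 0, 5 div 0, mod(7, 0), pmod(7, 0)").show()  // every column is NULL
spark.conf.set("spark.sql.ansi.enabled", "true")
println(Try(spark.sql("SELECT 5 / 0").collect()))  // Failure(... DIVIDE_BY_ZERO ...)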
"messageParameters" : { + "expression" : "'2.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "2 > '2.0'" + } ] +} -- !query select 2 > '2.2' -- !query schema -struct<(2 > 2.2):boolean> +struct<> -- !query output -false +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2.2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "2 > '2.2'" + } ] +} -- !query @@ -266,17 +314,49 @@ true -- !query select 2 >= '1.0' -- !query schema -struct<(2 >= 1.0):boolean> +struct<> -- !query output -true +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "2 >= '1.0'" + } ] +} -- !query select 2 >= '2.0' -- !query schema -struct<(2 >= 2.0):boolean> +struct<> -- !query output -true +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "2 >= '2.0'" + } ] +} -- !query @@ -322,17 +402,49 @@ false -- !query select 2 < '1.0' -- !query schema -struct<(2 < 1.0):boolean> +struct<> -- !query output -false +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "2 < '1.0'" + } ] +} -- !query select 2 < '2.0' -- !query schema -struct<(2 < 2.0):boolean> +struct<> -- !query output -false +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "2 < '2.0'" + } ] +} -- !query @@ -378,17 +490,49 @@ true -- !query select 2 <= '1.0' -- !query schema -struct<(2 <= 1.0):boolean> +struct<> -- !query output -false +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1.0'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "2 <= '1.0'" + } ] +} -- !query select 2 <= '2.0' -- !query schema -struct<(2 <= 2.0):boolean> --- !query output -true +struct<> +-- !query output +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2.0'", + "sourceType" : "\"STRING\"", + 
"targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "2 <= '2.0'" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out index 3f9f24f817f2c..706673606625b 100644 --- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out @@ -94,9 +94,25 @@ NULL -- !query select left("abcd", -2), left("abcd", 0), left("abcd", 'a') -- !query schema -struct +struct<> -- !query output - NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 43, + "stopIndex" : 59, + "fragment" : "left(\"abcd\", 'a')" + } ] +} -- !query @@ -118,9 +134,25 @@ NULL -- !query select right("abcd", -2), right("abcd", 0), right("abcd", 'a') -- !query schema -struct +struct<> -- !query output - NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 45, + "stopIndex" : 62, + "fragment" : "right(\"abcd\", 'a')" + } ] +} -- !query @@ -466,17 +498,49 @@ bar -- !query SELECT lpad('hi', 'invalid_length') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'invalid_length'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "lpad('hi', 'invalid_length')" + } ] +} -- !query SELECT rpad('hi', 'invalid_length') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'invalid_length'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "rpad('hi', 'invalid_length')" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subexp-elimination.sql.out b/sql/core/src/test/resources/sql-tests/results/subexp-elimination.sql.out index 0f7ff3f107567..e355055895162 100644 --- a/sql/core/src/test/resources/sql-tests/results/subexp-elimination.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subexp-elimination.sql.out @@ -48,12 +48,12 @@ NULL -- !query SELECT case when from_json(a, 'struct').a > 5 then from_json(a, 'struct').b when from_json(a, 'struct').a > 4 then from_json(a, 'struct').b + 1 else from_json(a, 'struct').b + 2 end FROM testData -- !query schema -struct 5) THEN from_json(a).b WHEN (from_json(a).a > 4) THEN (from_json(a).b + 1) ELSE (from_json(a).b + 2) END:string> +struct 5) THEN from_json(a).b WHEN (from_json(a).a > 4) THEN (from_json(a).b + 1) ELSE (from_json(a).b + 2) END:bigint> -- !query output -4.0 -4.0 -5.0 -7.0 +4 +4 
+5 +7 NULL diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ltz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ltz.sql.out index b4ab5bdeb4ff8..963505615225a 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp-ltz.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ltz.sql.out @@ -50,9 +50,16 @@ struct -- !query SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "INVALID_FRACTION_OF_SECOND", + "sqlState" : "22023", + "messageParameters" : { + "secAndMicros" : "60.007" + } +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out index 81fa1f1dc3327..3a473dad828a9 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out @@ -67,9 +67,16 @@ org.apache.spark.sql.AnalysisException -- !query SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "INVALID_FRACTION_OF_SECOND", + "sqlState" : "22023", + "messageParameters" : { + "secAndMicros" : "60.007" + } +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 0e0b014a3b161..e3cf1a1549228 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -119,9 +119,16 @@ struct -- !query SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "INVALID_FRACTION_OF_SECOND", + "sqlState" : "22023", + "messageParameters" : { + "secAndMicros" : "60.007" + } +} -- !query @@ -143,9 +150,17 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 61) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61" + } +} -- !query @@ -167,17 +182,33 @@ struct -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99" + } +} -- !query SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS", + "sqlState" : "22023", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999" + } +} -- !query @@ -357,9 +388,17 @@ struct -- !query select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : 
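The make_timestamp_ltz/make_timestamp_ntz hunks restore the INVALID_FRACTION_OF_SECOND error to the default (ANSI) goldens: a seconds field of 60 is only accepted as a rollover into the next minute when its fractional part is exactly zero, so 60.007 is rejected. A sketch, spark-shell session assumed:

import scala.util.Try

spark.conf.set("spark.sql.ansi.enabled", "true")
println(Try(spark.sql("SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007)").collect()))
// Failure(... INVALID_FRACTION_OF_SECOND ...)
spark.sql("SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.0)").show()
// 2021-07-11 06:31:00: sec = 60 with a zero fraction rolls into the next minute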
"CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2019-10-06 10:11:12.' could not be parsed at index 20" + } +} -- !query @@ -421,9 +460,17 @@ struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26" + } +} -- !query @@ -437,9 +484,17 @@ struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27" + } +} -- !query @@ -501,17 +556,33 @@ struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 7" + } +} -- !query select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text '12.1232019-10-06S10:11' could not be parsed at index 9" + } +} -- !query @@ -581,9 +652,17 @@ struct -- !query select to_timestamp("02-29", "MM-dd") -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Invalid date 'February 29' as '1970' is not a leap year" + } +} -- !query @@ -629,53 +708,17 @@ struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day -- !query select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' -- !query schema -struct<> +struct<(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:10\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'" - } ] -} +0 00:00:01.000000000 -- !query select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' -- !query schema -struct<> +struct<(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10'):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"2011-11-11 11:11:11\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT 
TIME ZONE)\"", - "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 61, - "fragment" : "'2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'" - } ] -} +0 00:00:01.000000000 -- !query @@ -705,53 +748,17 @@ struct<> -- !query select str - timestamp'2011-11-11 11:11:11' from ts_view -- !query schema -struct<> +struct<(str - TIMESTAMP '2011-11-11 11:11:11'):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "str - timestamp'2011-11-11 11:11:11'" - } ] -} +0 00:00:00.000000000 -- !query select timestamp'2011-11-11 11:11:11' - str from ts_view -- !query schema -struct<> +struct<(TIMESTAMP '2011-11-11 11:11:11' - str):interval day to second> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", - "messageParameters" : { - "inputSql" : "\"str\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 43, - "fragment" : "timestamp'2011-11-11 11:11:11' - str" - } ] -} +0 00:00:00.000000000 -- !query @@ -761,11 +768,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' + 1)\"" }, "queryContext" : [ { @@ -785,11 +792,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + TIMESTAMP '2011-11-11 11:11:11')\"" }, "queryContext" : [ { diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out b/sql/core/src/test/resources/sql-tests/results/transform.sql.out index 7975392fd0147..3c704dfcc4618 100644 --- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out @@ -611,11 +611,11 @@ FROM( SELECT (b + 1) AS result ORDER BY result -- !query schema -struct +struct -- !query output -3.0 -6.0 -9.0 +3 +6 +9 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out 
b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out index df1fe996781ad..94048ac8897bb 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out @@ -82,33 +82,91 @@ NULL -- !query SELECT try_sum(col / 0) FROM VALUES (5), (10), (15) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_sum(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_sum(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_sum(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) -- !query schema -struct +struct<> -- !query output --9223372036854775806 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 23, + "fragment" : "col + 1L" + } ] +} -- !query @@ -232,33 +290,91 @@ NULL -- !query SELECT try_avg(col / 0) FROM VALUES (5), (10), (15) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_avg(col / 0) FROM VALUES (5.0), (10.0), (15.0) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_avg(col / 0) FROM VALUES (NULL), (10), (15) AS tab(col) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + 
"objectName" : "", + "startIndex" : 16, + "stopIndex" : 22, + "fragment" : "col / 0" + } ] +} -- !query SELECT try_avg(col + 1L) FROM VALUES (9223372036854775807L), (1L) AS tab(col) -- !query schema -struct +struct<> -- !query output --4.6116860184273879E18 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 16, + "stopIndex" : 23, + "fragment" : "col + 1L" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index b12680c2a6751..acf6e70a50dea 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -26,9 +26,9 @@ struct -- !query SELECT try_add(2147483647, "1") -- !query schema -struct +struct -- !query output -2.147483648E9 +2147483648 -- !query @@ -58,25 +58,71 @@ NULL -- !query SELECT try_add(1, (2147483647 + 1)) -- !query schema -struct +struct<> -- !query output --2147483647 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "integer overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 20, + "stopIndex" : 33, + "fragment" : "2147483647 + 1" + } ] +} -- !query SELECT try_add(1L, (9223372036854775807L + 1L)) -- !query schema -struct +struct<> -- !query output --9223372036854775807 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 21, + "stopIndex" : 45, + "fragment" : "9223372036854775807L + 1L" + } ] +} -- !query SELECT try_add(1, 1.0 / 0.0) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 19, + "stopIndex" : 27, + "fragment" : "1.0 / 0.0" + } ] +} -- !query @@ -244,25 +290,71 @@ NULL -- !query SELECT try_divide(1, (2147483647 + 1)) -- !query schema -struct +struct<> -- !query output --4.6566128730773926E-10 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "integer overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 23, + "stopIndex" : 36, + "fragment" : "2147483647 + 1" + } ] +} -- !query SELECT try_divide(1L, (9223372036854775807L + 1L)) -- !query schema -struct +struct<> -- !query output --1.0842021724855044E-19 
+org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 24, + "stopIndex" : 48, + "fragment" : "9223372036854775807L + 1L" + } ] +} -- !query SELECT try_divide(1, 1.0 / 0.0) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 22, + "stopIndex" : 30, + "fragment" : "1.0 / 0.0" + } ] +} -- !query @@ -356,9 +448,9 @@ struct -- !query SELECT try_subtract(2147483647, "-1") -- !query schema -struct +struct -- !query output -2.147483648E9 +2147483648 -- !query @@ -388,25 +480,71 @@ NULL -- !query SELECT try_subtract(1, (2147483647 + 1)) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "integer overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 25, + "stopIndex" : 38, + "fragment" : "2147483647 + 1" + } ] +} -- !query SELECT try_subtract(1L, (9223372036854775807L + 1L)) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 26, + "stopIndex" : 50, + "fragment" : "9223372036854775807L + 1L" + } ] +} -- !query SELECT try_subtract(1, 1.0 / 0.0) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 24, + "stopIndex" : 32, + "fragment" : "1.0 / 0.0" + } ] +} -- !query @@ -468,9 +606,9 @@ struct -- !query SELECT try_multiply(2147483647, "-2") -- !query schema -struct +struct -- !query output --4.294967294E9 +-4294967294 -- !query @@ -500,25 +638,71 @@ NULL -- !query SELECT try_multiply(1, (2147483647 + 1)) -- !query schema -struct +struct<> -- !query output --2147483648 +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "integer overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 25, + "stopIndex" : 38, + "fragment" : "2147483647 + 1" + } ] +} -- !query SELECT try_multiply(1L, (9223372036854775807L + 1L)) -- !query schema -struct +struct<> -- !query output --9223372036854775808 
+org.apache.spark.SparkArithmeticException +{ + "errorClass" : "ARITHMETIC_OVERFLOW", + "sqlState" : "22003", + "messageParameters" : { + "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", + "config" : "\"spark.sql.ansi.enabled\"", + "message" : "long overflow" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 26, + "stopIndex" : 50, + "fragment" : "9223372036854775807L + 1L" + } ] +} -- !query SELECT try_multiply(1, 1.0 / 0.0) -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkArithmeticException +{ + "errorClass" : "DIVIDE_BY_ZERO", + "sqlState" : "22012", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 24, + "stopIndex" : 32, + "fragment" : "1.0 / 0.0" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out index ee0536967ad3d..f3263241a5561 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out @@ -10,193 +10,625 @@ struct<> -- !query SELECT cast(1 as binary) = '1' FROM t -- !query schema -struct<(CAST(1 AS BINARY) = 1):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) > '2' FROM t -- !query schema -struct<(CAST(1 AS BINARY) > 2):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) >= '2' FROM t -- !query schema -struct<(CAST(1 AS BINARY) >= 2):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) < '2' FROM t -- !query schema -struct<(CAST(1 AS BINARY) < 2):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + 
"messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <= '2' FROM t -- !query schema -struct<(CAST(1 AS BINARY) <= 2):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <> '2' FROM t -- !query schema -struct<(NOT (CAST(1 AS BINARY) = 2)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) = cast(null as string) FROM t -- !query schema -struct<(CAST(1 AS BINARY) = CAST(NULL AS STRING)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) > cast(null as string) FROM t -- !query schema -struct<(CAST(1 AS BINARY) > CAST(NULL AS STRING)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) >= cast(null as string) FROM t -- !query schema -struct<(CAST(1 AS BINARY) >= CAST(NULL AS STRING)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + 
"stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) < cast(null as string) FROM t -- !query schema -struct<(CAST(1 AS BINARY) < CAST(NULL AS STRING)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <= cast(null as string) FROM t -- !query schema -struct<(CAST(1 AS BINARY) <= CAST(NULL AS STRING)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(1 as binary) <> cast(null as string) FROM t -- !query schema -struct<(NOT (CAST(1 AS BINARY) = CAST(NULL AS STRING))):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '1' = cast(1 as binary) FROM t -- !query schema -struct<(1 = CAST(1 AS BINARY)):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' > cast(1 as binary) FROM t -- !query schema -struct<(2 > CAST(1 AS BINARY)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' >= cast(1 as binary) FROM t -- !query schema -struct<(2 >= CAST(1 AS BINARY)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : 
"DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' < cast(1 as binary) FROM t -- !query schema -struct<(2 < CAST(1 AS BINARY)):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 14, + "stopIndex" : 30, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' <= cast(1 as binary) FROM t -- !query schema -struct<(2 <= CAST(1 AS BINARY)):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT '2' <> cast(1 as binary) FROM t -- !query schema -struct<(NOT (2 = CAST(1 AS BINARY))):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 31, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) = cast(1 as binary) FROM t -- !query schema -struct<(CAST(NULL AS STRING) = CAST(1 AS BINARY)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) > cast(1 as binary) FROM t -- !query schema -struct<(CAST(NULL AS STRING) > CAST(1 AS BINARY)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + 
"objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) >= cast(1 as binary) FROM t -- !query schema -struct<(CAST(NULL AS STRING) >= CAST(1 AS BINARY)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) < cast(1 as binary) FROM t -- !query schema -struct<(CAST(NULL AS STRING) < CAST(1 AS BINARY)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 31, + "stopIndex" : 47, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) <= cast(1 as binary) FROM t -- !query schema -struct<(CAST(NULL AS STRING) <= CAST(1 AS BINARY)):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query SELECT cast(null as string) <> cast(1 as binary) FROM t -- !query schema -struct<(NOT (CAST(NULL AS STRING) = CAST(1 AS BINARY))):boolean> +struct<> -- !query output -NULL +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.CAST_WITH_CONF_SUGGESTION", + "sqlState" : "42K09", + "messageParameters" : { + "config" : "\"spark.sql.ansi.enabled\"", + "configVal" : "'false'", + "sqlExpr" : "\"CAST(1 AS BINARY)\"", + "srcType" : "\"INT\"", + "targetType" : "\"BINARY\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 32, + "stopIndex" : 48, + "fragment" : "cast(1 as binary)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index 9b363b15c507f..f83284e9bf6f0 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -10,57 +10,169 @@ struct<> -- !query SELECT true = cast(1 as tinyint) FROM t -- !query schema -struct<(true = CAST(1 AS TINYINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + 
"messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(true = CAST(1 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "true = cast(1 as tinyint)" + } ] +} -- !query SELECT true = cast(1 as smallint) FROM t -- !query schema -struct<(true = CAST(1 AS SMALLINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(true = CAST(1 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true = cast(1 as smallint)" + } ] +} -- !query SELECT true = cast(1 as int) FROM t -- !query schema -struct<(true = CAST(1 AS INT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(true = CAST(1 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "true = cast(1 as int)" + } ] +} -- !query SELECT true = cast(1 as bigint) FROM t -- !query schema -struct<(true = CAST(1 AS BIGINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(true = CAST(1 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "true = cast(1 as bigint)" + } ] +} -- !query SELECT true = cast(1 as float) FROM t -- !query schema -struct<(true = CAST(1 AS FLOAT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(true = CAST(1 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "true = cast(1 as float)" + } ] +} -- !query SELECT true = cast(1 as double) FROM t -- !query schema -struct<(true = CAST(1 AS DOUBLE)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(true = CAST(1 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "true = cast(1 as double)" + } ] +} -- !query SELECT true = cast(1 as decimal(10, 0)) FROM t -- !query schema -struct<(true = CAST(1 AS DECIMAL(10,0))):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(true = CAST(1 
AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "true = cast(1 as decimal(10, 0))" + } ] +} -- !query @@ -154,57 +266,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT true <=> cast(1 as tinyint) FROM t -- !query schema -struct<(true <=> CAST(1 AS TINYINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "true <=> cast(1 as tinyint)" + } ] +} -- !query SELECT true <=> cast(1 as smallint) FROM t -- !query schema -struct<(true <=> CAST(1 AS SMALLINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "true <=> cast(1 as smallint)" + } ] +} -- !query SELECT true <=> cast(1 as int) FROM t -- !query schema -struct<(true <=> CAST(1 AS INT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "true <=> cast(1 as int)" + } ] +} -- !query SELECT true <=> cast(1 as bigint) FROM t -- !query schema -struct<(true <=> CAST(1 AS BIGINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true <=> cast(1 as bigint)" + } ] +} -- !query SELECT true <=> cast(1 as float) FROM t -- !query schema -struct<(true <=> CAST(1 AS FLOAT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(true <=> CAST(1 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "true <=> cast(1 as float)" + } ] +} -- !query SELECT true <=> cast(1 as double) FROM t -- !query schema -struct<(true <=> CAST(1 AS DOUBLE)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" 
: "\"(true <=> CAST(1 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true <=> cast(1 as double)" + } ] +} -- !query SELECT true <=> cast(1 as decimal(10, 0)) FROM t -- !query schema -struct<(true <=> CAST(1 AS DECIMAL(10,0))):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(true <=> CAST(1 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 41, + "fragment" : "true <=> cast(1 as decimal(10, 0))" + } ] +} -- !query @@ -298,57 +522,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) = true FROM t -- !query schema -struct<(CAST(1 AS TINYINT) = true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS TINYINT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(1 as tinyint) = true" + } ] +} -- !query SELECT cast(1 as smallint) = true FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) = true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS SMALLINT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as smallint) = true" + } ] +} -- !query SELECT cast(1 as int) = true FROM t -- !query schema -struct<(CAST(1 AS INT) = true):boolean> --- !query output -true - - --- !query -SELECT cast(1 as bigint) = true FROM t --- !query schema -struct<(CAST(1 AS BIGINT) = true):boolean> +struct<> -- !query output -true - +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS INT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 28, + "fragment" : "cast(1 as int) = true" + } ] +} + + +-- !query +SELECT cast(1 as bigint) = true FROM t +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS BIGINT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(1 as bigint) = true" + } ] +} + -- !query SELECT cast(1 as float) = true FROM t -- !query schema -struct<(CAST(1 AS FLOAT) = true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : 
"DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS FLOAT) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "cast(1 as float) = true" + } ] +} -- !query SELECT cast(1 as double) = true FROM t -- !query schema -struct<(CAST(1 AS DOUBLE) = true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DOUBLE) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(1 as double) = true" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) = true FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(10,0)) = true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 39, + "fragment" : "cast(1 as decimal(10, 0)) = true" + } ] +} -- !query @@ -442,57 +778,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) <=> true FROM t -- !query schema -struct<(CAST(1 AS TINYINT) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS TINYINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(1 as tinyint) <=> true" + } ] +} -- !query SELECT cast(1 as smallint) <=> true FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS SMALLINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast(1 as smallint) <=> true" + } ] +} -- !query SELECT cast(1 as int) <=> true FROM t -- !query schema -struct<(CAST(1 AS INT) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS INT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "cast(1 as int) <=> true" + } ] +} -- !query SELECT cast(1 as bigint) <=> true FROM t -- !query schema -struct<(CAST(1 AS BIGINT) <=> true):boolean> +struct<> -- !query output -true 
+org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS BIGINT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as bigint) <=> true" + } ] +} -- !query SELECT cast(1 as float) <=> true FROM t -- !query schema -struct<(CAST(1 AS FLOAT) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS FLOAT) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(1 as float) <=> true" + } ] +} -- !query SELECT cast(1 as double) <=> true FROM t -- !query schema -struct<(CAST(1 AS DOUBLE) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DOUBLE) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(1 as double) <=> true" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <=> true FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(10,0)) <=> true):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) <=> true)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 41, + "fragment" : "cast(1 as decimal(10, 0)) <=> true" + } ] +} -- !query @@ -586,57 +1034,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT false = cast(0 as tinyint) FROM t -- !query schema -struct<(false = CAST(0 AS TINYINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(false = CAST(0 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "false = cast(0 as tinyint)" + } ] +} -- !query SELECT false = cast(0 as smallint) FROM t -- !query schema -struct<(false = CAST(0 AS SMALLINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(false = CAST(0 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false = cast(0 as smallint)" + } ] +} -- !query SELECT false = cast(0 as int) FROM t -- !query schema -struct<(false = CAST(0 AS 
INT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(false = CAST(0 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "false = cast(0 as int)" + } ] +} -- !query SELECT false = cast(0 as bigint) FROM t -- !query schema -struct<(false = CAST(0 AS BIGINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(false = CAST(0 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "false = cast(0 as bigint)" + } ] +} -- !query SELECT false = cast(0 as float) FROM t -- !query schema -struct<(false = CAST(0 AS FLOAT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(false = CAST(0 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "false = cast(0 as float)" + } ] +} -- !query SELECT false = cast(0 as double) FROM t -- !query schema -struct<(false = CAST(0 AS DOUBLE)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(false = CAST(0 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "false = cast(0 as double)" + } ] +} -- !query SELECT false = cast(0 as decimal(10, 0)) FROM t -- !query schema -struct<(false = CAST(0 AS DECIMAL(10,0))):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(false = CAST(0 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "false = cast(0 as decimal(10, 0))" + } ] +} -- !query @@ -730,57 +1290,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT false <=> cast(0 as tinyint) FROM t -- !query schema -struct<(false <=> CAST(0 AS TINYINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"TINYINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS TINYINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "false <=> cast(0 as tinyint)" + } ] +} -- !query SELECT false <=> cast(0 as smallint) FROM t -- !query 
schema -struct<(false <=> CAST(0 AS SMALLINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"SMALLINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS SMALLINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "false <=> cast(0 as smallint)" + } ] +} -- !query SELECT false <=> cast(0 as int) FROM t -- !query schema -struct<(false <=> CAST(0 AS INT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS INT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "false <=> cast(0 as int)" + } ] +} -- !query SELECT false <=> cast(0 as bigint) FROM t -- !query schema -struct<(false <=> CAST(0 AS BIGINT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"BIGINT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS BIGINT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false <=> cast(0 as bigint)" + } ] +} -- !query SELECT false <=> cast(0 as float) FROM t -- !query schema -struct<(false <=> CAST(0 AS FLOAT)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"FLOAT\"", + "sqlExpr" : "\"(false <=> CAST(0 AS FLOAT))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "false <=> cast(0 as float)" + } ] +} -- !query SELECT false <=> cast(0 as double) FROM t -- !query schema -struct<(false <=> CAST(0 AS DOUBLE)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DOUBLE\"", + "sqlExpr" : "\"(false <=> CAST(0 AS DOUBLE))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false <=> cast(0 as double)" + } ] +} -- !query SELECT false <=> cast(0 as decimal(10, 0)) FROM t -- !query schema -struct<(false <=> CAST(0 AS DECIMAL(10,0))):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BOOLEAN\"", + "right" : "\"DECIMAL(10,0)\"", + "sqlExpr" : "\"(false <=> CAST(0 AS DECIMAL(10,0)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "false <=> cast(0 as decimal(10, 0))" + } ] +} -- !query @@ -874,57 +1546,169 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(0 as tinyint) = false FROM t -- !query schema -struct<(CAST(0 AS TINYINT) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS TINYINT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(0 as tinyint) = false" + } ] +} -- !query SELECT cast(0 as smallint) = false FROM t -- !query schema -struct<(CAST(0 AS SMALLINT) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS SMALLINT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as smallint) = false" + } ] +} -- !query SELECT cast(0 as int) = false FROM t -- !query schema -struct<(CAST(0 AS INT) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS INT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 29, + "fragment" : "cast(0 as int) = false" + } ] +} -- !query SELECT cast(0 as bigint) = false FROM t -- !query schema -struct<(CAST(0 AS BIGINT) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS BIGINT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(0 as bigint) = false" + } ] +} -- !query SELECT cast(0 as float) = false FROM t -- !query schema -struct<(CAST(0 AS FLOAT) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS FLOAT) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(0 as float) = false" + } ] +} -- !query SELECT cast(0 as double) = false FROM t -- !query schema -struct<(CAST(0 AS DOUBLE) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DOUBLE) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "cast(0 as double) = false" + } ] +} -- !query SELECT cast(0 as decimal(10, 0)) = 
false FROM t -- !query schema -struct<(CAST(0 AS DECIMAL(10,0)) = false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DECIMAL(10,0)) = false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 40, + "fragment" : "cast(0 as decimal(10, 0)) = false" + } ] +} -- !query @@ -1018,57 +1802,169 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(0 as tinyint) <=> false FROM t -- !query schema -struct<(CAST(0 AS TINYINT) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TINYINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS TINYINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast(0 as tinyint) <=> false" + } ] +} -- !query SELECT cast(0 as smallint) <=> false FROM t -- !query schema -struct<(CAST(0 AS SMALLINT) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"SMALLINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS SMALLINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "cast(0 as smallint) <=> false" + } ] +} -- !query SELECT cast(0 as int) <=> false FROM t -- !query schema -struct<(CAST(0 AS INT) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"INT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS INT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "cast(0 as int) <=> false" + } ] +} -- !query SELECT cast(0 as bigint) <=> false FROM t -- !query schema -struct<(CAST(0 AS BIGINT) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"BIGINT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS BIGINT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as bigint) <=> false" + } ] +} -- !query SELECT cast(0 as float) <=> false FROM t -- !query schema -struct<(CAST(0 AS FLOAT) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"FLOAT\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS FLOAT) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast(0 as float) 
<=> false" + } ] +} -- !query SELECT cast(0 as double) <=> false FROM t -- !query schema -struct<(CAST(0 AS DOUBLE) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DOUBLE\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DOUBLE) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast(0 as double) <=> false" + } ] +} -- !query SELECT cast(0 as decimal(10, 0)) <=> false FROM t -- !query schema -struct<(CAST(0 AS DECIMAL(10,0)) <=> false):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(0 AS DECIMAL(10,0)) <=> false)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "cast(0 as decimal(10, 0)) <=> false" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out index 7973d11573767..35ff9e79d9808 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out @@ -42,7 +42,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as float) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -66,7 +66,7 @@ struct +struct -- !query output 1 @@ -202,7 +202,7 @@ struct +struct -- !query output 1.0 @@ -226,7 +226,7 @@ struct +struct -- !query output 1 @@ -362,7 +362,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as float) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -386,7 +386,7 @@ struct +struct -- !query output 1 @@ -522,7 +522,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as float) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -546,7 +546,7 @@ struct +struct -- !query output 1 @@ -650,7 +650,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as tinyint) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -658,7 +658,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as smallint) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -666,7 +666,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as int) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -674,7 +674,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as bigint) END FROM t -- !query schema -struct +struct -- !query output 1.0 @@ -706,7 +706,7 @@ struct +struct -- !query output 1.0 @@ -866,7 +866,7 @@ struct +struct -- !query output 1.0 @@ -1026,9 +1026,9 @@ struct +struct -- !query output -1 +1.0 -- !query @@ -1130,7 +1130,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as tinyint) END FROM t -- !query schema -struct +struct -- 
!query output 1 @@ -1138,7 +1138,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as smallint) END FROM t -- !query schema -struct +struct -- !query output 1 @@ -1146,7 +1146,7 @@ struct +struct -- !query output 1 @@ -1154,7 +1154,7 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as bigint) END FROM t -- !query schema -struct +struct -- !query output 1 @@ -1162,25 +1162,25 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as float) END FROM t -- !query schema -struct +struct -- !query output -1 +1.0 -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as double) END FROM t -- !query schema -struct +struct -- !query output -1 +1.0 -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as decimal(10, 0)) END FROM t -- !query schema -struct +struct -- !query output -1 +1.0 -- !query @@ -1194,67 +1194,67 @@ struct -- !query SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t -- !query schema +struct +-- !query output +1 + + +-- !query +SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t +-- !query schema +struct +-- !query output +true + + +-- !query +SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00.0' as timestamp) END FROM t +-- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BINARY) END\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 73, - "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END" + "stopIndex" : 96, + "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00.0' as timestamp) END" } ] } -- !query -SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t +SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00' as date) END FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 72, - "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END" + "stopIndex" : 89, + "fragment" : "CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00' as date) END" } ] } --- !query -SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00.0' as timestamp) END FROM t --- !query schema -struct --- !query output -1 - - --- !query -SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2017-12-11 09:30:00' as date) END FROM t --- !query 
schema -struct --- !query output -1 - - -- !query SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as tinyint) END FROM t -- !query schema @@ -1426,25 +1426,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS BINARY) ELSE CAST(2 AS STRING) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 73, - "fragment" : "CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END" - } ] -} +1 -- !query @@ -1698,25 +1682,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`casewhen`", - "sqlExpr" : "\"CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 72, - "fragment" : "CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END" - } ] -} +true -- !query @@ -1970,7 +1938,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast(2 as string) END FROM t -- !query schema -struct +struct -- !query output 2017-12-12 09:30:00 @@ -2210,7 +2178,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as string) END FROM t -- !query schema -struct +struct -- !query output 2017-12-12 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/concat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/concat.sql.out index bb02058f6c4ad..0f42834d28246 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/concat.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/concat.sql.out @@ -311,6 +311,22 @@ SELECT (string_array1 || int_array2) sti_array FROM various_arrays -- !query schema -struct,si_array:array,ib_array:array,bd_array:array,dd_array:array,df_array:array,std_array:array,tst_array:array,sti_array:array> +struct<> -- !query output -[2,1,3,4] [2,1,3,4] [2,1,3,4] [2,1,9223372036854775808,9223372036854775809] [9.223372036854776E18,9.223372036854776E18,3.0,4.0] [2.0,1.0,3.0,4.0] ["a","b","2016-03-12","2016-03-11"] ["2016-11-15 20:54:00","2016-11-12 20:54:00","c","d"] ["a","b","3","4"] +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'a'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 295, + "stopIndex" : 322, + "fragment" : "string_array1 || data_array2" + } ] +} diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index b23e57c470fe4..54e26851ba57e 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -7834,33 +7834,97 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) = cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) = cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) = cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) = cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) = cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) = cast(1 as boolean)" + } ] +} -- !query @@ -8922,33 +8986,97 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) <=> cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + 
"sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(3, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) <=> cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(5, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <=> cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(10, 0)) <=> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) <=> cast(1 as boolean) FROM t -- !query schema -struct<(CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS BOOLEAN)):boolean> +struct<> -- !query output -true +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) <=> CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(20, 0)) <=> cast(1 as boolean)" + } ] +} -- !query @@ -14618,33 +14746,97 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as decimal(3, 0)) <> cast(1 as boolean) FROM t -- !query schema -struct<(NOT (CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(3,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(5, 0)) <> cast(1 as boolean) FROM t -- !query schema -struct<(NOT (CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(5,0)\"", + "right" : "\"BOOLEAN\"", + 
"sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(10, 0)) <> cast(1 as boolean) FROM t -- !query schema -struct<(NOT (CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(10,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) <> cast(1 as boolean)" + } ] +} -- !query SELECT cast(1 as decimal(20, 0)) <> cast(1 as boolean) FROM t -- !query schema -struct<(NOT (CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))):boolean> +struct<> -- !query output -false +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"DECIMAL(20,0)\"", + "right" : "\"BOOLEAN\"", + "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) = CAST(1 AS BOOLEAN))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) <> cast(1 as boolean)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out index 5f16135fcaf4f..cf6931a4ffdaa 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out @@ -1186,9 +1186,25 @@ struct<(CAST(1 AS STRING) / CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT cast(1 as string) / cast(1 as string) FROM t -- !query schema -struct<(CAST(1 AS STRING) / CAST(1 AS STRING)):double> +struct<> -- !query output -1.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", + "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS STRING))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 44, + "fragment" : "cast(1 as string) / cast(1 as string)" + } ] +} -- !query @@ -1198,11 +1214,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -1222,11 +1238,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : 
"\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -1246,11 +1262,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -1270,11 +1286,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS STRING) / CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -1462,11 +1478,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -1750,11 +1766,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -2038,11 +2054,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) / CAST(1 AS STRING))\"" }, "queryContext" : [ { @@ -2326,11 +2342,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) / CAST(1 AS STRING))\"" }, "queryContext" : [ { diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out index c0c3cefab8413..b2e0f50028a4e 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out @@ -42,7 +42,7 @@ struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS BIGINT))):bigint> -- !query SELECT IF(true, cast(1 as tinyint), cast(2 as float)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS FLOAT))):float> +struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS FLOAT))):double> -- !query output 1.0 @@ -66,7 +66,7 @@ struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS DECIMAL(10,0)))):decimal(10,0)> -- !query SELECT IF(true, cast(1 as tinyint), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS TINYINT), CAST(2 AS STRING))):bigint> -- !query output 1 @@ -202,7 +202,7 @@ struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BIGINT))):bigint> -- !query SELECT IF(true, cast(1 as smallint), cast(2 as float)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS FLOAT))):float> +struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS FLOAT))):double> -- !query output 1.0 @@ -226,7 +226,7 @@ struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS DECIMAL(10,0)))):decimal(10,0)> -- !query SELECT IF(true, cast(1 as smallint), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS SMALLINT), CAST(2 AS STRING))):bigint> -- !query output 1 @@ -362,7 +362,7 @@ struct<(IF(true, CAST(1 AS INT), CAST(2 AS BIGINT))):bigint> -- !query SELECT IF(true, cast(1 as int), cast(2 as float)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS INT), CAST(2 AS FLOAT))):float> +struct<(IF(true, CAST(1 AS INT), CAST(2 AS FLOAT))):double> -- !query output 1.0 @@ -386,7 +386,7 @@ struct<(IF(true, CAST(1 AS INT), CAST(2 AS DECIMAL(10,0)))):decimal(10,0)> -- !query SELECT IF(true, cast(1 as int), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS INT), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS INT), CAST(2 AS STRING))):bigint> -- !query output 1 @@ -522,7 +522,7 @@ struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS BIGINT))):bigint> -- !query SELECT IF(true, cast(1 as bigint), cast(2 as float)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS FLOAT))):float> +struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS FLOAT))):double> -- !query output 1.0 @@ -546,7 +546,7 @@ struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS DECIMAL(10,0)))):decimal(20,0)> -- !query SELECT IF(true, cast(1 as bigint), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS BIGINT), CAST(2 AS STRING))):bigint> -- !query output 1 @@ -650,7 +650,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as float), cast(2 as tinyint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS TINYINT))):float> +struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS TINYINT))):double> -- !query output 1.0 @@ -658,7 +658,7 @@ struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS TINYINT))):float> -- !query SELECT IF(true, cast(1 as float), cast(2 as smallint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS SMALLINT))):float> +struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS SMALLINT))):double> -- !query output 1.0 @@ -666,7 +666,7 @@ struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS SMALLINT))):float> -- !query SELECT IF(true, cast(1 as float), cast(2 as int)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS 
INT))):float> +struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS INT))):double> -- !query output 1.0 @@ -674,7 +674,7 @@ struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS INT))):float> -- !query SELECT IF(true, cast(1 as float), cast(2 as bigint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS BIGINT))):float> +struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS BIGINT))):double> -- !query output 1.0 @@ -706,7 +706,7 @@ struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS DECIMAL(10,0)))):double> -- !query SELECT IF(true, cast(1 as float), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS FLOAT), CAST(2 AS STRING))):double> -- !query output 1.0 @@ -866,7 +866,7 @@ struct<(IF(true, CAST(1 AS DOUBLE), CAST(2 AS DECIMAL(10,0)))):double> -- !query SELECT IF(true, cast(1 as double), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS DOUBLE), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS DOUBLE), CAST(2 AS STRING))):double> -- !query output 1.0 @@ -1026,9 +1026,9 @@ struct<(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS DECIMAL(10,0)))):decimal(10 -- !query SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS STRING))):double> -- !query output -1 +1.0 -- !query @@ -1130,7 +1130,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as string), cast(2 as tinyint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS TINYINT))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS TINYINT))):bigint> -- !query output 1 @@ -1138,7 +1138,7 @@ struct<(IF(true, CAST(1 AS STRING), CAST(2 AS TINYINT))):string> -- !query SELECT IF(true, cast(1 as string), cast(2 as smallint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS SMALLINT))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS SMALLINT))):bigint> -- !query output 1 @@ -1146,7 +1146,7 @@ struct<(IF(true, CAST(1 AS STRING), CAST(2 AS SMALLINT))):string> -- !query SELECT IF(true, cast(1 as string), cast(2 as int)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS INT))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS INT))):bigint> -- !query output 1 @@ -1154,7 +1154,7 @@ struct<(IF(true, CAST(1 AS STRING), CAST(2 AS INT))):string> -- !query SELECT IF(true, cast(1 as string), cast(2 as bigint)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS BIGINT))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS BIGINT))):bigint> -- !query output 1 @@ -1162,25 +1162,25 @@ struct<(IF(true, CAST(1 AS STRING), CAST(2 AS BIGINT))):string> -- !query SELECT IF(true, cast(1 as string), cast(2 as float)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS FLOAT))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS FLOAT))):double> -- !query output -1 +1.0 -- !query SELECT IF(true, cast(1 as string), cast(2 as double)) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS DOUBLE))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS DOUBLE))):double> -- !query output -1 +1.0 -- !query SELECT IF(true, cast(1 as string), cast(2 as decimal(10, 0))) FROM t -- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2 AS DECIMAL(10,0)))):string> +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS DECIMAL(10,0)))):double> -- !query 
output -1 +1.0 -- !query @@ -1194,67 +1194,67 @@ struct<(IF(true, CAST(1 AS STRING), CAST(2 AS STRING))):string> -- !query SELECT IF(true, cast(1 as string), cast('2' as binary)) FROM t -- !query schema +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS BINARY))):binary> +-- !query output +1 + + +-- !query +SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t +-- !query schema +struct<(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN))):boolean> +-- !query output +true + + +-- !query +SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t +-- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS STRING), CAST(2 AS BINARY)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 55, - "fragment" : "IF(true, cast(1 as string), cast('2' as binary))" + "stopIndex" : 78, + "fragment" : "IF(true, cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp))" } ] } -- !query -SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t +SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00' as date)) FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "IF(true, cast(1 as string), cast(2 as boolean))" + "stopIndex" : 71, + "fragment" : "IF(true, cast(1 as string), cast('2017-12-11 09:30:00' as date))" } ] } --- !query -SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t --- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))):string> --- !query output -1 - - --- !query -SELECT IF(true, cast(1 as string), cast('2017-12-11 09:30:00' as date)) FROM t --- !query schema -struct<(IF(true, CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE))):string> --- !query output -1 - - -- !query SELECT IF(true, cast('1' as binary), cast(2 as tinyint)) FROM t -- !query schema @@ -1426,25 +1426,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t -- !query schema -struct<> +struct<(IF(true, CAST(1 AS BINARY), CAST(2 AS STRING))):binary> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS BINARY), CAST(2 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - 
"stopIndex" : 55, - "fragment" : "IF(true, cast('1' as binary), cast(2 as string))" - } ] -} +1 -- !query @@ -1698,25 +1682,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t -- !query schema -struct<> +struct<(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING))):boolean> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`if`", - "sqlExpr" : "\"(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 8, - "stopIndex" : 54, - "fragment" : "IF(true, cast(1 as boolean), cast(2 as string))" - } ] -} +true -- !query @@ -1970,7 +1938,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(2017-12-12 09:30:00.0 AS TIMESTAMP), CAST(2 AS STRING))):timestamp> -- !query output 2017-12-12 09:30:00 @@ -2210,7 +2178,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as string)) FROM t -- !query schema -struct<(IF(true, CAST(2017-12-12 09:30:00 AS DATE), CAST(2 AS STRING))):string> +struct<(IF(true, CAST(2017-12-12 09:30:00 AS DATE), CAST(2 AS STRING))):date> -- !query output 2017-12-12 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out index a746500c746f9..bb75fe5991acf 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out @@ -10,25 +10,25 @@ struct<> -- !query SELECT 1 + '2' FROM t -- !query schema -struct<(1 + 2):double> +struct<(1 + 2):bigint> -- !query output -3.0 +3 -- !query SELECT 1 - '2' FROM t -- !query schema -struct<(1 - 2):double> +struct<(1 - 2):bigint> -- !query output --1.0 +-1 -- !query SELECT 1 * '2' FROM t -- !query schema -struct<(1 * 2):double> +struct<(1 * 2):bigint> -- !query output -2.0 +2 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out index 3ca78fa17a64b..7c9152a66a9c1 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out @@ -708,7 +708,7 @@ SELECT cast(1 as float) in (cast(1 as string)) FROM t -- !query schema struct<(CAST(1 AS FLOAT) IN (CAST(1 AS STRING))):boolean> -- !query output -false +true -- !query @@ -868,7 +868,7 @@ SELECT cast(1 as double) in (cast(1 as string)) FROM t -- !query schema struct<(CAST(1 AS DOUBLE) IN (CAST(1 AS STRING))):boolean> -- !query output -false +true -- !query @@ -1164,7 +1164,7 @@ SELECT cast(1 as string) in (cast(1 as float)) FROM t -- !query schema struct<(CAST(1 AS STRING) IN (CAST(1 AS FLOAT))):boolean> -- !query output -false +true -- !query @@ -1172,7 +1172,7 @@ SELECT cast(1 as string) in (cast(1 as double)) 
FROM t -- !query schema struct<(CAST(1 AS STRING) IN (CAST(1 AS DOUBLE))):boolean> -- !query output -false +true -- !query @@ -1194,67 +1194,67 @@ true -- !query SELECT cast(1 as string) in (cast('1' as binary)) FROM t -- !query schema +struct<(CAST(1 AS STRING) IN (CAST(1 AS BINARY))):boolean> +-- !query output +true + + +-- !query +SELECT cast(1 as string) in (cast(1 as boolean)) FROM t +-- !query schema +struct<(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN))):boolean> +-- !query output +true + + +-- !query +SELECT cast(1 as string) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t +-- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BINARY\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS BINARY)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 26, - "stopIndex" : 49, - "fragment" : "in (cast('1' as binary))" + "stopIndex" : 72, + "fragment" : "in (cast('2017-12-11 09:30:00.0' as timestamp))" } ] } -- !query -SELECT cast(1 as string) in (cast(1 as boolean)) FROM t +SELECT cast(1 as string) in (cast('2017-12-11 09:30:00' as date)) FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"BOOLEAN\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 26, - "stopIndex" : 48, - "fragment" : "in (cast(1 as boolean))" + "stopIndex" : 65, + "fragment" : "in (cast('2017-12-11 09:30:00' as date))" } ] } --- !query -SELECT cast(1 as string) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t --- !query schema -struct<(CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))):boolean> --- !query output -false - - --- !query -SELECT cast(1 as string) in (cast('2017-12-11 09:30:00' as date)) FROM t --- !query schema -struct<(CAST(1 AS STRING) IN (CAST(2017-12-11 09:30:00 AS DATE))):boolean> --- !query output -false - - -- !query SELECT cast('1' as binary) in (cast(1 as tinyint)) FROM t -- !query schema @@ -1426,25 +1426,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) in (cast(1 as string)) FROM t -- !query schema -struct<> +struct<(CAST(1 AS BINARY) IN (CAST(1 AS STRING))):boolean> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BINARY) IN (CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 28, - "stopIndex" : 49, - "fragment" : "in (cast(1 as string))" - } ] -} +true -- !query @@ -1698,25 +1682,9 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT true in (cast(1 as string)) FROM t -- !query schema -struct<> +struct<(true IN (CAST(1 AS STRING))):boolean> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(true IN (CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 13, - "stopIndex" : 34, - "fragment" : "in (cast(1 as string))" - } ] -} +true -- !query @@ -1970,9 +1938,25 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as string)) FROM t -- !query schema -struct<(CAST(2017-12-12 09:30:00.0 AS TIMESTAMP) IN (CAST(2 AS STRING))):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 51, + "stopIndex" : 72, + "fragment" : "in (cast(2 as string))" + } ] +} -- !query @@ -2210,9 +2194,25 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as string)) FROM t -- !query schema -struct<(CAST(2017-12-12 09:30:00 AS DATE) IN (CAST(2 AS STRING))):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 44, + "stopIndex" : 65, + "fragment" : "in (cast(2 as string))" + } ] +} -- !query @@ -3466,67 +3466,67 @@ true -- !query SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t -- !query schema +struct<(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BINARY))):boolean> +-- !query output +true + + +-- !query +SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t +-- !query schema +struct<(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN))):boolean> +-- !query output +true + + +-- !query +SELECT cast(1 as string) in (cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t +-- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"STRING\", \"BINARY\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BINARY)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 26, - "stopIndex" : 68, - "fragment" : "in (cast(1 as string), cast('1' as binary))" + "stopIndex" : 91, + "fragment" : "in (cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp))" } ] } -- !query -SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t +SELECT cast(1 as string) in (cast(1 as 
string), cast('2017-12-11 09:30:00' as date)) FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "dataType" : "[\"STRING\", \"STRING\", \"BOOLEAN\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 26, - "stopIndex" : 67, - "fragment" : "in (cast(1 as string), cast(1 as boolean))" + "stopIndex" : 84, + "fragment" : "in (cast(1 as string), cast('2017-12-11 09:30:00' as date))" } ] } --- !query -SELECT cast(1 as string) in (cast(1 as string), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t --- !query schema -struct<(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))):boolean> --- !query output -true - - --- !query -SELECT cast(1 as string) in (cast(1 as string), cast('2017-12-11 09:30:00' as date)) FROM t --- !query schema -struct<(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(2017-12-11 09:30:00 AS DATE))):boolean> --- !query output -true - - -- !query SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as tinyint)) FROM t -- !query schema @@ -3698,25 +3698,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t -- !query schema -struct<> +struct<(CAST(1 AS BINARY) IN (CAST(1 AS BINARY), CAST(1 AS STRING))):boolean> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BINARY\", \"BINARY\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BINARY) IN (CAST(1 AS BINARY), CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 28, - "stopIndex" : 70, - "fragment" : "in (cast('1' as binary), cast(1 as string))" - } ] -} +true -- !query @@ -3970,25 +3954,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t -- !query schema -struct<> +struct<(CAST(1 AS BOOLEAN) IN (CAST(1 AS BOOLEAN), CAST(1 AS STRING))):boolean> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", - "sqlState" : "42K09", - "messageParameters" : { - "dataType" : "[\"BOOLEAN\", \"BOOLEAN\", \"STRING\"]", - "functionName" : "`in`", - "sqlExpr" : "\"(CAST(1 AS BOOLEAN) IN (CAST(1 AS BOOLEAN), CAST(1 AS STRING)))\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 29, - "stopIndex" : 72, - "fragment" : "in (cast('1' as boolean), cast(1 as string))" - } ] -} +true -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out index 5c00e8a5b63db..a0bd111f6ba64 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out @@ -124,36 +124,104 @@ 
struct>> SELECT map_zip_with(string_map1, int_map, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query schema -struct>> +struct<> -- !query output -{"2":{"k":"2","v1":"1","v2":1},"true":{"k":"true","v1":"false","v2":null}} +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"STRING\"", + "rightType" : "\"INT\"", + "sqlExpr" : "\"map_zip_with(string_map1, int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "map_zip_with(string_map1, int_map, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(string_map2, date_map, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query schema -struct>> +struct<> -- !query output -{"2016-03-14":{"k":"2016-03-14","v1":"2016-03-13","v2":2016-03-13}} +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"STRING\"", + "rightType" : "\"DATE\"", + "sqlExpr" : "\"map_zip_with(string_map2, date_map, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "map_zip_with(string_map2, date_map, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(timestamp_map, string_map3, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query schema -struct>> +struct<> -- !query output -{"2016-11-15 20:54:00":{"k":"2016-11-15 20:54:00","v1":2016-11-12 20:54:00,"v2":null},"2016-11-15 20:54:00.000":{"k":"2016-11-15 20:54:00.000","v1":null,"v2":"2016-11-12 20:54:00.000"}} +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"TIMESTAMP\"", + "rightType" : "\"STRING\"", + "sqlExpr" : "\"map_zip_with(timestamp_map, string_map3, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 81, + "fragment" : "map_zip_with(timestamp_map, string_map3, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query SELECT map_zip_with(decimal_map1, string_map4, (k, v1, v2) -> struct(k, v1, v2)) m FROM various_maps -- !query schema -struct>> +struct<> -- !query output -{"922337203685477897945456575809789456":{"k":"922337203685477897945456575809789456","v1":922337203685477897945456575809789456,"v2":"text"}} +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.MAP_ZIP_WITH_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "functionName" : "`map_zip_with`", + "leftType" : "\"DECIMAL(36,0)\"", + "rightType" : "\"STRING\"", + "sqlExpr" : "\"map_zip_with(decimal_map1, string_map4, lambdafunction(struct(k, v1, v2), k, v1, v2))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 80, + "fragment" : "map_zip_with(decimal_map1, string_map4, (k, v1, v2) -> struct(k, v1, v2))" + } ] +} -- !query diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out index 8fd398ff87f0b..893e9b511986b 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out @@ -78,9 +78,25 @@ SELECT map_concat(int_string_map1, tinyint_map2) istt_map FROM various_maps -- !query schema -struct,si_map:map,ib_map:map,bd_map:map,df_map:map,std_map:map,tst_map:map,sti_map:map,istt_map:map> +struct<> -- !query output -{1:2,3:4} {1:2,7:8} {4:6,8:9} {6:7,9223372036854775808:9223372036854775809} {3.0:4.0,9.223372036854776E18:9.223372036854776E18} {"2016-03-12":"2016-03-11","a":"b"} {"2016-11-15 20:54:00":"2016-11-12 20:54:00","c":"d"} {"7":"8","a":"b"} {1:"a",3:"4"} +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "dataType" : "(\"MAP\" or \"MAP\")", + "functionName" : "`map_concat`", + "sqlExpr" : "\"map_concat(string_map1, date_map2)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 257, + "stopIndex" : 290, + "fragment" : "map_concat(string_map1, date_map2)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 5005d682e1927..a97abd5dd181b 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -10,33 +10,33 @@ struct<> -- !query SELECT '1' + cast(1 as tinyint) FROM t -- !query schema -struct<(1 + CAST(1 AS TINYINT)):double> +struct<(1 + CAST(1 AS TINYINT)):bigint> -- !query output -2.0 +2 -- !query SELECT '1' + cast(1 as smallint) FROM t -- !query schema -struct<(1 + CAST(1 AS SMALLINT)):double> +struct<(1 + CAST(1 AS SMALLINT)):bigint> -- !query output -2.0 +2 -- !query SELECT '1' + cast(1 as int) FROM t -- !query schema -struct<(1 + CAST(1 AS INT)):double> +struct<(1 + CAST(1 AS INT)):bigint> -- !query output -2.0 +2 -- !query SELECT '1' + cast(1 as bigint) FROM t -- !query schema -struct<(1 + CAST(1 AS BIGINT)):double> +struct<(1 + CAST(1 AS BIGINT)):bigint> -- !query output -2.0 +2 -- !query @@ -66,9 +66,25 @@ struct<(1 + CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT '1' + '1' FROM t -- !query schema -struct<(1 + 1):double> +struct<> -- !query output -2.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "sqlExpr" : "\"(1 + 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' + '1'" + } ] +} -- !query @@ -78,11 +94,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : 
"(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -102,11 +118,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -126,11 +142,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 + CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -153,11 +169,11 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputSql" : "\"CAST(2017-12-11 09:30:00 AS DATE)\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" + "sqlExpr" : "\"date_add(1, CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { "objectType" : "", @@ -172,33 +188,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' - cast(1 as tinyint) FROM t -- !query schema -struct<(1 - CAST(1 AS TINYINT)):double> +struct<(1 - CAST(1 AS TINYINT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' - cast(1 as smallint) FROM t -- !query schema -struct<(1 - CAST(1 AS SMALLINT)):double> +struct<(1 - CAST(1 AS SMALLINT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' - cast(1 as int) FROM t -- !query schema -struct<(1 - CAST(1 AS INT)):double> +struct<(1 - CAST(1 AS INT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' - cast(1 as bigint) FROM t -- !query schema -struct<(1 - CAST(1 AS BIGINT)):double> +struct<(1 - CAST(1 AS BIGINT)):bigint> -- !query output -0.0 +0 -- !query @@ -228,9 +244,25 @@ struct<(1 - CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT '1' - '1' FROM t -- !query schema -struct<(1 - 1):double> +struct<> -- !query output -0.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", + "sqlExpr" : "\"(1 - 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' - '1'" + } ] +} -- !query @@ -240,11 +272,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" 
: "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 - CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -264,11 +296,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(1 - CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -286,16 +318,14 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"STRING\"", - "paramIndex" : "first", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(1 - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", @@ -310,41 +340,57 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' - cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 - CAST(2017-12-11 09:30:00 AS DATE)):interval day> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' - cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query SELECT '1' * cast(1 as tinyint) FROM t -- !query schema -struct<(1 * CAST(1 AS TINYINT)):double> +struct<(1 * CAST(1 AS TINYINT)):bigint> -- !query output -1.0 +1 -- !query SELECT '1' * cast(1 as smallint) FROM t -- !query schema -struct<(1 * CAST(1 AS SMALLINT)):double> +struct<(1 * CAST(1 AS SMALLINT)):bigint> -- !query output -1.0 +1 -- !query SELECT '1' * cast(1 as int) FROM t -- !query schema -struct<(1 * CAST(1 AS INT)):double> +struct<(1 * CAST(1 AS INT)):bigint> -- !query output -1.0 +1 -- !query SELECT '1' * cast(1 as bigint) FROM t -- !query schema -struct<(1 * CAST(1 AS BIGINT)):double> +struct<(1 * CAST(1 AS BIGINT)):bigint> -- !query output -1.0 +1 -- !query @@ -374,9 +420,25 @@ struct<(1 * CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT '1' * '1' FROM t -- !query schema -struct<(1 * 1):double> +struct<> -- !query output -1.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"(1 * 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' * '1'" + } ] +} -- !query @@ -386,11 +448,11 @@ struct<> -- !query output 
org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -410,11 +472,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -434,11 +496,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -458,11 +520,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 * CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -534,9 +596,25 @@ struct<(1 / CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT '1' / '1' FROM t -- !query schema -struct<(1 / 1):double> +struct<> -- !query output -1.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", + "sqlExpr" : "\"(1 / 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' / '1'" + } ] +} -- !query @@ -546,11 +624,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -570,11 +648,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -594,11 +672,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : 
"DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -618,11 +696,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(1 / CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -638,33 +716,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT '1' % cast(1 as tinyint) FROM t -- !query schema -struct<(1 % CAST(1 AS TINYINT)):double> +struct<(1 % CAST(1 AS TINYINT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' % cast(1 as smallint) FROM t -- !query schema -struct<(1 % CAST(1 AS SMALLINT)):double> +struct<(1 % CAST(1 AS SMALLINT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' % cast(1 as int) FROM t -- !query schema -struct<(1 % CAST(1 AS INT)):double> +struct<(1 % CAST(1 AS INT)):bigint> -- !query output -0.0 +0 -- !query SELECT '1' % cast(1 as bigint) FROM t -- !query schema -struct<(1 % CAST(1 AS BIGINT)):double> +struct<(1 % CAST(1 AS BIGINT)):bigint> -- !query output -0.0 +0 -- !query @@ -694,9 +772,25 @@ struct<(1 % CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT '1' % '1' FROM t -- !query schema -struct<(1 % 1):double> +struct<> -- !query output -0.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"(1 % 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "'1' % '1'" + } ] +} -- !query @@ -706,11 +800,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -730,11 +824,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -754,11 +848,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ 
-778,11 +872,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(1 % CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -798,33 +892,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT pmod('1', cast(1 as tinyint)) FROM t -- !query schema -struct<pmod(1, CAST(1 AS TINYINT)):double> +struct<pmod(1, CAST(1 AS TINYINT)):bigint> -- !query output -0.0 +0 -- !query SELECT pmod('1', cast(1 as smallint)) FROM t -- !query schema -struct<pmod(1, CAST(1 AS SMALLINT)):double> +struct<pmod(1, CAST(1 AS SMALLINT)):bigint> -- !query output -0.0 +0 -- !query SELECT pmod('1', cast(1 as int)) FROM t -- !query schema -struct<pmod(1, CAST(1 AS INT)):double> +struct<pmod(1, CAST(1 AS INT)):bigint> -- !query output -0.0 +0 -- !query SELECT pmod('1', cast(1 as bigint)) FROM t -- !query schema -struct<pmod(1, CAST(1 AS BIGINT)):double> +struct<pmod(1, CAST(1 AS BIGINT)):bigint> -- !query output -0.0 +0 -- !query @@ -854,9 +948,25 @@ struct<pmod(1, CAST(1 AS DECIMAL(10,0))):double> -- !query SELECT pmod('1', '1') FROM t -- !query schema -struct<pmod(1, 1):double> +struct<> -- !query output -0.0 +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", + "sqlState" : "42K09", + "messageParameters" : { + "actualDataType" : "\"STRING\"", + "inputType" : "\"NUMERIC\"", + "sqlExpr" : "\"pmod(1, 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 21, + "fragment" : "pmod('1', '1')" + } ] +} -- !query @@ -866,11 +976,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BINARY\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(1 AS BINARY))\"" }, "queryContext" : [ { @@ -890,11 +1000,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"BOOLEAN\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(1 AS BOOLEAN))\"" }, "queryContext" : [ { @@ -914,11 +1024,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"TIMESTAMP\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, "queryContext" : [ { @@ -938,11 +1048,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DOUBLE\"", - "right" : "\"DATE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(1, CAST(2017-12-11 09:30:00 AS DATE))\"" }, "queryContext" : [ { @@ -958,33 +1068,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) + '1' FROM t -- !query
schema -struct<(CAST(1 AS TINYINT) + 1):double> +struct<(CAST(1 AS TINYINT) + 1):bigint> -- !query output -2.0 +2 -- !query SELECT cast(1 as smallint) + '1' FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) + 1):double> +struct<(CAST(1 AS SMALLINT) + 1):bigint> -- !query output -2.0 +2 -- !query SELECT cast(1 as int) + '1' FROM t -- !query schema -struct<(CAST(1 AS INT) + 1):double> +struct<(CAST(1 AS INT) + 1):bigint> -- !query output -2.0 +2 -- !query SELECT cast(1 as bigint) + '1' FROM t -- !query schema -struct<(CAST(1 AS BIGINT) + 1):double> +struct<(CAST(1 AS BIGINT) + 1):bigint> -- !query output -2.0 +2 -- !query @@ -1018,11 +1128,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) + 1)\"" }, "queryContext" : [ { @@ -1042,11 +1152,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) + 1)\"" }, "queryContext" : [ { @@ -1066,11 +1176,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + 1)\"" }, "queryContext" : [ { @@ -1094,7 +1204,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "sqlState" : "42K09", "messageParameters" : { "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", + "inputType" : "\"DATE\"", "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" @@ -1112,33 +1222,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) - '1' FROM t -- !query schema -struct<(CAST(1 AS TINYINT) - 1):double> +struct<(CAST(1 AS TINYINT) - 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as smallint) - '1' FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) - 1):double> +struct<(CAST(1 AS SMALLINT) - 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as int) - '1' FROM t -- !query schema -struct<(CAST(1 AS INT) - 1):double> +struct<(CAST(1 AS INT) - 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as bigint) - '1' FROM t -- !query schema -struct<(CAST(1 AS BIGINT) - 1):double> +struct<(CAST(1 AS BIGINT) - 1):bigint> -- !query output -0.0 +0 -- !query @@ -1172,11 +1282,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", 
+ "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) - 1)\"" }, "queryContext" : [ { @@ -1196,11 +1306,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) - 1)\"" }, "queryContext" : [ { @@ -1218,16 +1328,14 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"STRING\"", - "paramIndex" : "second", - "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", - "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 1)\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", @@ -1244,16 +1352,14 @@ SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", - "sqlState" : "42K09", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "inputSql" : "\"1\"", - "inputType" : "\"DOUBLE\"", - "paramIndex" : "second", - "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", - "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", @@ -1268,33 +1374,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) * '1' FROM t -- !query schema -struct<(CAST(1 AS TINYINT) * 1):double> +struct<(CAST(1 AS TINYINT) * 1):bigint> -- !query output -1.0 +1 -- !query SELECT cast(1 as smallint) * '1' FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) * 1):double> +struct<(CAST(1 AS SMALLINT) * 1):bigint> -- !query output -1.0 +1 -- !query SELECT cast(1 as int) * '1' FROM t -- !query schema -struct<(CAST(1 AS INT) * 1):double> +struct<(CAST(1 AS INT) * 1):bigint> -- !query output -1.0 +1 -- !query SELECT cast(1 as bigint) * '1' FROM t -- !query schema -struct<(CAST(1 AS BIGINT) * 1):double> +struct<(CAST(1 AS BIGINT) * 1):bigint> -- !query output -1.0 +1 -- !query @@ -1328,11 +1434,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : 
"\"(CAST(1 AS BINARY) * 1)\"" }, "queryContext" : [ { @@ -1352,11 +1458,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) * 1)\"" }, "queryContext" : [ { @@ -1376,11 +1482,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) * 1)\"" }, "queryContext" : [ { @@ -1400,11 +1506,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) * 1)\"" }, "queryContext" : [ { @@ -1480,11 +1586,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BINARY) / 1)\"" }, "queryContext" : [ { @@ -1504,11 +1610,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) / 1)\"" }, "queryContext" : [ { @@ -1528,11 +1634,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) / 1)\"" }, "queryContext" : [ { @@ -1552,11 +1658,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "(\"DOUBLE\" or \"DECIMAL\")", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) / 1)\"" }, "queryContext" : [ { @@ -1572,33 +1678,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as tinyint) % '1' FROM t -- !query schema -struct<(CAST(1 
AS TINYINT) % 1):double> +struct<(CAST(1 AS TINYINT) % 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as smallint) % '1' FROM t -- !query schema -struct<(CAST(1 AS SMALLINT) % 1):double> +struct<(CAST(1 AS SMALLINT) % 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as int) % '1' FROM t -- !query schema -struct<(CAST(1 AS INT) % 1):double> +struct<(CAST(1 AS INT) % 1):bigint> -- !query output -0.0 +0 -- !query SELECT cast(1 as bigint) % '1' FROM t -- !query schema -struct<(CAST(1 AS BIGINT) % 1):double> +struct<(CAST(1 AS BIGINT) % 1):bigint> -- !query output -0.0 +0 -- !query @@ -1632,11 +1738,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BINARY) % 1)\"" }, "queryContext" : [ { @@ -1656,11 +1762,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(1 AS BOOLEAN) % 1)\"" }, "queryContext" : [ { @@ -1680,11 +1786,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) % 1)\"" }, "queryContext" : [ { @@ -1704,11 +1810,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00 AS DATE) % 1)\"" }, "queryContext" : [ { @@ -1724,33 +1830,33 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT pmod(cast(1 as tinyint), '1') FROM t -- !query schema -struct<pmod(CAST(1 AS TINYINT), 1):double> +struct<pmod(CAST(1 AS TINYINT), 1):bigint> -- !query output -0.0 +0 -- !query SELECT pmod(cast(1 as smallint), '1') FROM t -- !query schema -struct<pmod(CAST(1 AS SMALLINT), 1):double> +struct<pmod(CAST(1 AS SMALLINT), 1):bigint> -- !query output -0.0 +0 -- !query SELECT pmod(cast(1 as int), '1') FROM t -- !query schema -struct<pmod(CAST(1 AS INT), 1):double> +struct<pmod(CAST(1 AS INT), 1):bigint> -- !query output -0.0 +0 -- !query SELECT pmod(cast(1 as bigint), '1') FROM t -- !query schema -struct<pmod(CAST(1 AS BIGINT), 1):double> +struct<pmod(CAST(1 AS BIGINT), 1):bigint> -- !query output -0.0 +0 -- !query @@ -1784,11 +1890,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BINARY\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BINARY\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(1 AS BINARY), 1)\"" }, "queryContext" : [ { @@ -1808,11 +1914,11 @@ struct<> -- !query output
org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"BOOLEAN\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"BOOLEAN\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(1 AS BOOLEAN), 1)\"" }, "queryContext" : [ { @@ -1832,11 +1938,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"TIMESTAMP\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"TIMESTAMP\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP), 1)\"" }, "queryContext" : [ { @@ -1856,11 +1962,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_WRONG_TYPE", "sqlState" : "42K09", "messageParameters" : { - "left" : "\"DATE\"", - "right" : "\"DOUBLE\"", + "actualDataType" : "\"DATE\"", + "inputType" : "\"NUMERIC\"", "sqlExpr" : "\"pmod(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, "queryContext" : [ { @@ -1956,17 +2062,49 @@ true -- !query SELECT '1' = cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 = CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' = cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' = cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 = CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' = cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2044,17 +2182,49 @@ true -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) = '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) = 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) = '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) = 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + 
"targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00' as date) = '1'" + } ] +} -- !query @@ -2140,17 +2310,49 @@ true -- !query SELECT '1' <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 <=> CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "'1' <=> cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' <=> cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 <=> CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 50, + "fragment" : "'1' <=> cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2228,17 +2430,49 @@ true -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) <=> 1):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) <=> '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) <=> 1):boolean> +struct<> -- !query output -false +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> '1'" + } ] +} -- !query @@ -2324,17 +2558,49 @@ false -- !query SELECT '1' < cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 < CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' < cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' < cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 < CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + 
"messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' < cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2420,17 +2686,49 @@ true -- !query SELECT '1' <= cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 <= CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "'1' <= cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' <= cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 <= CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "'1' <= cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2516,17 +2814,49 @@ false -- !query SELECT '1' > cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 > CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' > cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' > cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 > CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' > cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2612,17 +2942,49 @@ true -- !query SELECT '1' >= cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(1 >= CAST(2017-12-11 09:30:00.0 AS TIMESTAMP)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "'1' >= cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' >= cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(1 >= CAST(2017-12-11 09:30:00 AS DATE)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + 
"errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "'1' >= cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2708,17 +3070,49 @@ false -- !query SELECT '1' <> cast('2017-12-11 09:30:00.0' as timestamp) FROM t -- !query schema -struct<(NOT (1 = CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "'1' <> cast('2017-12-11 09:30:00.0' as timestamp)" + } ] +} -- !query SELECT '1' <> cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<(NOT (1 = CAST(2017-12-11 09:30:00 AS DATE))):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "'1' <> cast('2017-12-11 09:30:00' as date)" + } ] +} -- !query @@ -2804,17 +3198,49 @@ false -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) < '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) < 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) < '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) < '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) < 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00' as date) < '1'" + } ] +} -- !query @@ -2900,17 +3326,49 @@ true -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) <= 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <= '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) <= '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) <= 1):boolean> +struct<> -- 
!query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "cast('2017-12-11 09:30:00' as date) <= '1'" + } ] +} -- !query @@ -2996,17 +3454,49 @@ false -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) > '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) > 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) > '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) > '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) > 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "cast('2017-12-11 09:30:00' as date) > '1'" + } ] +} -- !query @@ -3092,17 +3582,49 @@ true -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) >= 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) >= '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) >= '1' FROM t -- !query schema -struct<(CAST(2017-12-11 09:30:00 AS DATE) >= 1):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "cast('2017-12-11 09:30:00' as date) >= '1'" + } ] +} -- !query @@ -3188,17 +3710,49 @@ false -- !query SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> '1' FROM t -- !query schema -struct<(NOT (CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) = 1)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <> '1'" + } ] +} -- !query SELECT cast('2017-12-11 09:30:00' as date) <> '1' FROM t -- !query schema
-struct<(NOT (CAST(2017-12-11 09:30:00 AS DATE) = 1)):boolean> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "cast('2017-12-11 09:30:00' as date) <> '1'" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out index 08b70ff920eb6..781fff4835c51 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out @@ -10,81 +10,241 @@ struct<> -- !query select cast(a as byte) from t -- !query schema -struct<CAST(a AS TINYINT):tinyint> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TINYINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 22, + "fragment" : "cast(a as byte)" + } ] +} -- !query select cast(a as short) from t -- !query schema -struct<CAST(a AS SMALLINT):smallint> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"SMALLINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "cast(a as short)" + } ] +} -- !query select cast(a as int) from t -- !query schema -struct<CAST(a AS INT):int> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 21, + "fragment" : "cast(a as int)" + } ] +} -- !query select cast(a as long) from t -- !query schema -struct<CAST(a AS BIGINT):bigint> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 22, + "fragment" : "cast(a as long)" + } ] +} -- !query select cast(a as float) from t -- !query schema -struct<CAST(a AS FLOAT):float> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"FLOAT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "cast(a as float)" + } ] +} -- !query select cast(a as double) from t -- !query schema -struct<CAST(a AS DOUBLE):double> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", +
"sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DOUBLE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "cast(a as double)" + } ] +} -- !query select cast(a as decimal) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DECIMAL(10,0)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "cast(a as decimal)" + } ] +} -- !query select cast(a as boolean) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkRuntimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BOOLEAN\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 25, + "fragment" : "cast(a as boolean)" + } ] +} -- !query select cast(a as timestamp) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 27, + "fragment" : "cast(a as timestamp)" + } ] +} -- !query select cast(a as date) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 22, + "fragment" : "cast(a as date)" + } ] +} -- !query @@ -170,9 +330,23 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query select to_timestamp(a) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "fragment" : "" + } ] +} -- !query @@ -195,9 +369,17 @@ org.apache.spark.SparkUpgradeException -- !query select to_unix_timestamp(a) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'aa' could not be parsed at index 0" + } +} -- !query @@ -220,9 +402,17 @@ org.apache.spark.SparkUpgradeException -- !query select unix_timestamp(a) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CANNOT_PARSE_TIMESTAMP", + "sqlState" : "22007", + "messageParameters" : { + "ansiConfig" : "\"spark.sql.ansi.enabled\"", + "message" : "Text 'aa' could not be parsed at index 0" + } +} -- !query @@ 
-245,41 +435,112 @@ org.apache.spark.SparkUpgradeException -- !query select from_unixtime(a) from t -- !query schema -struct<from_unixtime(a, yyyy-MM-dd HH:mm:ss):string> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 23, + "fragment" : "from_unixtime(a)" + } ] +} -- !query select from_unixtime('2018-01-01', a) from t -- !query schema -struct<from_unixtime(2018-01-01, a):string> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2018-01-01'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BIGINT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 37, + "fragment" : "from_unixtime('2018-01-01', a)" + } ] +} -- !query select next_day(a, 'MO') from t -- !query schema -struct<next_day(a, MO):date> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 24, + "fragment" : "next_day(a, 'MO')" + } ] +} -- !query select next_day('2018-01-01', a) from t -- !query schema -struct<next_day(2018-01-01, a):date> +struct<> -- !query output -NULL +org.apache.spark.SparkIllegalArgumentException +{ + "errorClass" : "ILLEGAL_DAY_OF_WEEK", + "sqlState" : "22009", + "messageParameters" : { + "string" : "aa" + } +} -- !query select trunc(a, 'MM') from t -- !query schema -struct<trunc(a, MM):date> +struct<> -- !query output -NULL +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 21, + "fragment" : "trunc(a, 'MM')" + } ] +} -- !query @@ -301,9 +562,25 @@ NULL -- !query select sha2(a, a) from t -- !query schema -struct<sha2(a, a):string> +struct<> -- !query output -NULL +org.apache.spark.SparkNumberFormatException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'aa'", + "sourceType" : "\"STRING\"", + "targetType" : "\"INT\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 17, + "fragment" : "sha2(a, a)" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out index 79184489758bf..678d00a39b2d5 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out @@ -46,7 +46,7 @@ struct -- !query SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as float) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -73,7 +73,7 @@ struct -- !query SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -226,7
+226,7 @@ struct -- !query SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as float) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -253,7 +253,7 @@ struct -- !query SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -406,7 +406,7 @@ struct -- !query SELECT cast(1 as int) FROM t UNION SELECT cast(2 as float) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -433,7 +433,7 @@ struct -- !query SELECT cast(1 as int) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -586,7 +586,7 @@ struct -- !query SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as float) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -613,7 +613,7 @@ struct -- !query SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -730,7 +730,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as float) FROM t UNION SELECT cast(2 as tinyint) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -739,7 +739,7 @@ struct -- !query SELECT cast(1 as float) FROM t UNION SELECT cast(2 as smallint) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -748,7 +748,7 @@ struct -- !query SELECT cast(1 as float) FROM t UNION SELECT cast(2 as int) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -757,7 +757,7 @@ struct -- !query SELECT cast(1 as float) FROM t UNION SELECT cast(2 as bigint) FROM t -- !query schema -struct +struct -- !query output 1.0 2.0 @@ -793,10 +793,10 @@ struct -- !query SELECT cast(1 as float) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1.0 -2 +2.0 -- !query @@ -973,10 +973,10 @@ struct -- !query SELECT cast(1 as double) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output 1.0 -2 +2.0 -- !query @@ -1153,10 +1153,10 @@ struct -- !query SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct +struct -- !query output -1 -2 +1.0 +2.0 -- !query @@ -1270,7 +1270,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as tinyint) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -1279,7 +1279,7 @@ struct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as smallint) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -1288,7 +1288,7 @@ struct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as int) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -1297,7 +1297,7 @@ struct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as bigint) FROM t -- !query schema -struct +struct -- !query output 1 2 @@ -1306,28 +1306,28 @@ struct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as float) FROM t -- !query schema -struct +struct -- !query output -1 +1.0 2.0 -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as double) FROM t -- !query schema -struct +struct -- !query output -1 +1.0 2.0 -- !query SELECT cast(1 as string) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t -- !query schema -struct +struct -- !query output -1 -2 +1.0 +2.0 -- !query @@ -1342,75 +1342,68 @@ struct -- !query SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t -- !query schema +struct +-- !query 
output +1 +2 + + +-- !query +SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t +-- !query schema +struct +-- !query output +true + + +-- !query +SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timestamp) FROM t +-- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"BINARY\"", - "dataType2" : "\"STRING\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t" + "stopIndex" : 94, + "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timestamp) FROM t" } ] } -- !query -SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t +SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FROM t -- !query schema struct<> -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkDateTimeException { - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"BOOLEAN\"", - "dataType2" : "\"STRING\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" + "expression" : "'1'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t" + "stopIndex" : 87, + "fragment" : "SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FROM t" } ] } --- !query -SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timestamp) FROM t --- !query schema -struct --- !query output -1 -2017-12-11 09:30:00 - - --- !query -SELECT cast(1 as string) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FROM t --- !query schema -struct --- !query output -1 -2017-12-11 - - -- !query SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as tinyint) FROM t -- !query schema @@ -1603,28 +1596,10 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct<> +struct -- !query output -org.apache.spark.sql.catalyst.ExtendedAnalysisException -{ - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", - "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"STRING\"", - "dataType2" : "\"BINARY\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t" - } ] -} +1 +2 -- !query @@ -1911,17 +1886,14 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema struct<> -- !query output 
-org.apache.spark.sql.catalyst.ExtendedAnalysisException +org.apache.spark.SparkRuntimeException { - "errorClass" : "INCOMPATIBLE_COLUMN_TYPE", - "sqlState" : "42825", + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", "messageParameters" : { - "columnOrdinalNumber" : "first", - "dataType1" : "\"STRING\"", - "dataType2" : "\"BOOLEAN\"", - "hint" : "", - "operator" : "UNION", - "tableOrdinalNumber" : "second" + "expression" : "'2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"BOOLEAN\"" }, "queryContext" : [ { "objectType" : "", @@ -2214,10 +2186,25 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct<CAST(2017-12-12 09:30:00.0 AS TIMESTAMP):string> +struct<> -- !query output -2 -2017-12-12 09:30:00 +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"TIMESTAMP\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 1, + "stopIndex" : 94, + "fragment" : "SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as string) FROM t" + } ] +} -- !query @@ -2484,10 +2471,25 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException -- !query SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as string) FROM t -- !query schema -struct<CAST(2017-12-12 09:30:00 AS DATE):string> +struct<> -- !query output -2 -2017-12-12 +org.apache.spark.SparkDateTimeException +{ + "errorClass" : "CAST_INVALID_INPUT", + "sqlState" : "22018", + "messageParameters" : { + "expression" : "'2'", + "sourceType" : "\"STRING\"", + "targetType" : "\"DATE\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 1, + "stopIndex" : 87, + "fragment" : "SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as string) FROM t" + } ] +} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out index f6d1120c75ff2..67645cfb732f5 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out @@ -191,13 +191,12 @@ struct<> -- !query output org.apache.spark.sql.catalyst.ExtendedAnalysisException { - "errorClass" : "DATATYPE_MISMATCH.SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE", + "errorClass" : "DATATYPE_MISMATCH.RANGE_FRAME_INVALID_TYPE", "sqlState" : "42K09", "messageParameters" : { - "expectedType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", - "exprType" : "\"BINARY\"", - "location" : "upper", - "sqlExpr" : "\"RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING\"" + "orderSpecType" : "\"BINARY\"", + "sqlExpr" : "\"(PARTITION BY 1 ORDER BY CAST(1 AS BINARY) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)\"", + "valueBoundaryType" : "\"INT\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out index 1caeac58ab0ba..ad12f8bd03fd0 100644 --- a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out @@ -397,9 +397,25 @@
diff --git a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
index 1caeac58ab0ba..ad12f8bd03fd0 100644
--- a/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udaf/udaf-group-by.sql.out
@@ -397,9 +397,25 @@ org.apache.spark.sql.AnalysisException
-- !query
SELECT k, udaf(v) FROM test_agg GROUP BY k HAVING udaf(v) = false
-- !query schema
-struct
+struct<>
-- !query output
-4 0
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "left" : "\"INT\"",
+    "right" : "\"BOOLEAN\"",
+    "sqlExpr" : "\"(udaf(v) = false)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 51,
+    "stopIndex" : 65,
+    "fragment" : "udaf(v) = false"
+  } ]
+}


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out
index ce70c91d8d06e..6a70c8b96841d 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out
@@ -520,27 +520,9 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
-- !query
SELECT udf(every("true"))
-- !query schema
-struct<>
+struct
-- !query output
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"true\"",
-    "inputType" : "\"STRING\"",
-    "paramIndex" : "first",
-    "requiredType" : "\"BOOLEAN\"",
-    "sqlExpr" : "\"every(true)\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 12,
-    "stopIndex" : 24,
-    "fragment" : "every(\"true\")"
-  } ]
-}
+true


-- !query
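
Two boolean-coercion effects meet here, and they pull in opposite directions. Under ANSI, a STRING argument in a position that requires BOOLEAN is implicitly cast, so every("true") now succeeds; but ANSI refuses to reconcile INT with BOOLEAN in a comparison, so the HAVING predicate above fails analysis. A sketch, reusing the assumed `spark` session from the previous example:

// ANSI on (the new suite default).
spark.conf.set("spark.sql.ansi.enabled", true)

// The string argument is implicitly cast to BOOLEAN, so this returns true
// instead of DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE.
spark.sql("SELECT every('true')").show()

// INT = BOOLEAN is not reconciled, mirroring the udaf golden file: this
// fails analysis with DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES.
scala.util.Try(spark.sql("SELECT 1 = false").collect())
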
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-union.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-union.sql.out
index a355bdb16580a..7f2931c3ade0a 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-union.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-union.sql.out
@@ -31,20 +31,31 @@ struct
-- !query
SELECT udf(c1) as c1, udf(c2) as c2
-FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1
+FROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 WHERE c2 = 'a'
  UNION ALL
  SELECT udf(c1) as c1, udf(c2) as c2 FROM t2
  UNION ALL
  SELECT udf(c1) as c1, udf(c2) as c2 FROM t2)
-- !query schema
-struct
+struct<>
-- !query output
-1.0 1
-1.0 1
-1.0 a
-2.0 4
-2.0 4
-2.0 b
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'a'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 243,
+    "fragment" : "SELECT udf(c1) as c1, udf(c2) as c2\nFROM (SELECT udf(c1) as c1, udf(c2) as c2 FROM t1 WHERE c2 = 'a'\n  UNION ALL\n  SELECT udf(c1) as c1, udf(c2) as c2 FROM t2\n  UNION ALL\n  SELECT udf(c1) as c1, udf(c2) as c2 FROM t2)"
+  } ]
+}


-- !query
@@ -106,10 +117,25 @@ SELECT map(1, 2), udf('str') as str
UNION ALL
SELECT map(1, 2, 3, NULL), udf(1)
-- !query schema
-struct,str:string>
+struct<>
-- !query output
-{1:2,3:null} 1
-{1:2} str
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'str'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 79,
+    "fragment" : "SELECT map(1, 2), udf('str') as str\nUNION ALL\nSELECT map(1, 2, 3, NULL), udf(1)"
+  } ]
+}


-- !query
@@ -117,10 +143,25 @@ SELECT array(1, 2), udf('str') as str
UNION ALL
SELECT array(1, 2, 3, NULL), udf(1)
-- !query schema
-struct,str:string>
+struct<>
-- !query output
-[1,2,3,null] 1
-[1,2] str
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'str'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 83,
+    "fragment" : "SELECT array(1, 2), udf('str') as str\nUNION ALL\nSELECT array(1, 2, 3, NULL), udf(1)"
+  } ]
+}


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
index 451f514a21708..40e24e7b4e873 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
@@ -379,17 +379,23 @@ FROM testData
WINDOW w AS (PARTITION BY udf(cate) ORDER BY udf(val))
ORDER BY cate, udf(val)
-- !query schema
-struct,collect_set:array,skewness:double,kurtosis:double>
+struct<>
-- !query output
-NULL NULL NULL NULL NULL 0 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 1 0.5 0.0 1 1 NULL NULL 0 NULL NULL NULL NULL [] [] NULL NULL
-3 NULL 3 3 3 1 3 3.0 NULL NULL 3 NULL NULL 3 NULL 3 3 3 2 2 1.0 1.0 2 2 0.0 NULL 1 0.0 NULL NULL 0.0 [3] [3] NULL NULL
-NULL a NULL NULL NULL 0 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 1 0.25 0.0 1 1 NULL NULL 0 NULL NULL NULL NULL [] [] NULL NULL
-1 a 1 1 1 2 2 1.0 0.0 NULL 1 NULL NULL 1 NULL 1 1 1 2 2 0.75 0.3333333333333333 1 2 0.0 0.0 1 0.0 NULL 0.0 0.0 [1,1] [1] 0.7071067811865476 -1.5
-1 a 1 1 1 2 2 1.0 0.0 NULL 1 NULL NULL 1 NULL 1 1 1 2 2 0.75 0.3333333333333333 2 3 0.0 0.0 1 0.0 NULL 0.0 0.0 [1,1] [1] 0.7071067811865476 -1.5
-2 a 2 1 1 3 4 1.3333333333333333 0.5773502691896258 NULL 1 NULL NULL 1 NULL 2 2 2 4 3 1.0 1.0 2 4 0.22222222222222224 0.33333333333333337 2 4.772185885555555E8 1.0 0.5773502691896258 0.4714045207910317 [1,1,2] [1,2] 1.1539890888012805 -0.6672217220327235
-1 b 1 1 1 1 1 1.0 NULL 1 1 1 1 1 1 1 1 1 1 1 0.3333333333333333 0.0 1 1 0.0 NULL 1 NULL NULL NULL 0.0 [1] [1] NULL NULL
-2 b 2 1 1 2 3 1.5 0.7071067811865476 1 1 1 1 1 1 2 2 2 2 2 0.6666666666666666 0.5 1 2 0.25 0.5 2 0.0 NULL 0.7071067811865476 0.5 [1,2] [1,2] 0.0 -2.0000000000000013
-3 b 3 1 1 3 6 2.0 1.0 1 1 1 1 1 1 3 3 3 3 3 1.0 1.0 2 3 0.6666666666666666 1.0 3 5.3687091175E8 1.0 1.0 0.816496580927726 [1,2,3] [1,2,3] 0.7057890433107311 -1.4999999999999984
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "DIVIDE_BY_ZERO",
+  "sqlState" : "22012",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1126,
+    "stopIndex" : 1161,
+    "fragment" : "corr(udf(val), udf(val_long)) OVER w"
+  } ]
+}


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/union.sql.out b/sql/core/src/test/resources/sql-tests/results/union.sql.out
index 674e58cd102f0..3825470777bdb 100644
--- a/sql/core/src/test/resources/sql-tests/results/union.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/union.sql.out
@@ -37,14 +37,25 @@ FROM (SELECT * FROM t1
  UNION ALL
  SELECT * FROM t2)
-- !query schema
-struct
+struct<>
-- !query output
-1.0 1
-1.0 1
-1.0 a
-2.0 4
-2.0 4
-2.0 b
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'a'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 120,
+    "fragment" : "SELECT *\nFROM (SELECT * FROM t1\n  UNION ALL\n  SELECT * FROM t2\n  UNION ALL\n  SELECT * FROM t2)"
+  } ]
+}


-- !query
@@ -106,10 +117,25 @@ SELECT map(1, 2), 'str'
UNION ALL
SELECT map(1, 2, 3, NULL), 1
-- !query schema
-struct,str:string>
+struct<>
-- !query output
-{1:2,3:null} 1
-{1:2} str
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'str'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 62,
+    "fragment" : "SELECT map(1, 2), 'str'\nUNION ALL\nSELECT map(1, 2, 3, NULL), 1"
+  } ]
+}


-- !query
@@ -117,10 +143,25 @@ SELECT array(1, 2), 'str'
UNION ALL
SELECT array(1, 2, 3, NULL), 1
-- !query schema
-struct,str:string>
+struct<>
-- !query output
-[1,2,3,null] 1
-[1,2] str
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "expression" : "'str'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 66,
+    "fragment" : "SELECT array(1, 2), 'str'\nUNION ALL\nSELECT array(1, 2, 3, NULL), 1"
+  } ]
+}


-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out
index 96cf9e53787f7..182a4b819fcb0 100644
--- a/sql/core/src/test/resources/sql-tests/results/window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out
@@ -557,17 +557,23 @@ FROM testData
WINDOW w AS (PARTITION BY cate ORDER BY val)
ORDER BY cate, val
-- !query schema
-struct,collect_set:array,skewness:double,kurtosis:double>
+struct<>
-- !query output
-NULL NULL NULL NULL NULL 0 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 1 0.5 0.0 1 1 NULL NULL 0 NULL NULL NULL NULL [] [] NULL NULL
-3 NULL 3 3 3 1 3 3.0 NULL NULL 3 NULL NULL 3 NULL 3 3 3 2 2 1.0 1.0 2 2 0.0 NULL 1 0.0 NULL NULL 0.0 [3] [3] NULL NULL
-NULL a NULL NULL NULL 0 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 1 0.25 0.0 1 1 NULL NULL 0 NULL NULL NULL NULL [] [] NULL NULL
-1 a 1 1 1 2 2 1.0 0.0 NULL 1 NULL NULL 1 NULL 1 1 1 2 2 0.75 0.3333333333333333 1 2 0.0 0.0 1 0.0 NULL 0.0 0.0 [1,1] [1] 0.7071067811865476 -1.5
-1 a 1 1 1 2 2 1.0 0.0 NULL 1 NULL NULL 1 NULL 1 1 1 2 2 0.75 0.3333333333333333 2 3 0.0 0.0 1 0.0 NULL 0.0 0.0 [1,1] [1] 0.7071067811865476 -1.5
-2 a 2 1 1 3 4 1.3333333333333333 0.5773502691896258 NULL 1 NULL NULL 1 NULL 2 2 2 4 3 1.0 1.0 2 4 0.22222222222222224 0.33333333333333337 2 4.772185885555555E8 1.0 0.5773502691896258 0.4714045207910317 [1,1,2] [1,2] 1.1539890888012805 -0.6672217220327235
-1 b 1 1 1 1 1 1.0 NULL 1 1 1 1 1 1 1 1 1 1 1 0.3333333333333333 0.0 1 1 0.0 NULL 1 NULL NULL NULL 0.0 [1] [1] NULL NULL
-2 b 2 1 1 2 3 1.5 0.7071067811865476 1 1 1 1 1 1 2 2 2 2 2 0.6666666666666666 0.5 1 2 0.25 0.5 2 0.0 NULL 0.7071067811865476 0.5 [1,2] [1,2] 0.0 -2.0000000000000013
-3 b 3 1 1 3 6 2.0 1.0 1 1 1 1 1 1 3 3 3 3 3 1.0 1.0 2 3 0.6666666666666666 1.0 3 5.3687091175E8 1.0 1.0 0.816496580927726 [1,2,3] [1,2,3] 0.7057890433107311 -1.4999999999999984
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "DIVIDE_BY_ZERO",
+  "sqlState" : "22012",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1016,
+    "stopIndex" : 1041,
+    "fragment" : "corr(val, val_long) OVER w"
+  } ]
+}


-- !query
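
The two window golden files change for a different reason than the UNION ones: with ANSI on, an intermediate division inside corr surfaces as DIVIDE_BY_ZERO (for example when a frame holds a single row and the variance terms are zero) instead of quietly producing NULL, and the error message names the config that restores the old behavior. A hedged sketch with the assumed `spark` session; the inline data is illustrative, not the suite's testData:

// With ANSI off, corr over a one-row frame yields NULL; with ANSI on the
// same query raises DIVIDE_BY_ZERO, as the golden file above records.
spark.conf.set("spark.sql.ansi.enabled", false)
spark.sql(
  """SELECT val, corr(val, val_long) OVER (PARTITION BY cate ORDER BY val) AS c
    |FROM VALUES (1, 'a', 10L), (2, 'a', 20L) AS t(val, cate, val_long)
    |""".stripMargin).show()
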
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
index 4bd20bc245613..0872efd92002c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
@@ -185,8 +185,8 @@ trait SQLQueryTestHelper extends Logging {
   */
  protected trait PgSQLTest

-  /** Trait that indicates ANSI-related tests with the ANSI mode enabled. */
-  protected trait AnsiTest
+  /** Trait that indicates Non-ANSI-related tests with the ANSI mode disabled. */
+  protected trait NonAnsiTest

  /** Trait that indicates an analyzer test that shows the analyzed plan string as output. */
  protected trait AnalyzerTest extends TestCase {
@@ -214,10 +214,10 @@ trait SQLQueryTestHelper extends Logging {
  }

  /** An ANSI-related test case. */
-  protected case class AnsiTestCase(
-      name: String, inputFile: String, resultFile: String) extends TestCase with AnsiTest {
+  protected case class NonAnsiTestCase(
+      name: String, inputFile: String, resultFile: String) extends TestCase with NonAnsiTest {
    override def asAnalyzerTest(newName: String, newResultFile: String): TestCase =
-      AnsiAnalyzerTestCase(newName, inputFile, newResultFile)
+      NonAnsiAnalyzerTestCase(newName, inputFile, newResultFile)
  }

  /** An analyzer test that shows the analyzed plan string as output. */
@@ -290,9 +290,9 @@ trait SQLQueryTestHelper extends Logging {
  protected case class RegularAnalyzerTestCase(
      name: String, inputFile: String, resultFile: String) extends AnalyzerTest

-  protected case class AnsiAnalyzerTestCase(
+  protected case class NonAnsiAnalyzerTestCase(
      name: String, inputFile: String, resultFile: String)
-    extends AnalyzerTest with AnsiTest
+    extends AnalyzerTest with NonAnsiTest

  protected case class PgSQLAnalyzerTestCase(
      name: String, inputFile: String, resultFile: String) extends AnalyzerTest with PgSQLTest
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index b031f45ddbf34..5c56377f21c20 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -306,13 +306,13 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
        localSparkSession.udf.register("vol", (s: String) => s)
        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
        localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)
-      case _: SQLQueryTestSuite#AnsiTest =>
-        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
+      case _: SQLQueryTestSuite#NonAnsiTest =>
+        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, false)
      case _: SQLQueryTestSuite#TimestampNTZTest =>
        localSparkSession.conf.set(SQLConf.TIMESTAMP_TYPE.key,
          TimestampTypes.TIMESTAMP_NTZ.toString)
      case _ =>
-        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, false)
+        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
    }

    if (sparkConfigSet.nonEmpty) {
@@ -445,8 +445,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
        }
      }
    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}postgreSQL")) {
      PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
-    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
-      AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
+    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}nonansi")) {
+      NonAnsiTestCase(testCaseName, absPath, resultFile) :: Nil
    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
      TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}cte.sql")) {
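
The rename inverts the harness's special case: ANSI is now the baseline for every golden file, and only inputs under nonansi/ opt out. A condensed model of that dispatch (the names mirror the suite, but this is an illustration, not the real code):

// Illustrative model only; the real logic lives in SQLQueryTestSuite.scala.
sealed trait Case
case object RegularCase extends Case
case object NonAnsiCase extends Case // inputs under the nonansi/ directory

def ansiEnabledFor(c: Case): Boolean = c match {
  case NonAnsiCase => false // legacy behavior, kept for the nonansi/ golden files
  case _           => true  // the new default everywhere else
}

assert(ansiEnabledFor(RegularCase) && !ansiEnabledFor(NonAnsiCase))
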
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
index 782f549182ec2..662f43fc00399 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
@@ -124,13 +124,13 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
      case _: SQLQueryTestSuite#PgSQLTest =>
        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
        statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
-      case _: SQLQueryTestSuite#AnsiTest =>
-        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
+      case _: SQLQueryTestSuite#NonAnsiTest =>
+        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
      case _: SQLQueryTestSuite#TimestampNTZTest =>
        statement.execute(s"SET ${SQLConf.TIMESTAMP_TYPE.key} = " +
          s"${TimestampTypes.TIMESTAMP_NTZ.toString}")
      case _ =>
-        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
+        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
    }

    // Run the SQL queries preparing them for comparison.
@@ -270,8 +270,8 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
      Seq.empty
    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}postgreSQL")) {
      PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
-    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
-      AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
+    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}nonansi")) {
+      NonAnsiTestCase(testCaseName, absPath, resultFile) :: Nil
    } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
      TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
    } else {
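
Taken together, the in-process suite and the Thrift-server suite now encode the same contract: ANSI on unless a test explicitly opts out. The user-visible version of that flip, sketched with the assumed local `spark` session from the earlier examples:

// ANSI on (matching the new suite default): an unparseable cast is an error.
spark.conf.set("spark.sql.ansi.enabled", true)
scala.util.Try(spark.sql("SELECT cast('a' as bigint)").collect()) // CAST_INVALID_INPUT

// ANSI off (the nonansi/ suites): the same cast silently returns NULL.
spark.conf.set("spark.sql.ansi.enabled", false)
spark.sql("SELECT cast('a' as bigint)").show()
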