path: root/sql/core/src/test/resources/sql-tests/results/literals.sql.out
author    petermaxlee <petermaxlee@gmail.com>    2016-08-11 13:55:10 -0700
committer Reynold Xin <rxin@databricks.com>      2016-08-11 13:55:10 -0700
commit    cf9367826c38e5f34ae69b409f5d09c55ed1d319 (patch)
tree      9e4d03df11aaa9f857c32694f35c81f5a3539ed9 /sql/core/src/test/resources/sql-tests/results/literals.sql.out
parent    acaf2a81ad5238fd1bc81e7be2c328f40c07e755 (diff)
[SPARK-17018][SQL] literals.sql for testing literal parsing
## What changes were proposed in this pull request?

This patch adds literals.sql for testing literal parsing end-to-end in SQL.

## How was this patch tested?

The patch itself is only about adding test cases.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #14598 from petermaxlee/SPARK-17018-2.
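The golden file added below is generated by SQLQueryTestSuite from a plain list of queries. For context, the matching input file (presumably sql/core/src/test/resources/sql-tests/inputs/literals.sql, following the inputs/ vs. results/ layout) would contain just the bare statements, one per test case; a minimal sketch of its first few cases, reconstructed from the recorded output:

```sql
-- null and boolean literals in mixed case
select null, Null, nUll;
select true, tRue, false, fALse;

-- tinyint literals; the overflow cases fail with a ParseException, as recorded below
select 1Y;
select 127Y, -128Y;
select 128Y;
```

As far as I recall, the .sql.out results are never edited by hand: re-running SQLQueryTestSuite with the SPARK_GENERATE_GOLDEN_FILES environment variable set regenerates golden files like this one.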
Diffstat (limited to 'sql/core/src/test/resources/sql-tests/results/literals.sql.out')
-rw-r--r-- sql/core/src/test/resources/sql-tests/results/literals.sql.out | 374
1 file changed, 374 insertions(+), 0 deletions(-)
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
new file mode 100644
index 0000000000..6d5fabdf62
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -0,0 +1,374 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 38
+
+
+-- !query 0
+select null, Null, nUll
+-- !query 0 schema
+struct<NULL:null,NULL:null,NULL:null>
+-- !query 0 output
+NULL NULL NULL
+
+
+-- !query 1
+select true, tRue, false, fALse
+-- !query 1 schema
+struct<true:boolean,true:boolean,false:boolean,false:boolean>
+-- !query 1 output
+true true false false
+
+
+-- !query 2
+select 1Y
+-- !query 2 schema
+struct<1:tinyint>
+-- !query 2 output
+1
+
+
+-- !query 3
+select 127Y, -128Y
+-- !query 3 schema
+struct<>
+-- !query 3 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"128" Radix:10(line 1, pos 14)
+
+== SQL ==
+select 127Y, -128Y
+--------------^^^
+
+
+-- !query 4
+select 128Y
+-- !query 4 schema
+struct<>
+-- !query 4 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"128" Radix:10(line 1, pos 7)
+
+== SQL ==
+select 128Y
+-------^^^
+
+
+-- !query 5
+select 1S
+-- !query 5 schema
+struct<1:smallint>
+-- !query 5 output
+1
+
+
+-- !query 6
+select 32767S, -32768S
+-- !query 6 schema
+struct<>
+-- !query 6 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"32768" Radix:10(line 1, pos 16)
+
+== SQL ==
+select 32767S, -32768S
+----------------^^^
+
+
+-- !query 7
+select 32768S
+-- !query 7 schema
+struct<>
+-- !query 7 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"32768" Radix:10(line 1, pos 7)
+
+== SQL ==
+select 32768S
+-------^^^
+
+
+-- !query 8
+select 1L, 2147483648L
+-- !query 8 schema
+struct<1:bigint,2147483648:bigint>
+-- !query 8 output
+1 2147483648
+
+
+-- !query 9
+select 9223372036854775807L, -9223372036854775808L
+-- !query 9 schema
+struct<>
+-- !query 9 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+For input string: "9223372036854775808"(line 1, pos 30)
+
+== SQL ==
+select 9223372036854775807L, -9223372036854775808L
+------------------------------^^^
+
+
+-- !query 10
+select 9223372036854775808L
+-- !query 10 schema
+struct<>
+-- !query 10 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+For input string: "9223372036854775808"(line 1, pos 7)
+
+== SQL ==
+select 9223372036854775808L
+-------^^^
+
+
+-- !query 11
+select 1, -1
+-- !query 11 schema
+struct<1:int,(-1):int>
+-- !query 11 output
+1 -1
+
+
+-- !query 12
+select 2147483647, -2147483648
+-- !query 12 schema
+struct<2147483647:int,(-2147483648):bigint>
+-- !query 12 output
+2147483647 -2147483648
+
+
+-- !query 13
+select 9223372036854775807, -9223372036854775808
+-- !query 13 schema
+struct<9223372036854775807:bigint,(-9223372036854775808):decimal(19,0)>
+-- !query 13 output
+9223372036854775807 -9223372036854775808
+
+
+-- !query 14
+select 9223372036854775808, -9223372036854775809
+-- !query 14 schema
+struct<9223372036854775808:decimal(19,0),(-9223372036854775809):decimal(19,0)>
+-- !query 14 output
+9223372036854775808 -9223372036854775809
+
+
+-- !query 15
+select 1234567890123456789012345678901234567890
+-- !query 15 schema
+struct<>
+-- !query 15 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+DecimalType can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890
+
+
+-- !query 16
+select 1234567890123456789012345678901234567890.0
+-- !query 16 schema
+struct<>
+-- !query 16 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+DecimalType can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890.0
+
+
+-- !query 17
+select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1
+-- !query 17 schema
+struct<1.0:double,1.2:double,1.0E10:double,150000.0:double,0.1:double,0.1:double,10000.0:double,90.0:double,90.0:double,90.0:double,90.0:double>
+-- !query 17 output
+1.0 1.2 1.0E10 150000.0 0.1 0.1 10000.0 90.0 90.0 90.0 90.0
+
+
+-- !query 18
+select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5
+-- !query 18 schema
+struct<(-1.0):double,(-1.2):double,(-1.0E10):double,(-150000.0):double,(-0.1):double,(-0.1):double,(-10000.0):double>
+-- !query 18 output
+-1.0 -1.2 -1.0E10 -150000.0 -0.1 -0.1 -10000.0
+
+
+-- !query 19
+select .e3
+-- !query 19 schema
+struct<>
+-- !query 19 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+no viable alternative at input 'select .'(line 1, pos 7)
+
+== SQL ==
+select .e3
+-------^^^
+
+
+-- !query 20
+select 1E309, -1E309
+-- !query 20 schema
+struct<Infinity:double,(-Infinity):double>
+-- !query 20 output
+Infinity -Infinity
+
+
+-- !query 21
+select 0.3, -0.8, .5, -.18, 0.1111, .1111
+-- !query 21 schema
+struct<0.3:decimal(1,1),(-0.8):decimal(1,1),0.5:decimal(1,1),(-0.18):decimal(2,2),0.1111:decimal(4,4),0.1111:decimal(4,4)>
+-- !query 21 output
+0.3 -0.8 0.5 -0.18 0.1111 0.1111
+
+
+-- !query 22
+select 123456789012345678901234567890123456789e10, 123456789012345678901234567890123456789.1e10
+-- !query 22 schema
+struct<1.2345678901234568E48:double,1.2345678901234568E48:double>
+-- !query 22 output
+1.2345678901234568E48 1.2345678901234568E48
+
+
+-- !query 23
+select "Hello Peter!", 'hello lee!'
+-- !query 23 schema
+struct<Hello Peter!:string,hello lee!:string>
+-- !query 23 output
+Hello Peter! hello lee!
+
+
+-- !query 24
+select 'hello' 'world', 'hello' " " 'lee'
+-- !query 24 schema
+struct<helloworld:string,hello lee:string>
+-- !query 24 output
+helloworld hello lee
+
+
+-- !query 25
+select "hello 'peter'"
+-- !query 25 schema
+struct<hello 'peter':string>
+-- !query 25 output
+hello 'peter'
+
+
+-- !query 26
+select 'pattern%', 'no-pattern\%', 'pattern\\%', 'pattern\\\%'
+-- !query 26 schema
+struct<pattern%:string,no-pattern\%:string,pattern\%:string,pattern\\%:string>
+-- !query 26 output
+pattern% no-pattern\% pattern\% pattern\\%
+
+
+-- !query 27
+select '\'', '"', '\n', '\r', '\t', 'Z'
+-- !query 27 schema
+struct<':string,":string,
+:string, :string, :string,Z:string>
+-- !query 27 output
+' "
+ Z
+
+
+-- !query 28
+select '\110\145\154\154\157\041'
+-- !query 28 schema
+struct<Hello!:string>
+-- !query 28 output
+Hello!
+
+
+-- !query 29
+select '\u0057\u006F\u0072\u006C\u0064\u0020\u003A\u0029'
+-- !query 29 schema
+struct<World :):string>
+-- !query 29 output
+World :)
+
+
+-- !query 30
+select dAte '2016-03-12'
+-- !query 30 schema
+struct<DATE '2016-03-12':date>
+-- !query 30 output
+2016-03-12
+
+
+-- !query 31
+select date 'mar 11 2016'
+-- !query 31 schema
+struct<>
+-- !query 31 output
+java.lang.IllegalArgumentException
+null
+
+
+-- !query 32
+select tImEstAmp '2016-03-11 20:54:00.000'
+-- !query 32 schema
+struct<TIMESTAMP('2016-03-11 20:54:00.0'):timestamp>
+-- !query 32 output
+2016-03-11 20:54:00
+
+
+-- !query 33
+select timestamp '2016-33-11 20:54:00.000'
+-- !query 33 schema
+struct<>
+-- !query 33 output
+java.lang.IllegalArgumentException
+Timestamp format must be yyyy-mm-dd hh:mm:ss[.fffffffff]
+
+
+-- !query 34
+select interval 13.123456789 seconds, interval -13.123456789 second
+-- !query 34 schema
+struct<>
+-- !query 34 output
+scala.MatchError
+(interval 13 seconds 123 milliseconds 456 microseconds,CalendarIntervalType) (of class scala.Tuple2)
+
+
+-- !query 35
+select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond, 9 microsecond
+-- !query 35 schema
+struct<>
+-- !query 35 output
+scala.MatchError
+(interval 1 years 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds 8 milliseconds,CalendarIntervalType) (of class scala.Tuple2)
+
+
+-- !query 36
+select interval 10 nanoseconds
+-- !query 36 schema
+struct<>
+-- !query 36 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+No interval can be constructed(line 1, pos 16)
+
+== SQL ==
+select interval 10 nanoseconds
+----------------^^^
+
+
+-- !query 37
+select GEO '(10,-6)'
+-- !query 37 schema
+struct<>
+-- !query 37 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Literals of type 'GEO' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select GEO '(10,-6)'
+-------^^^