author     Michael Armbrust <michael@databricks.com>  2014-10-24 18:36:35 -0700
committer  Josh Rosen <joshrosen@databricks.com>      2014-10-24 18:36:35 -0700
commit     3a845d3c048eebb0bddb3937128746fde3e8e4d8 (patch)
tree       192dbba101a08ff02de979efba297587f30fd721 /sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03
parent     898b22ab1fe90e8a3935b19566465046f2256fa6 (diff)
[SQL] Update Hive test harness for Hive 12 and 13
As part of the upgrade I also copy the newest version of the query tests, and whitelist a bunch of new ones that are now passing.

Author: Michael Armbrust <michael@databricks.com>

Closes #2936 from marmbrus/fix13tests and squashes the following commits:

d9cbdab [Michael Armbrust] Remove user specific tests
65801cd [Michael Armbrust] style and rat
8f6b09a [Michael Armbrust] Update test harness to work with both Hive 12 and 13.
f044843 [Michael Armbrust] Update Hive query tests and golden files to 0.13
Diffstat (limited to 'sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03')
-rw-r--r--  sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03  10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03 b/sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03
index 822897217e..05507162a9 100644
--- a/sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03
+++ b/sql/hive/src/test/resources/golden/inputddl6-4-5855e2998e26f63e927854afa86c1f03
@@ -1,10 +1,10 @@
-key string None
-value string None
-ds string None
+key string
+value string
+ds string
# Partition Information
# col_name data_type comment
-ds string None
+ds string
-Detailed Partition Information Partition(values:[2008-04-08], dbName:default, tableName:inputddl6, createTime:1389731342, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null)], location:file:/private/var/folders/36/cjkbrr953xg2p_krwrmn8h_r0000gn/T/sharkWarehouse7216708901107607121/inputddl6/ds=2008-04-08, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), parameters:{numFiles=1, transient_lastDdlTime=1389731342, numRows=0, totalSize=5812, rawDataSize=0})
\ No newline at end of file
+Detailed Partition Information Partition(values:[2008-04-08], dbName:default, tableName:inputddl6, createTime:1413882344, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null)], location:file:/private/var/folders/36/cjkbrr953xg2p_krwrmn8h_r0000gn/T/sparkHiveWarehouse1201055597819413730/inputddl6/ds=2008-04-08, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), parameters:{numFiles=1, transient_lastDdlTime=1413882344, COLUMN_STATS_ACCURATE=true, totalSize=5812, numRows=0, rawDataSize=0})
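
The golden file above records the output of describing the ds=2008-04-08 partition of the inputddl6 test table: under Hive 0.13 the per-column "None" comment placeholder disappears and COLUMN_STATS_ACCURATE is added to the partition parameters. As a rough sketch only (the statements and the data path below are assumptions, not the literal contents of the inputddl6.q test), output of this shape comes from HiveQL along these lines:

    -- hypothetical sketch, not the exact inputddl6.q test script
    CREATE TABLE inputddl6 (key STRING, value STRING) PARTITIONED BY (ds STRING);

    -- the source file path is assumed; the test suite loads one of its sample key/value files
    LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
      INTO TABLE inputddl6 PARTITION (ds='2008-04-08');

    -- DESCRIBE EXTENDED on the partition produces the column list and the
    -- 'Detailed Partition Information' block captured in the golden file
    DESCRIBE EXTENDED inputddl6 PARTITION (ds='2008-04-08');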