author      Cheng Hao <hao.cheng@intel.com>              2014-12-17 13:39:36 -0800
committer   Michael Armbrust <michael@databricks.com>    2014-12-17 13:39:36 -0800
commit      636d9fc450faaa0d8e82e0d34bb7b791e3812cb7 (patch)
tree        ab0de7c89131b6bda143dc51228df6410f3eea8a /sql/hive
parent      902e4d54acbc3c88163a5c6447aff68ed57475c1 (diff)
[SPARK-3739] [SQL] Update the split number based on block size for table scanning
In local mode, Hadoop/Hive ignores "mapred.map.tasks", so a small table file always becomes a single input split. Spark SQL did not honor that in table scanning, which produced different results in the Hive compatibility tests. This PR fixes that.

Author: Cheng Hao <hao.cheng@intel.com>

Closes #2589 from chenghao-intel/source_split and squashes the following commits:

dff38e7 [Cheng Hao] Remove the extra blank line
160a2b6 [Cheng Hao] fix the compiling bug
04d67f7 [Cheng Hao] Keep 1 split for small file in table scanning
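
A minimal sketch of the split-count decision this patch introduces (standalone for illustration; isLocal, the Hadoop Configuration, and defaultMinPartitions stand in for the corresponding fields on HadoopTableReader):

    import org.apache.hadoop.conf.Configuration

    // Sketch only: in local mode pass 0 so Hadoop falls back to block-size-based
    // splitting (a small file stays in one split, matching Hive's local-mode behavior);
    // otherwise keep the previous max() of mapred.map.tasks and defaultMinPartitions.
    def minSplitsPerRDD(isLocal: Boolean, hadoopConf: Configuration, defaultMinPartitions: Int): Int = {
      if (isLocal) {
        0
      } else {
        math.max(hadoopConf.getInt("mapred.map.tasks", 1), defaultMinPartitions)
      }
    }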
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala                                    13
-rw-r--r--  sql/hive/src/test/resources/golden/file_split_for_small_table-0-7a45831bf96814d9a7fc3d78fb7bd8dc      500
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala                    9
3 files changed, 517 insertions, 5 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index f60bc3788e..c368715f7c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -57,10 +57,15 @@ class HadoopTableReader(
@transient hiveExtraConf: HiveConf)
extends TableReader {
-  // Choose the minimum number of splits. If mapred.map.tasks is set, then use that unless
-  // it is smaller than what Spark suggests.
-  private val _minSplitsPerRDD = math.max(
-    sc.hiveconf.getInt("mapred.map.tasks", 1), sc.sparkContext.defaultMinPartitions)
+  // Hadoop honors "mapred.map.tasks" as a hint, but ignores it when mapred.job.tracker is "local".
+  // https://hadoop.apache.org/docs/r1.0.4/mapred-default.html
+  //
+  // To keep consistency with Hive, we also let it be 0 in local mode.
+  private val _minSplitsPerRDD = if (sc.sparkContext.isLocal) {
+    0 // will be split based on block size by default.
+  } else {
+    math.max(sc.hiveconf.getInt("mapred.map.tasks", 1), sc.sparkContext.defaultMinPartitions)
+  }
// TODO: set aws s3 credentials.
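
Why passing 0 works: in Hadoop's old mapred FileInputFormat, the numSplits argument is only a hint used to derive a goal size, and with 0 the goal size degenerates to the total input size, so the effective split size is governed by the block size (and the configured minimum split size). A rough sketch of that sizing rule, assuming the Hadoop 1.x behavior (an approximation for illustration, not the actual Hadoop source):

    // Approximate split sizing in org.apache.hadoop.mapred.FileInputFormat:
    // with numSplits == 0 the goal size equals the total input size, so a file
    // smaller than one block yields a single split.
    def approxSplitSize(totalSize: Long, numSplits: Int, blockSize: Long, minSize: Long): Long = {
      val goalSize = totalSize / math.max(numSplits, 1)
      math.max(minSize, math.min(goalSize, blockSize))
    }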
diff --git a/sql/hive/src/test/resources/golden/file_split_for_small_table-0-7a45831bf96814d9a7fc3d78fb7bd8dc b/sql/hive/src/test/resources/golden/file_split_for_small_table-0-7a45831bf96814d9a7fc3d78fb7bd8dc
new file mode 100644
index 0000000000..b70e127e82
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/file_split_for_small_table-0-7a45831bf96814d9a7fc3d78fb7bd8dc
@@ -0,0 +1,500 @@
+0 val_0
+0 val_0
+0 val_0
+2 val_2
+4 val_4
+5 val_5
+5 val_5
+5 val_5
+8 val_8
+9 val_9
+10 val_10
+11 val_11
+12 val_12
+12 val_12
+15 val_15
+15 val_15
+17 val_17
+18 val_18
+18 val_18
+19 val_19
+20 val_20
+24 val_24
+24 val_24
+26 val_26
+26 val_26
+27 val_27
+28 val_28
+30 val_30
+33 val_33
+34 val_34
+35 val_35
+35 val_35
+35 val_35
+37 val_37
+37 val_37
+41 val_41
+42 val_42
+42 val_42
+43 val_43
+44 val_44
+47 val_47
+51 val_51
+51 val_51
+53 val_53
+54 val_54
+57 val_57
+58 val_58
+58 val_58
+64 val_64
+65 val_65
+66 val_66
+67 val_67
+67 val_67
+69 val_69
+70 val_70
+70 val_70
+70 val_70
+72 val_72
+72 val_72
+74 val_74
+76 val_76
+76 val_76
+77 val_77
+78 val_78
+80 val_80
+82 val_82
+83 val_83
+83 val_83
+84 val_84
+84 val_84
+85 val_85
+86 val_86
+87 val_87
+90 val_90
+90 val_90
+90 val_90
+92 val_92
+95 val_95
+95 val_95
+96 val_96
+97 val_97
+97 val_97
+98 val_98
+98 val_98
+100 val_100
+100 val_100
+103 val_103
+103 val_103
+104 val_104
+104 val_104
+105 val_105
+111 val_111
+113 val_113
+113 val_113
+114 val_114
+116 val_116
+118 val_118
+118 val_118
+119 val_119
+119 val_119
+119 val_119
+120 val_120
+120 val_120
+125 val_125
+125 val_125
+126 val_126
+128 val_128
+128 val_128
+128 val_128
+129 val_129
+129 val_129
+131 val_131
+133 val_133
+134 val_134
+134 val_134
+136 val_136
+137 val_137
+137 val_137
+138 val_138
+138 val_138
+138 val_138
+138 val_138
+143 val_143
+145 val_145
+146 val_146
+146 val_146
+149 val_149
+149 val_149
+150 val_150
+152 val_152
+152 val_152
+153 val_153
+155 val_155
+156 val_156
+157 val_157
+158 val_158
+160 val_160
+162 val_162
+163 val_163
+164 val_164
+164 val_164
+165 val_165
+165 val_165
+166 val_166
+167 val_167
+167 val_167
+167 val_167
+168 val_168
+169 val_169
+169 val_169
+169 val_169
+169 val_169
+170 val_170
+172 val_172
+172 val_172
+174 val_174
+174 val_174
+175 val_175
+175 val_175
+176 val_176
+176 val_176
+177 val_177
+178 val_178
+179 val_179
+179 val_179
+180 val_180
+181 val_181
+183 val_183
+186 val_186
+187 val_187
+187 val_187
+187 val_187
+189 val_189
+190 val_190
+191 val_191
+191 val_191
+192 val_192
+193 val_193
+193 val_193
+193 val_193
+194 val_194
+195 val_195
+195 val_195
+196 val_196
+197 val_197
+197 val_197
+199 val_199
+199 val_199
+199 val_199
+200 val_200
+200 val_200
+201 val_201
+202 val_202
+203 val_203
+203 val_203
+205 val_205
+205 val_205
+207 val_207
+207 val_207
+208 val_208
+208 val_208
+208 val_208
+209 val_209
+209 val_209
+213 val_213
+213 val_213
+214 val_214
+216 val_216
+216 val_216
+217 val_217
+217 val_217
+218 val_218
+219 val_219
+219 val_219
+221 val_221
+221 val_221
+222 val_222
+223 val_223
+223 val_223
+224 val_224
+224 val_224
+226 val_226
+228 val_228
+229 val_229
+229 val_229
+230 val_230
+230 val_230
+230 val_230
+230 val_230
+230 val_230
+233 val_233
+233 val_233
+235 val_235
+237 val_237
+237 val_237
+238 val_238
+238 val_238
+239 val_239
+239 val_239
+241 val_241
+242 val_242
+242 val_242
+244 val_244
+247 val_247
+248 val_248
+249 val_249
+252 val_252
+255 val_255
+255 val_255
+256 val_256
+256 val_256
+257 val_257
+258 val_258
+260 val_260
+262 val_262
+263 val_263
+265 val_265
+265 val_265
+266 val_266
+272 val_272
+272 val_272
+273 val_273
+273 val_273
+273 val_273
+274 val_274
+275 val_275
+277 val_277
+277 val_277
+277 val_277
+277 val_277
+278 val_278
+278 val_278
+280 val_280
+280 val_280
+281 val_281
+281 val_281
+282 val_282
+282 val_282
+283 val_283
+284 val_284
+285 val_285
+286 val_286
+287 val_287
+288 val_288
+288 val_288
+289 val_289
+291 val_291
+292 val_292
+296 val_296
+298 val_298
+298 val_298
+298 val_298
+302 val_302
+305 val_305
+306 val_306
+307 val_307
+307 val_307
+308 val_308
+309 val_309
+309 val_309
+310 val_310
+311 val_311
+311 val_311
+311 val_311
+315 val_315
+316 val_316
+316 val_316
+316 val_316
+317 val_317
+317 val_317
+318 val_318
+318 val_318
+318 val_318
+321 val_321
+321 val_321
+322 val_322
+322 val_322
+323 val_323
+325 val_325
+325 val_325
+327 val_327
+327 val_327
+327 val_327
+331 val_331
+331 val_331
+332 val_332
+333 val_333
+333 val_333
+335 val_335
+336 val_336
+338 val_338
+339 val_339
+341 val_341
+342 val_342
+342 val_342
+344 val_344
+344 val_344
+345 val_345
+348 val_348
+348 val_348
+348 val_348
+348 val_348
+348 val_348
+351 val_351
+353 val_353
+353 val_353
+356 val_356
+360 val_360
+362 val_362
+364 val_364
+365 val_365
+366 val_366
+367 val_367
+367 val_367
+368 val_368
+369 val_369
+369 val_369
+369 val_369
+373 val_373
+374 val_374
+375 val_375
+377 val_377
+378 val_378
+379 val_379
+382 val_382
+382 val_382
+384 val_384
+384 val_384
+384 val_384
+386 val_386
+389 val_389
+392 val_392
+393 val_393
+394 val_394
+395 val_395
+395 val_395
+396 val_396
+396 val_396
+396 val_396
+397 val_397
+397 val_397
+399 val_399
+399 val_399
+400 val_400
+401 val_401
+401 val_401
+401 val_401
+401 val_401
+401 val_401
+402 val_402
+403 val_403
+403 val_403
+403 val_403
+404 val_404
+404 val_404
+406 val_406
+406 val_406
+406 val_406
+406 val_406
+407 val_407
+409 val_409
+409 val_409
+409 val_409
+411 val_411
+413 val_413
+413 val_413
+414 val_414
+414 val_414
+417 val_417
+417 val_417
+417 val_417
+418 val_418
+419 val_419
+421 val_421
+424 val_424
+424 val_424
+427 val_427
+429 val_429
+429 val_429
+430 val_430
+430 val_430
+430 val_430
+431 val_431
+431 val_431
+431 val_431
+432 val_432
+435 val_435
+436 val_436
+437 val_437
+438 val_438
+438 val_438
+438 val_438
+439 val_439
+439 val_439
+443 val_443
+444 val_444
+446 val_446
+448 val_448
+449 val_449
+452 val_452
+453 val_453
+454 val_454
+454 val_454
+454 val_454
+455 val_455
+457 val_457
+458 val_458
+458 val_458
+459 val_459
+459 val_459
+460 val_460
+462 val_462
+462 val_462
+463 val_463
+463 val_463
+466 val_466
+466 val_466
+466 val_466
+467 val_467
+468 val_468
+468 val_468
+468 val_468
+468 val_468
+469 val_469
+469 val_469
+469 val_469
+469 val_469
+469 val_469
+470 val_470
+472 val_472
+475 val_475
+477 val_477
+478 val_478
+478 val_478
+479 val_479
+480 val_480
+480 val_480
+480 val_480
+481 val_481
+482 val_482
+483 val_483
+484 val_484
+485 val_485
+487 val_487
+489 val_489
+489 val_489
+489 val_489
+489 val_489
+490 val_490
+491 val_491
+492 val_492
+492 val_492
+493 val_493
+494 val_494
+495 val_495
+496 val_496
+497 val_497
+498 val_498
+498 val_498
+498 val_498
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index 54c0f017d4..a0ace91060 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -44,6 +44,14 @@ class HiveTableScanSuite extends HiveComparisonTest {
|SELECT * from part_scan_test;
""".stripMargin)
+  // In the unit tests, kv1.txt is a small file that is loaded as the table src.
+  // Since the small file will be treated as a single split, we expect
+  // Hive and Spark SQL HQL to produce the same output even for SORT BY.
+  createQueryTest("file_split_for_small_table",
+    """
+      |SELECT key, value FROM src SORT BY key, value
+    """.stripMargin)
+
test("Spark-4041: lowercase issue") {
TestHive.sql("CREATE TABLE tb (KEY INT, VALUE STRING) STORED AS ORC")
TestHive.sql("insert into table tb select key, value from src")
@@ -68,5 +76,4 @@ class HiveTableScanSuite extends HiveComparisonTest {
=== Array(Row(java.sql.Timestamp.valueOf("2014-12-11 00:00:00")),Row(null)))
TestHive.sql("DROP TABLE timestamp_query_null")
}
-
}