aboutsummaryrefslogtreecommitdiff
path: root/R/pkg/inst/tests/testthat/test_binaryFile.R
blob: b5c279e3156e5ac3b6430e9f84e3a83f3f9bb389 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Unit tests for round-tripping RDDs through binary object files
# (saveAsObjectFile / objectFile).
context("functions on binary files")

# JavaSparkContext handle
# A Hive-less SparkSession is started once for the whole file; the underlying
# JavaSparkContext is fetched through the internal SQLUtils helper so the
# RDD-level APIs (textFile, parallelize, objectFile) can be exercised.
sparkSession <- sparkR.session(enableHiveSupport = FALSE)
sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

# Two-line text fixture written to temporary files by the tests below.
mockFile <- c("Spark is pretty.", "Spark is awesome.")

test_that("saveAsObjectFile()/objectFile() following textFile() works", {
  # Write the fixture to a plain text file, read it as an RDD, then round-trip
  # that RDD through a binary object file and check nothing was lost.
  textPath <- tempfile(pattern = "spark-test", fileext = ".tmp")
  objectPath <- tempfile(pattern = "spark-test", fileext = ".tmp")
  writeLines(mockFile, textPath)

  lines <- textFile(sc, textPath, 1)
  saveAsObjectFile(lines, objectPath)
  restored <- objectFile(sc, objectPath)
  expect_equal(collectRDD(restored), as.list(mockFile))

  # objectPath is a directory created by Spark, hence the recursive delete.
  unlink(textPath)
  unlink(objectPath, recursive = TRUE)
})

test_that("saveAsObjectFile()/objectFile() works on a parallelized list", {
  # Round-trip an in-memory list through a single-partition RDD and a binary
  # object file; the restored contents must equal the original list.
  objectPath <- tempfile(pattern = "spark-test", fileext = ".tmp")

  values <- list(1, 2, 3)
  saveAsObjectFile(parallelize(sc, values, 1), objectPath)
  restored <- objectFile(sc, objectPath)
  expect_equal(collectRDD(restored), values)

  # Spark writes a directory at objectPath, so delete recursively.
  unlink(objectPath, recursive = TRUE)
})

test_that("saveAsObjectFile()/objectFile() following RDD transformations works", {
  # Build a classic word-count pipeline on top of a text file, persist the
  # reduced result as a binary object file, and verify the counts survive
  # the round trip (order-insensitively, via sortKeyValueList).
  textPath <- tempfile(pattern = "spark-test", fileext = ".tmp")
  objectPath <- tempfile(pattern = "spark-test", fileext = ".tmp")
  writeLines(mockFile, textPath)

  lines <- textFile(sc, textPath)

  # Split each line into words, pair every word with a count of one, then
  # sum the counts per word across 2 partitions.
  tokens <- flatMap(lines, function(line) { strsplit(line, " ")[[1]] })
  pairs <- lapply(tokens, function(word) { list(word, 1L) })
  counts <- reduceByKey(pairs, "+", 2L)

  saveAsObjectFile(counts, objectPath)
  restored <- objectFile(sc, objectPath)

  actual <- collectRDD(restored)
  expected <- list(list("awesome.", 1), list("Spark", 2), list("pretty.", 1),
                    list("is", 2))
  expect_equal(sortKeyValueList(actual), sortKeyValueList(expected))

  # objectPath is a Spark-created directory; remove it recursively.
  unlink(textPath)
  unlink(objectPath, recursive = TRUE)
})

test_that("saveAsObjectFile()/objectFile() works with multiple paths", {
  # Persist two single-element RDDs to separate object files, then read both
  # back through one objectFile() call; the union must contain two records.
  pathA <- tempfile(pattern = "spark-test", fileext = ".tmp")
  pathB <- tempfile(pattern = "spark-test", fileext = ".tmp")

  saveAsObjectFile(parallelize(sc, "Spark is pretty."), pathA)
  saveAsObjectFile(parallelize(sc, "Spark is awesome."), pathB)

  combined <- objectFile(sc, c(pathA, pathB))
  expect_equal(countRDD(combined), 2)

  # Both paths are Spark-created directories; remove them recursively.
  unlink(pathA, recursive = TRUE)
  unlink(pathB, recursive = TRUE)
})

# Tear down the SparkSession started at the top of this file.
sparkR.session.stop()