# Imports from base R
importFrom(methods, setGeneric, setMethod, setOldClass)
useDynLib(SparkR, stringHashCode)

# Functions and S3 methods exported
export("sparkR.init")
export("sparkR.stop")
export("print.jobj")
exportClasses("DataFrame")
exportMethods("cache",
"collect",
"columns",
"count",
"describe",
"distinct",
"dtypes",
"except",
"explain",
"filter",
"first",
"groupBy",
"head",
"insertInto",
"intersect",
"isLocal",
"join",
"length",
"limit",
"orderBy",
"names",
"persist",
"printSchema",
"registerTempTable",
"repartition",
"sampleDF",
"saveAsParquetFile",
"saveAsTable",
"saveDF",
"schema",
"select",
"selectExpr",
"show",
"showDF",
"sortDF",
"take",
"toJSON",
"toRDD",
"unionAll",
"unpersist",
"where",
"withColumn",
"withColumnRenamed")
exportClasses("Column")
exportMethods("abs",
"alias",
"approxCountDistinct",
"asc",
"avg",
"cast",
"contains",
"countDistinct",
"desc",
"endsWith",
"getField",
"getItem",
"isNotNull",
"isNull",
"last",
"like",
"lower",
"max",
"mean",
"min",
"rlike",
"sqrt",
"startsWith",
"substr",
"sum",
"sumDistinct",
"upper")
exportClasses("GroupedData")
exportMethods("agg")
export("sparkRSQL.init",
"sparkRHive.init")
export("cacheTable",
"clearCache",
"createDataFrame",
"createExternalTable",
"dropTempTable",
"jsonFile",
"jsonRDD",
"loadDF",
"parquetFile",
"sql",
"table",
"tableNames",
"tables",
"toDF",
"uncacheTable")
export("sparkRSQL.init",
"sparkRHive.init")
export("structField",
"structField.jobj",
"structField.character",
"print.structField",
"structType",
"structType.jobj",
"structType.structField",
"print.structType")