Diffstat (limited to 'R/pkg/NAMESPACE')
-rw-r--r--  R/pkg/NAMESPACE  182
1 file changed, 182 insertions, 0 deletions
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
new file mode 100644
index 0000000000..a354cdce74
--- /dev/null
+++ b/R/pkg/NAMESPACE
@@ -0,0 +1,182 @@
+#exportPattern("^[[:alpha:]]+")
+exportClasses("RDD")
+exportClasses("Broadcast")
+exportMethods(
+ "aggregateByKey",
+ "aggregateRDD",
+ "cache",
+ "checkpoint",
+ "coalesce",
+ "cogroup",
+ "collect",
+ "collectAsMap",
+ "collectPartition",
+ "combineByKey",
+ "count",
+ "countByKey",
+ "countByValue",
+ "distinct",
+ "Filter",
+ "filterRDD",
+ "first",
+ "flatMap",
+ "flatMapValues",
+ "fold",
+ "foldByKey",
+ "foreach",
+ "foreachPartition",
+ "fullOuterJoin",
+ "glom",
+ "groupByKey",
+ "join",
+ "keyBy",
+ "keys",
+ "length",
+ "lapply",
+ "lapplyPartition",
+ "lapplyPartitionsWithIndex",
+ "leftOuterJoin",
+ "lookup",
+ "map",
+ "mapPartitions",
+ "mapPartitionsWithIndex",
+ "mapValues",
+ "maximum",
+ "minimum",
+ "numPartitions",
+ "partitionBy",
+ "persist",
+ "pipeRDD",
+ "reduce",
+ "reduceByKey",
+ "reduceByKeyLocally",
+ "repartition",
+ "rightOuterJoin",
+ "sampleRDD",
+ "saveAsTextFile",
+ "saveAsObjectFile",
+ "sortBy",
+ "sortByKey",
+ "sumRDD",
+ "take",
+ "takeOrdered",
+ "takeSample",
+ "top",
+ "unionRDD",
+ "unpersist",
+ "value",
+ "values",
+ "zipRDD",
+ "zipWithIndex",
+ "zipWithUniqueId"
+ )
+
+# S3 methods exported
+export(
+ "textFile",
+ "objectFile",
+ "parallelize",
+ "hashCode",
+ "includePackage",
+ "broadcast",
+ "setBroadcastValue",
+ "setCheckpointDir"
+ )
+export("sparkR.init")
+export("sparkR.stop")
+export("print.jobj")
+useDynLib(SparkR, stringHashCode)
+importFrom(methods, setGeneric, setMethod, setOldClass)
+
+# SparkRSQL
+
+exportClasses("DataFrame")
+
+exportMethods("columns",
+ "distinct",
+ "dtypes",
+ "explain",
+ "filter",
+ "groupBy",
+ "head",
+ "insertInto",
+ "intersect",
+ "isLocal",
+ "limit",
+ "orderBy",
+ "names",
+ "printSchema",
+ "registerTempTable",
+ "repartition",
+ "sampleDF",
+ "saveAsParquetFile",
+ "saveAsTable",
+ "saveDF",
+ "schema",
+ "select",
+ "selectExpr",
+ "show",
+ "showDF",
+ "sortDF",
+ "subtract",
+ "toJSON",
+ "toRDD",
+ "unionAll",
+ "where",
+ "withColumn",
+ "withColumnRenamed")
+
+exportClasses("Column")
+
+exportMethods("abs",
+ "alias",
+ "approxCountDistinct",
+ "asc",
+ "avg",
+ "cast",
+ "contains",
+ "countDistinct",
+ "desc",
+ "endsWith",
+ "getField",
+ "getItem",
+ "isNotNull",
+ "isNull",
+ "last",
+ "like",
+ "lower",
+ "max",
+ "mean",
+ "min",
+ "rlike",
+ "sqrt",
+ "startsWith",
+ "substr",
+ "sum",
+ "sumDistinct",
+ "upper")
+
+exportClasses("GroupedData")
+exportMethods("agg")
+
+export("sparkRSQL.init",
+ "sparkRHive.init")
+
+export("cacheTable",
+ "clearCache",
+ "createDataFrame",
+ "createExternalTable",
+ "dropTempTable",
+ "jsonFile",
+ "jsonRDD",
+ "loadDF",
+ "parquetFile",
+ "sql",
+ "table",
+ "tableNames",
+ "tables",
+ "toDF",
+ "uncacheTable")
+
+export("print.structType",
+ "print.structField")