diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index e5afed2d0a93e873157d5d2404ca2cf39d47bff0..5a83883089e0e8a566c6802b1b2b5fbf462b3933 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -2,7 +2,7 @@ Package: SparkR
 Type: Package
 Title: R Frontend for Apache Spark
 Version: 2.0.0
-Date: 2016-07-07
+Date: 2016-08-27
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
                     email = "shivaram@cs.berkeley.edu"),
              person("Xiangrui", "Meng", role = "aut",
@@ -11,7 +11,7 @@ Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
                     email = "felixcheung@apache.org"),
              person(family = "The Apache Software Foundation", role = c("aut", "cph")))
 URL: http://www.apache.org/ http://spark.apache.org/
-BugReports: https://issues.apache.org/jira/secure/CreateIssueDetails!init.jspa?pid=12315420&components=12325400&issuetype=4
+BugReports: https://cwiki.apache.org/confluence/display/SPARK/Contributing+to+Spark#ContributingtoSpark-ContributingBugReports
 Depends:
     R (>= 3.0),
     methods
@@ -39,6 +39,7 @@ Collate:
     'deserialize.R'
     'functions.R'
     'install.R'
+    'jvm.R'
     'mllib.R'
     'serialize.R'
     'sparkR.R'
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index ad587a6b7d03aba6b6816d65ddf255e7ee6fbc37..5e625b2d8dbb430957c47e43bfb2af448380e3b9 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -364,4 +364,8 @@ S3method(structField, jobj)
 S3method(structType, jobj)
 S3method(structType, structField)
 
+export("sparkR.newJObject")
+export("sparkR.callJMethod")
+export("sparkR.callJStatic")
+
 export("install.spark")
diff --git a/R/pkg/R/jvm.R b/R/pkg/R/jvm.R
new file mode 100644
index 0000000000000000000000000000000000000000..bb5c77544a3dab1f4ccaf747843fcace8e350283
--- /dev/null
+++ b/R/pkg/R/jvm.R
@@ -0,0 +1,117 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Methods to directly access the JVM running the SparkR backend.
+
+#' Call Java Methods
+#'
+#' Call a Java method in the JVM running the Spark driver. The return
+#' value is automatically converted to an R object for simple objects. Other
+#' values are returned as a "jobj", which is a reference to an object on the JVM.
+#'
+#' @details
+#' This is a low-level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x object to invoke the method on. Should be a "jobj" created by sparkR.newJObject.
+#' @param methodName method name to call.
+#' @param ... parameters to pass to the Java method.
+#' @return the return value of the Java method. Either returned as an R object
+#'  if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJStatic}, \link{sparkR.newJObject}
+#' @rdname sparkR.callJMethod
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling sparkR.newJObject
+#' # Create a Java ArrayList and populate it
+#' jarray <- sparkR.newJObject("java.util.ArrayList")
+#' sparkR.callJMethod(jarray, "add", 42L)
+#' sparkR.callJMethod(jarray, "get", 0L) # Will print 42
+#' }
+#' @note sparkR.callJMethod since 2.0.1
+sparkR.callJMethod <- function(x, methodName, ...) {
+  callJMethod(x, methodName, ...)
+}
+
+#' Call Static Java Methods
+#'
+#' Call a static method in the JVM running the Spark driver. The return
+#' value is automatically converted to an R object for simple objects. Other
+#' values are returned as a "jobj", which is a reference to an object on the JVM.
+#'
+#' @details
+#' This is a low-level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x fully qualified Java class name that contains the static method to invoke.
+#' @param methodName name of static method to invoke.
+#' @param ... parameters to pass to the Java method.
+#' @return the return value of the Java method. Either returned as an R object
+#'  if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJMethod}, \link{sparkR.newJObject}
+#' @rdname sparkR.callJStatic
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling sparkR.callJStatic
+#' sparkR.callJStatic("java.lang.System", "currentTimeMillis")
+#' sparkR.callJStatic("java.lang.System", "getProperty", "java.home")
+#' }
+#' @note sparkR.callJStatic since 2.0.1
+sparkR.callJStatic <- function(x, methodName, ...) {
+  callJStatic(x, methodName, ...)
+}
+
+#' Create Java Objects
+#'
+#' Create a new Java object in the JVM running the Spark driver. The return
+#' value is automatically converted to an R object for simple objects. Other
+#' values are returned as a "jobj", which is a reference to an object on the JVM.
+#'
+#' @details
+#' This is a low-level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x fully qualified Java class name.
+#' @param ... arguments to be passed to the constructor.
+#' @return the object created. Either returned as an R object
+#'  if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJMethod}, \link{sparkR.callJStatic}
+#' @rdname sparkR.newJObject
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling sparkR.newJObject
+#' # Create a Java ArrayList and populate it
+#' jarray <- sparkR.newJObject("java.util.ArrayList")
+#' sparkR.callJMethod(jarray, "add", 42L)
+#' sparkR.callJMethod(jarray, "get", 0L) # Will print 42
+#' }
+#' @note sparkR.newJObject since 2.0.1
+sparkR.newJObject <- function(x, ...) {
+  newJObject(x, ...)
+}
diff --git a/R/pkg/inst/tests/testthat/test_jvm_api.R b/R/pkg/inst/tests/testthat/test_jvm_api.R
new file mode 100644
index 0000000000000000000000000000000000000000..151c52906bdb8f95dd38689c1a817cd5e08e1673
--- /dev/null
+++ b/R/pkg/inst/tests/testthat/test_jvm_api.R
@@ -0,0 +1,43 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+context("JVM API")
+
+sparkSession <- sparkR.session(enableHiveSupport = FALSE)
+
+test_that("Create and call methods on object", {
+  jarr <- newJObject("java.util.ArrayList")
+  # Add an element to the array
+  callJMethod(jarr, "add", 1L)
+  # Check if get returns the same element
+  expect_equal(callJMethod(jarr, "get", 0L), 1L)
+})
+
+test_that("Call static methods", {
+  # Convert a boolean to a string
+  strTrue <- callJStatic("java.lang.String", "valueOf", TRUE)
+  expect_equal(strTrue, "true")
+})
+
+test_that("Manually garbage collect objects", {
+  jarr <- newJObject("java.util.ArrayList")
+  cleanup.jobj(jarr)
+  # Using a jobj after GC should throw an error
+  expect_error(print(jarr), "Error in invokeJava.*")
+})
+
+sparkR.session.stop()
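For reference, a minimal usage sketch of the three wrappers this patch exports (assumes a SparkR session is already running; java.util.HashMap is only an illustrative JDK class, not something added by the patch):

library(SparkR)
sparkR.session()                                     # starts the backing JVM

# Create a Java object and call instance methods on it
jmap <- sparkR.newJObject("java.util.HashMap")
sparkR.callJMethod(jmap, "put", "answer", 42L)
sparkR.callJMethod(jmap, "get", "answer")            # returns 42

# Call a static method on a Java class
sparkR.callJStatic("java.lang.System", "getProperty", "java.version")

sparkR.session.stop()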