From 411454475a031869eb7dc0c5fd84f41b3fdfa295 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun <dongjoon@apache.org>
Date: Thu, 21 Apr 2016 16:09:50 -0700
Subject: [PATCH] [SPARK-14780] [R] Add `setLogLevel` to SparkR

## What changes were proposed in this pull request?

This PR aims to add `setLogLevel` function to SparkR shell.

**Spark Shell**
```scala
scala> sc.setLogLevel("ERROR")
```

**PySpark**
```python
>>> sc.setLogLevel("ERROR")
```

**SparkR (this PR)**
```r
> setLogLevel(sc, "ERROR")
NULL
```

## How was this patch tested?

Pass the Jenkins tests, including a new R test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #12547 from dongjoon-hyun/SPARK-14780.
---
 R/pkg/NAMESPACE                          |  3 +++
 R/pkg/R/context.R                        | 17 +++++++++++++++++
 R/pkg/inst/tests/testthat/test_context.R |  5 +++++
 3 files changed, 25 insertions(+)

diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 667fff7192..b3aff10b7d 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -24,6 +24,9 @@ export("setJobGroup",
        "clearJobGroup",
        "cancelJobGroup")
 
+# Export Utility methods
+export("setLogLevel")
+
 exportClasses("DataFrame")
 
 exportMethods("arrange",
diff --git a/R/pkg/R/context.R b/R/pkg/R/context.R
index b0e67c8ad2..4105a6e5c8 100644
--- a/R/pkg/R/context.R
+++ b/R/pkg/R/context.R
@@ -225,3 +225,20 @@ broadcast <- function(sc, object) {
 setCheckpointDir <- function(sc, dirName) {
   invisible(callJMethod(sc, "setCheckpointDir", suppressWarnings(normalizePath(dirName))))
 }
+
+#' Set new log level
+#'
+#' Set new log level: "ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN"
+#'
+#' @rdname setLogLevel
+#' @param sc Spark Context to use
+#' @param level New log level
+#' @export
+#' @examples
+#'\dontrun{
+#' setLogLevel(sc, "ERROR")
+#'}
+
+setLogLevel <- function(sc, level) {
+  callJMethod(sc, "setLogLevel", level)
+}
diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R
index 9f51161230..ffa067eb5e 100644
--- a/R/pkg/inst/tests/testthat/test_context.R
+++ b/R/pkg/inst/tests/testthat/test_context.R
@@ -90,6 +90,11 @@ test_that("job group functions can be called", {
   clearJobGroup(sc)
 })
 
+test_that("utility function can be called", {
+  sc <- sparkR.init()
+  setLogLevel(sc, "ERROR")
+})
+
 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
   e <- new.env()
   e[["spark.driver.memory"]] <- "512m"
-- 
GitLab