diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 667fff7192b598265f12123703d9689e75a85524..b3aff10b7d08c193ec35a4e9997f2f098bdfc150 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -24,6 +24,9 @@ export("setJobGroup",
        "clearJobGroup",
        "cancelJobGroup")
 
+# Export utility methods
+export("setLogLevel")
+
 exportClasses("DataFrame")
 
 exportMethods("arrange",
diff --git a/R/pkg/R/context.R b/R/pkg/R/context.R
index b0e67c8ad26ab028b6434f1a2dcdace873a639e6..4105a6e5c825c54e7781d4d9f08ec5bfec58b04a 100644
--- a/R/pkg/R/context.R
+++ b/R/pkg/R/context.R
@@ -225,3 +225,20 @@ broadcast <- function(sc, object) {
 setCheckpointDir <- function(sc, dirName) {
   invisible(callJMethod(sc, "setCheckpointDir", suppressWarnings(normalizePath(dirName))))
 }
+
+#' Set new log level
+#'
+#' Sets the log level to one of "ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE" or "WARN".
+#'
+#' @rdname setLogLevel
+#' @param sc SparkContext to use
+#' @param level The new log level to set
+#' @export
+#' @examples
+#'\dontrun{
+#' setLogLevel(sc, "ERROR")
+#'}
+
+setLogLevel <- function(sc, level) {
+  invisible(callJMethod(sc, "setLogLevel", level))
+}
diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R
index 9f51161230e1aa11ced03bde0dfeabe8a373053c..ffa067eb5ea167b2e14425a3430c7e06850873e3 100644
--- a/R/pkg/inst/tests/testthat/test_context.R
+++ b/R/pkg/inst/tests/testthat/test_context.R
@@ -90,6 +90,11 @@ test_that("job group functions can be called", {
   clearJobGroup(sc)
 })
 
+test_that("utility function can be called", {
+  sc <- sparkR.init()
+  setLogLevel(sc, "ERROR")
+})
+
 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
   e <- new.env()
   e[["spark.driver.memory"]] <- "512m"