diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 0cfa12b997d69a3041d1b7d1b96f5c8d9fd8e209..c126f9efb475adf7b705e4c48f88b886d17e2b86 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -458,7 +458,10 @@ setMethod("registerTempTable",
 setMethod("insertInto",
           signature(x = "DataFrame", tableName = "character"),
           function(x, tableName, overwrite = FALSE) {
-            callJMethod(x@sdf, "insertInto", tableName, overwrite)
+            jmode <- convertToJSaveMode(ifelse(overwrite, "overwrite", "append"))
+            write <- callJMethod(x@sdf, "write")
+            write <- callJMethod(write, "mode", jmode)
+            callJMethod(write, "insertInto", tableName)
           })
 
 #' Cache
@@ -1948,18 +1951,16 @@ setMethod("write.df",
               source <- callJMethod(sqlContext, "getConf", "spark.sql.sources.default",
                                     "org.apache.spark.sql.parquet")
             }
-            allModes <- c("append", "overwrite", "error", "ignore")
-            # nolint start
-            if (!(mode %in% allModes)) {
-              stop('mode should be one of "append", "overwrite", "error", "ignore"')
-            }
-            # nolint end
-            jmode <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "saveMode", mode)
+            jmode <- convertToJSaveMode(mode)
             options <- varargsToEnv(...)
             if (!is.null(path)) {
                 options[["path"]] <- path
             }
-            callJMethod(df@sdf, "save", source, jmode, options)
+            write <- callJMethod(df@sdf, "write")
+            write <- callJMethod(write, "format", source)
+            write <- callJMethod(write, "mode", jmode)
+            write <- callJMethod(write, "options", options)
+            write <- callJMethod(write, "save")
           })
 
 #' @rdname write.df
@@ -2013,15 +2013,14 @@ setMethod("saveAsTable",
               source <- callJMethod(sqlContext, "getConf", "spark.sql.sources.default",
                                     "org.apache.spark.sql.parquet")
             }
-            allModes <- c("append", "overwrite", "error", "ignore")
-            # nolint start
-            if (!(mode %in% allModes)) {
-              stop('mode should be one of "append", "overwrite", "error", "ignore"')
-            }
-            # nolint end
-            jmode <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "saveMode", mode)
+            jmode <- convertToJSaveMode(mode)
             options <- varargsToEnv(...)
-            callJMethod(df@sdf, "saveAsTable", tableName, source, jmode, options)
+
+            write <- callJMethod(df@sdf, "write")
+            write <- callJMethod(write, "format", source)
+            write <- callJMethod(write, "mode", jmode)
+            write <- callJMethod(write, "options", options)
+            callJMethod(write, "saveAsTable", tableName)
           })
 
 #' summary
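
Note: all three write paths above (insertInto, write.df, saveAsTable) now build the same
DataFrameWriter chain on the JVM side instead of calling the deprecated save/saveAsTable/insertInto
overloads on DataFrame directly; write.df copies the path into options and lets save() resolve it.
A minimal sketch of the user-facing calls that exercise each path (the session, path, and table
names below are illustrative, not part of this patch):

    # assumes an existing sqlContext
    df <- read.df(sqlContext, "examples/src/main/resources/people.json", "json")
    write.df(df, path = "people.parquet", source = "parquet", mode = "overwrite")
    saveAsTable(df, tableName = "people", source = "parquet", mode = "error")
    insertInto(df, tableName = "people", overwrite = TRUE)  # maps to SaveMode "overwrite"
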
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 9243d70e66f75acef4eb4b74933584fcd49246f1..ccc683d86a3e5ea7d65a99ff0970e507ffd7e0e1 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -256,9 +256,12 @@ jsonFile <- function(sqlContext, path) {
 
 # TODO: support schema
 jsonRDD <- function(sqlContext, rdd, schema = NULL, samplingRatio = 1.0) {
+  .Deprecated("read.json")
   rdd <- serializeToString(rdd)
   if (is.null(schema)) {
-    sdf <- callJMethod(sqlContext, "jsonRDD", callJMethod(getJRDD(rdd), "rdd"), samplingRatio)
+    read <- callJMethod(sqlContext, "read")
+    # samplingRatio is deprecated and ignored on this code path
+    sdf <- callJMethod(read, "json", callJMethod(getJRDD(rdd), "rdd"))
     dataFrame(sdf)
   } else {
     stop("not implemented")
@@ -289,10 +292,7 @@ read.parquet <- function(sqlContext, path) {
 # TODO: Implement saveasParquetFile and write examples for both
 parquetFile <- function(sqlContext, ...) {
   .Deprecated("read.parquet")
-  # Allow the user to have a more flexible definiton of the text file path
-  paths <- lapply(list(...), function(x) suppressWarnings(normalizePath(x)))
-  sdf <- callJMethod(sqlContext, "parquetFile", paths)
-  dataFrame(sdf)
+  read.parquet(sqlContext, unlist(list(...)))
 }
 
 #' SQL Query
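
Note: jsonRDD() is now deprecated in favor of read.json, and parquetFile() simply delegates to
read.parquet. A sketch of the preferred entry points (paths are illustrative):

    df1 <- read.json(sqlContext, "examples/src/main/resources/people.json")
    df2 <- read.parquet(sqlContext, "people.parquet")
    # the deprecated wrappers still work, but now emit deprecation warnings
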
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 356bcee3cf5c625ce64b4d5ddbb75a2d5772c063..3ffd9a9890b2e2226dce0df800d53bc9de34974b 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -209,7 +209,7 @@ setMethod("cast",
 setMethod("%in%",
           signature(x = "Column"),
           function(x, table) {
-            jc <- callJMethod(x@jc, "in", as.list(table))
+            jc <- callJMethod(x@jc, "isin", as.list(table))
             return(column(jc))
           })
 
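Note: %in% now maps to Column.isin, the JVM-side replacement for the old "in" method. A minimal
usage sketch (the data frame is illustrative):

    df <- createDataFrame(sqlContext, data.frame(name = c("a", "b", "c"), age = c(19, 23, 30)))
    collect(filter(df, df$age %in% c(19, 30)))  # keeps rows "a" and "c"
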
diff --git a/R/pkg/R/utils.R b/R/pkg/R/utils.R
index 43105aaa38424b3f25e0bca683c105d238e88ffc..aa386e5da933b9cb0cc28a4dc333e76856cf159c 100644
--- a/R/pkg/R/utils.R
+++ b/R/pkg/R/utils.R
@@ -641,3 +641,12 @@ assignNewEnv <- function(data) {
 splitString <- function(input) {
   Filter(nzchar, unlist(strsplit(input, ",|\\s")))
 }
+
+convertToJSaveMode <- function(mode) {
+  allModes <- c("append", "overwrite", "error", "ignore")
+  if (!(mode %in% allModes)) {
+    stop('mode should be one of "append", "overwrite", "error", "ignore"')  # nolint
+  }
+  jmode <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "saveMode", mode)
+  jmode
+}
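
Note: convertToJSaveMode centralizes the mode validation that was previously duplicated in
write.df and saveAsTable. A usage sketch (df here stands in for an existing DataFrame):

    jmode <- convertToJSaveMode("overwrite")  # returns a JVM SaveMode object
    write <- callJMethod(df@sdf, "write")
    write <- callJMethod(write, "mode", jmode)
    # convertToJSaveMode("replace")           # would stop(): not one of the four valid modes
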
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 9e5d0ebf60720154d95408defb9ca8f80b4d3c0a..ebe8faa34cf7dae0c17426f268a6e48f92e60a02 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -423,12 +423,12 @@ test_that("read/write json files", {
 test_that("jsonRDD() on a RDD with json string", {
   rdd <- parallelize(sc, mockLines)
   expect_equal(count(rdd), 3)
-  df <- jsonRDD(sqlContext, rdd)
+  df <- suppressWarnings(jsonRDD(sqlContext, rdd))
   expect_is(df, "DataFrame")
   expect_equal(count(df), 3)
 
   rdd2 <- flatMap(rdd, function(x) c(x, x))
-  df <- jsonRDD(sqlContext, rdd2)
+  df <- suppressWarnings(jsonRDD(sqlContext, rdd2))
   expect_is(df, "DataFrame")
   expect_equal(count(df), 6)
 })
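
Note: the suppressWarnings() wrappers keep the existing assertions passing now that jsonRDD()
warns. If desired, the deprecation itself could also be asserted directly with testthat
(a sketch, not part of this patch):

    expect_warning(jsonRDD(sqlContext, rdd), "deprecated")
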
diff --git a/dev/run-tests.py b/dev/run-tests.py
index acc9450586fe3f8f7bf6aac60149ee96ad57475b..8726889cbc777479ad4236715ca99739d7be3cf5 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -425,13 +425,12 @@ def run_build_tests():
 
 
 def run_sparkr_tests():
-    # set_title_and_block("Running SparkR tests", "BLOCK_SPARKR_UNIT_TESTS")
+    set_title_and_block("Running SparkR tests", "BLOCK_SPARKR_UNIT_TESTS")
 
-    # if which("R"):
-    #     run_cmd([os.path.join(SPARK_HOME, "R", "run-tests.sh")])
-    # else:
-    #     print("Ignoring SparkR tests as R was not found in PATH")
-    pass
+    if which("R"):
+        run_cmd([os.path.join(SPARK_HOME, "R", "run-tests.sh")])
+    else:
+        print("Ignoring SparkR tests as R was not found in PATH")
 
 
 def parse_opts():