Commit 3c4c1f96 authored by Iulian Dragos, committed by Patrick Wendell

[SPARK-7726] Fix Scaladoc false errors

Visibility rules for static members are different in Scala and Java, and this case requires an explicit static import. Even though these are Java files, they are run through scaladoc, which enforces Scala rules.
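To make the rule concrete, here is an illustrative sketch (the package example and the classes Parent and Child are hypothetical, not from the Spark sources). javac lets a subclass refer to an inherited static nested type such as the enum Type without any import, but scaladoc resolves these Java sources under Scala's rules and flags the bare reference, which is why each file in the diff below gains an explicit static import of BlockTransferMessage.Type.

// Parent.java -- hypothetical class standing in for BlockTransferMessage
package example;

public abstract class Parent {
  /** Static nested enum; subclasses inherit it into scope under Java's rules. */
  public enum Type { OPEN, UPLOAD }

  protected abstract Type type();
}

// Child.java -- hypothetical subclass standing in for OpenBlocks and friends
package example;

// Needed by scaladoc: without this, the bare reference to Type below
// compiles under javac but is flagged as an error under Scala's rules.
import static example.Parent.Type;

public class Child extends Parent {
  @Override
  protected Type type() {
    return Type.OPEN;
  }
}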

Also reverted the commit that reverted the upgrade to Scala 2.11.6.

Author: Iulian Dragos <jaguarul@gmail.com>

Closes #6260 from dragos/issue/scaladoc-false-error and squashes the following commits:

f2e998e [Iulian Dragos] Revert "[HOTFIX] Revert "[SPARK-7092] Update spark scala version to 2.11.6""
0bad052 [Iulian Dragos] Fix scaladoc faux-error.
parent 7b16e9f2
@@ -24,6 +24,9 @@ import io.netty.buffer.ByteBuf;
 
 import org.apache.spark.network.protocol.Encoders;
 
+// Needed by ScalaDoc. See SPARK-7726
+import static org.apache.spark.network.shuffle.protocol.BlockTransferMessage.Type;
+
 /** Request to read a set of blocks. Returns {@link StreamHandle}. */
 public class OpenBlocks extends BlockTransferMessage {
   public final String appId;
......
@@ -22,6 +22,9 @@ import io.netty.buffer.ByteBuf;
 
 import org.apache.spark.network.protocol.Encoders;
 
+// Needed by ScalaDoc. See SPARK-7726
+import static org.apache.spark.network.shuffle.protocol.BlockTransferMessage.Type;
+
 /**
  * Initial registration message between an executor and its local shuffle server.
  * Returns nothing (empty byte array).
......
@@ -20,6 +20,9 @@ package org.apache.spark.network.shuffle.protocol;
 
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
 
+// Needed by ScalaDoc. See SPARK-7726
+import static org.apache.spark.network.shuffle.protocol.BlockTransferMessage.Type;
+
 /**
  * Identifier for a fixed number of chunks to read from a stream created by an "open blocks"
  * message. This is used by {@link org.apache.spark.network.shuffle.OneForOneBlockFetcher}.
......
@@ -24,6 +24,9 @@ import io.netty.buffer.ByteBuf;
 
 import org.apache.spark.network.protocol.Encoders;
 
+// Needed by ScalaDoc. See SPARK-7726
+import static org.apache.spark.network.shuffle.protocol.BlockTransferMessage.Type;
+
 /** Request to upload a block with a certain StorageLevel. Returns nothing (empty byte array). */
 public class UploadBlock extends BlockTransferMessage {
......
@@ -1799,9 +1799,9 @@
         <property><name>scala-2.11</name></property>
       </activation>
       <properties>
-        <scala.version>2.11.2</scala.version>
+        <scala.version>2.11.6</scala.version>
         <scala.binary.version>2.11</scala.binary.version>
-        <jline.version>2.12</jline.version>
+        <jline.version>2.12.1</jline.version>
         <jline.groupid>jline</jline.groupid>
       </properties>
     </profile>
......
@@ -1129,7 +1129,7 @@ class SparkIMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings
     def apply(line: String): Result = debugging(s"""parse("$line")""") {
       var isIncomplete = false
-      currentRun.reporting.withIncompleteHandler((_, _) => isIncomplete = true) {
+      currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true) {
         reporter.reset()
         val trees = newUnitParser(line).parseStats()
         if (reporter.hasErrors) Error
......