diff --git a/python/pyspark/accumulators.py b/python/pyspark/accumulators.py
index 5a9269f9bb251484773c3c8b9f805760bb38aff8..61fcbbd37679fa806ff380b07cf1a941ff71bbd7 100644
--- a/python/pyspark/accumulators.py
+++ b/python/pyspark/accumulators.py
@@ -25,7 +25,8 @@
 >>> a.value
 13
 
->>> class VectorAccumulatorParam(object):
+>>> from pyspark.accumulators import AccumulatorParam
+>>> class VectorAccumulatorParam(AccumulatorParam):
 ...     def zero(self, value):
 ...         return [0.0] * len(value)
 ...     def addInPlace(self, val1, val2):
@@ -61,7 +62,6 @@
 Traceback (most recent call last):
 Exception:...
 """
-from abc import ABCMeta, abstractmethod
 import struct
 import SocketServer
 import threading
@@ -138,23 +138,20 @@ class AccumulatorParam(object):
     """
     Helper object that defines how to accumulate values of a given type.
     """
-    __metaclass__ = ABCMeta
 
-    @abstractmethod
     def zero(self, value):
         """
         Provide a "zero value" for the type, compatible in dimensions with the
         provided C{value} (e.g., a zero vector)
         """
-        return
+        raise NotImplementedError
 
-    @abstractmethod
     def addInPlace(self, value1, value2):
         """
         Add two values of the accumulator's data type, returning a new value;
         for efficiency, can also update C{value1} in place and return it.
         """
-        return
+        raise NotImplementedError
 
 
 class AddingAccumulatorParam(AccumulatorParam):
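
For reference, the usage pattern this patch keeps is the one shown in the updated doctest: a custom param subclasses AccumulatorParam and overrides both zero and addInPlace. The behavioral difference is that a missing override now raises NotImplementedError only when the inherited method is called, rather than failing at instantiation as the old ABCMeta/abstractmethod setup did. A minimal sketch, assuming an already-created SparkContext bound to a variable named sc (not part of this diff):

    from pyspark.accumulators import AccumulatorParam

    class VectorAccumulatorParam(AccumulatorParam):
        def zero(self, value):
            # Zero vector with the same dimension as the initial value.
            return [0.0] * len(value)

        def addInPlace(self, val1, val2):
            # Update val1 in place and return it, as the contract allows.
            for i in range(len(val1)):
                val1[i] += val2[i]
            return val1

    # `sc` is an assumed, already-running SparkContext.
    va = sc.accumulator([1.0, 2.0, 3.0], VectorAccumulatorParam())
    va += [1.0, 1.0, 1.0]
    print(va.value)  # [2.0, 3.0, 4.0]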