object SparkContext extends Logging

The SparkContext object contains a number of implicit conversions and parameters for use with various Spark features.
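These implicits are typically brought into scope with a wildcard import of this object. A minimal sketch of the setup the examples below assume (a Spark 1.x application with a local SparkContext named sc; the app name and master URL are illustrative):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.SparkContext._  // brings the implicit conversions listed below into scope

    val conf = new SparkConf().setAppName("ImplicitsDemo").setMaster("local[2]")
    val sc = new SparkContext(conf)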
Value Members
- final def !=(arg0: AnyRef): Boolean
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: AnyRef): Boolean
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- implicit def boolToBoolWritable(b: Boolean): BooleanWritable
- implicit def booleanWritableConverter(): WritableConverter[Boolean]
- implicit def bytesToBytesWritable(aob: Array[Byte]): BytesWritable
- implicit def bytesWritableConverter(): WritableConverter[Array[Byte]]
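The *ToWritable conversions let plain Scala values appear where Hadoop Writable types are expected. A minimal sketch, assuming the wildcard import shown above:

    import org.apache.hadoop.io.{BooleanWritable, BytesWritable}
    import org.apache.spark.SparkContext._

    val flag: BooleanWritable = true                 // via boolToBoolWritable
    val blob: BytesWritable   = Array[Byte](1, 2, 3) // via bytesToBytesWritable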
- def clone(): AnyRef
- implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions
- implicit def doubleToDoubleWritable(d: Double): DoubleWritable
- implicit def doubleWritableConverter(): WritableConverter[Double]
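doubleRDDToDoubleRDDFunctions adds one-pass numeric aggregations to an RDD[Double]. A sketch, assuming the SparkContext sc from above:

    import org.apache.spark.SparkContext._

    val xs = sc.parallelize(Seq(1.0, 2.0, 3.0, 4.0))
    println(xs.mean())  // 2.5
    println(xs.stdev()) // standard deviation
    println(xs.stats()) // count, mean, stdev, max and min in a single pass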
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- implicit def floatToFloatWritable(f: Float): FloatWritable
- implicit def floatWritableConverter(): WritableConverter[Float]
- final def getClass(): Class[_]
- def hashCode(): Int
- implicit def intToIntWritable(i: Int): IntWritable
- implicit def intWritableConverter(): WritableConverter[Int]
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def jarOfClass(cls: Class[_]): Option[String]
- def jarOfObject(obj: AnyRef): Option[String]
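jarOfClass and jarOfObject return the path of the JAR a class or object was loaded from, which is handy when telling Spark which JARs to ship to executors. A sketch; MyJob is a hypothetical application class, and jarOfClass returns None when the class was not loaded from a JAR (e.g. from a development classpath):

    import org.apache.spark.{SparkConf, SparkContext}

    // MyJob is a placeholder for your own application class.
    val jars = SparkContext.jarOfClass(classOf[MyJob]).toSeq // Option[String] -> Seq

    val conf = new SparkConf().setAppName("JarDemo").setJars(jars)
    val sc = new SparkContext(conf)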
- def log: Logger
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
- def logDebug(msg: ⇒ String): Unit
- def logError(msg: ⇒ String, throwable: Throwable): Unit
- def logError(msg: ⇒ String): Unit
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
- def logInfo(msg: ⇒ String): Unit
- def logName: String
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
- def logTrace(msg: ⇒ String): Unit
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
- def logWarning(msg: ⇒ String): Unit
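The log* members come from the Logging trait that this object extends; in Spark 1.x that trait could also be mixed into user classes. A sketch; IngestJob is a hypothetical class, and the by-name msg parameter means a message is only built when its log level is enabled:

    import org.apache.spark.Logging

    class IngestJob extends Logging {
      def run(): Unit = {
        logInfo("starting ingest") // msg is by-name, so it is evaluated lazily
        try {
          // ... work ...
        } catch {
          case e: Exception => logError("ingest failed", e)
        }
      }
    }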
- implicit def longToLongWritable(l: Long): LongWritable
- implicit def longWritableConverter(): WritableConverter[Long]
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]): DoubleRDDFunctions
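numericRDDToDoubleRDDFunctions extends the same statistics to any RDD whose element type has a Numeric instance, such as RDD[Int]. A sketch, assuming sc:

    import org.apache.spark.SparkContext._

    val counts = sc.parallelize(Seq(1, 2, 3, 4, 5)) // RDD[Int]
    println(counts.sum())  // 15.0 (elements are widened to Double)
    println(counts.mean()) // 3.0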
- implicit def rddToAsyncRDDActions[T](rdd: RDD[T])(implicit arg0: ClassTag[T]): AsyncRDDActions[T]
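rddToAsyncRDDActions exposes non-blocking variants of common actions that return a FutureAction (a scala.concurrent.Future). A sketch, assuming sc:

    import scala.concurrent.Await
    import scala.concurrent.duration._
    import org.apache.spark.SparkContext._

    val rdd = sc.parallelize(1 to 1000)
    val futureCount = rdd.countAsync() // job is submitted immediately
    // ... other driver-side work can run here ...
    println(Await.result(futureCount, 1.minute))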
- implicit def rddToOrderedRDDFunctions[K, V](rdd: RDD[(K, V)])(implicit arg0: Ordering[K], arg1: ClassTag[K], arg2: ClassTag[V]): OrderedRDDFunctions[K, V, (K, V)]
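rddToOrderedRDDFunctions enables key-ordered operations such as sortByKey on pair RDDs whose key type has an Ordering. A sketch, assuming sc:

    import org.apache.spark.SparkContext._

    val pairs = sc.parallelize(Seq(("b", 2), ("a", 1), ("c", 3)))
    pairs.sortByKey().collect().foreach(println) // (a,1) (b,2) (c,3)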
- implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])(implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V]
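rddToPairRDDFunctions is the conversion behind key-based operations such as reduceByKey, groupByKey and join on an RDD of pairs. A sketch, assuming sc:

    import org.apache.spark.SparkContext._

    val words = sc.parallelize(Seq("a", "b", "a"))
    val counts = words.map(w => (w, 1)).reduceByKey(_ + _)
    counts.collect().foreach(println) // (a,2) (b,1)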
- implicit def rddToSequenceFileRDDFunctions[K, V](rdd: RDD[(K, V)])(implicit arg0: (K) ⇒ Writable, arg1: ClassTag[K], arg2: (V) ⇒ Writable, arg3: ClassTag[V]): SequenceFileRDDFunctions[K, V]
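rddToSequenceFileRDDFunctions allows a pair RDD to be written as a Hadoop SequenceFile whenever both key and value types are convertible to Writable (here via intToIntWritable and stringToText). A sketch; the output path is illustrative:

    import org.apache.spark.SparkContext._

    val pairs = sc.parallelize(Seq((1, "one"), (2, "two")))
    pairs.saveAsSequenceFile("/tmp/pairs-seqfile") // keys/values wrapped as IntWritable/Text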
- implicit def stringToText(s: String): Text
- implicit def stringWritableConverter(): WritableConverter[String]
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- implicit def writableWritableConverter[T <: Writable](): WritableConverter[T]
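The *WritableConverter implicits work in the opposite direction: SparkContext.sequenceFile uses them to pick the right Writable subclass for each type parameter and to unwrap the records it reads. A sketch that reads back the file written above:

    import org.apache.spark.SparkContext._

    val restored = sc.sequenceFile[Int, String]("/tmp/pairs-seqfile")
    restored.collect().foreach(println) // (1,one) (2,two)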
Inherited from AnyRef
Inherited from Any