class SparkEnv extends Logging
Instance Constructors
-
new
SparkEnv(executorId: String, rpcEnv: RpcEnv, serializer: Serializer, closureSerializer: Serializer, cacheManager: CacheManager, mapOutputTracker: MapOutputTracker, shuffleManager: ShuffleManager, broadcastManager: BroadcastManager, blockTransferService: BlockTransferService, blockManager: BlockManager, securityManager: SecurityManager, httpFileServer: HttpFileServer, sparkFilesDir: String, metricsSystem: MetricsSystem, shuffleMemoryManager: ShuffleMemoryManager, executorMemoryManager: ExecutorMemoryManager, outputCommitCoordinator: OutputCommitCoordinator, conf: SparkConf)
Value Members
-
final
def
!=(arg0: AnyRef): Boolean
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: AnyRef): Boolean
-
final
def
==(arg0: Any): Boolean
-
final
def
asInstanceOf[T0]: T0
-
val
blockManager: BlockManager
-
val
blockTransferService: BlockTransferService
-
val
broadcastManager: BroadcastManager
-
val
cacheManager: CacheManager
-
def
clone(): AnyRef
-
val
closureSerializer: Serializer
-
final
def
eq(arg0: AnyRef): Boolean
-
def
equals(arg0: Any): Boolean
-
val
executorId: String
-
val
executorMemoryManager: ExecutorMemoryManager
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
def
hashCode(): Int
-
val
httpFileServer: HttpFileServer
-
final
def
isInstanceOf[T0]: Boolean
-
def
isTraceEnabled(): Boolean
-
def
log: Logger
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
-
def
logDebug(msg: ⇒ String): Unit
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
-
def
logError(msg: ⇒ String): Unit
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
-
def
logInfo(msg: ⇒ String): Unit
-
def
logName: String
-
def
logTrace(msg: ⇒ String, throwable: Throwable): Unit
-
def
logTrace(msg: ⇒ String): Unit
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
-
def
logWarning(msg: ⇒ String): Unit
-
val
mapOutputTracker: MapOutputTracker
-
val
metricsSystem: MetricsSystem
-
final
def
ne(arg0: AnyRef): Boolean
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
val
outputCommitCoordinator: OutputCommitCoordinator
-
val
securityManager: SecurityManager
-
val
shuffleManager: ShuffleManager
-
val
shuffleMemoryManager: ShuffleMemoryManager
-
val
sparkFilesDir: String
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
Deprecated Value Members
-
val
actorSystem: ActorSystem
Inherited from AnyRef
Inherited from Any
:: DeveloperApi :: Holds all the runtime environment objects for a running Spark instance (either master or worker), including the serializer, Akka actor system, block manager, map output tracker, etc. Currently Spark code finds the SparkEnv through a global variable, so all the threads can access the same SparkEnv. It can be accessed by SparkEnv.get (e.g. after creating a SparkContext).
NOTE: This is not intended for external use. This is exposed for Shark and may be made private in a future release.