public class SparkEnv extends Object implements Logging
NOTE: This is not intended for external use. This is exposed for Shark and may be made private in a future release.
| Constructor and Description |
| --- |
SparkEnv(String executorId,
akka.actor.ActorSystem actorSystem,
Serializer serializer,
Serializer closureSerializer,
CacheManager cacheManager,
MapOutputTracker mapOutputTracker,
org.apache.spark.shuffle.ShuffleManager shuffleManager,
BroadcastManager broadcastManager,
org.apache.spark.network.BlockTransferService blockTransferService,
BlockManager blockManager,
SecurityManager securityManager,
HttpFileServer httpFileServer,
String sparkFilesDir,
MetricsSystem metricsSystem,
org.apache.spark.shuffle.ShuffleMemoryManager shuffleMemoryManager,
SparkConf conf) |
| Modifier and Type | Method and Description |
| --- | --- |
akka.actor.ActorSystem |
actorSystem() |
BlockManager |
blockManager() |
org.apache.spark.network.BlockTransferService |
blockTransferService() |
BroadcastManager |
broadcastManager() |
CacheManager |
cacheManager() |
Serializer |
closureSerializer() |
SparkConf |
conf() |
static SparkEnv |
createDriverEnv(SparkConf conf,
boolean isLocal,
LiveListenerBus listenerBus)
Create a SparkEnv for the driver.
|
static SparkEnv |
createExecutorEnv(SparkConf conf,
String executorId,
String hostname,
int port,
int numCores,
boolean isLocal,
akka.actor.ActorSystem actorSystem)
Create a SparkEnv for an executor.
|
java.net.Socket |
createPythonWorker(String pythonExec,
scala.collection.immutable.Map<String,String> envVars) |
void |
destroyPythonWorker(String pythonExec,
scala.collection.immutable.Map<String,String> envVars,
java.net.Socket worker) |
static String |
driverActorSystemName() |
static scala.collection.immutable.Map<String,scala.collection.Seq<scala.Tuple2<String,String>>> |
environmentDetails(SparkConf conf,
String schedulingMode,
scala.collection.Seq<String> addedJars,
scala.collection.Seq<String> addedFiles)
Returns a map representation of JVM information, Spark properties, system properties, and
class paths.
|
static String |
executorActorSystemName() |
String |
executorId() |
static SparkEnv |
get()
Returns the SparkEnv.
|
static SparkEnv |
getThreadLocal()
Returns the ThreadLocal SparkEnv.
|
java.util.concurrent.ConcurrentMap<String,Object> |
hadoopJobMetadata() |
HttpFileServer |
httpFileServer() |
boolean |
isStopped() |
MapOutputTracker |
mapOutputTracker() |
MetricsSystem |
metricsSystem() |
void |
releasePythonWorker(String pythonExec,
scala.collection.immutable.Map<String,String> envVars,
java.net.Socket worker) |
SecurityManager |
securityManager() |
Serializer |
serializer() |
static void |
set(SparkEnv e) |
org.apache.spark.shuffle.ShuffleManager |
shuffleManager() |
org.apache.spark.shuffle.ShuffleMemoryManager |
shuffleMemoryManager() |
String |
sparkFilesDir() |
void |
stop() |
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public SparkEnv(String executorId, akka.actor.ActorSystem actorSystem, Serializer serializer, Serializer closureSerializer, CacheManager cacheManager, MapOutputTracker mapOutputTracker, org.apache.spark.shuffle.ShuffleManager shuffleManager, BroadcastManager broadcastManager, org.apache.spark.network.BlockTransferService blockTransferService, BlockManager blockManager, SecurityManager securityManager, HttpFileServer httpFileServer, String sparkFilesDir, MetricsSystem metricsSystem, org.apache.spark.shuffle.ShuffleMemoryManager shuffleMemoryManager, SparkConf conf)
public static String driverActorSystemName()
public static String executorActorSystemName()
public static void set(SparkEnv e)
public static SparkEnv get()
public static SparkEnv getThreadLocal()
public static SparkEnv createDriverEnv(SparkConf conf, boolean isLocal, LiveListenerBus listenerBus)
public static SparkEnv createExecutorEnv(SparkConf conf, String executorId, String hostname, int port, int numCores, boolean isLocal, akka.actor.ActorSystem actorSystem)
public static scala.collection.immutable.Map<String,scala.collection.Seq<scala.Tuple2<String,String>>> environmentDetails(SparkConf conf, String schedulingMode, scala.collection.Seq<String> addedJars, scala.collection.Seq<String> addedFiles)
public String executorId()
public akka.actor.ActorSystem actorSystem()
public Serializer serializer()
public Serializer closureSerializer()
public CacheManager cacheManager()
public MapOutputTracker mapOutputTracker()
public org.apache.spark.shuffle.ShuffleManager shuffleManager()
public BroadcastManager broadcastManager()
public org.apache.spark.network.BlockTransferService blockTransferService()
public BlockManager blockManager()
public SecurityManager securityManager()
public HttpFileServer httpFileServer()
public String sparkFilesDir()
public MetricsSystem metricsSystem()
public org.apache.spark.shuffle.ShuffleMemoryManager shuffleMemoryManager()
public SparkConf conf()
public boolean isStopped()
public java.util.concurrent.ConcurrentMap<String,Object> hadoopJobMetadata()
public void stop()
public java.net.Socket createPythonWorker(String pythonExec, scala.collection.immutable.Map<String,String> envVars)
public void destroyPythonWorker(String pythonExec, scala.collection.immutable.Map<String,String> envVars, java.net.Socket worker)
public void releasePythonWorker(String pythonExec, scala.collection.immutable.Map<String,String> envVars, java.net.Socket worker)