SparkConf is responsible for Spark's configuration parameters, maintaining all of the configuration properties in a ConcurrentHashMap.
import java.util.concurrent.ConcurrentHashMap

import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._

class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Serializable {

  import SparkConf._

  /** Create a SparkConf that loads defaults from system properties and the classpath */
  def this() = this(true)

  // Thread-safe backing store for all configuration key-value pairs.
  private val settings = new ConcurrentHashMap[String, String]()

  // Lazily built reader that falls back to environment variables when a key is absent from `settings`.
  @transient private lazy val reader: ConfigReader = {
    val _reader = new ConfigReader(new SparkConfigProvider(settings))
    _reader.bindEnv(new ConfigProvider {
      override def get(key: String): Option[String] = Option(getenv(key))
    })
    _reader
  }

  // ...
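To make the role of `settings` concrete, here is a minimal usage sketch (the key `spark.demo.flag` and the object name are illustrative, not part of Spark): every setter writes a key-value pair into the map, and the getters read it back out. With the no-arg constructor (`loadDefaults = true`), any JVM system property whose name starts with `spark.` is also copied in.

import org.apache.spark.SparkConf

object SparkConfDemo {
  def main(args: Array[String]): Unit = {
    // loadDefaults = true: "spark."-prefixed system properties are loaded too.
    System.setProperty("spark.demo.flag", "on")

    val conf = new SparkConf()
      .setAppName("conf-demo")              // stores spark.app.name
      .setMaster("local[*]")                // stores spark.master
      .set("spark.executor.memory", "2g")   // stores an arbitrary property

    println(conf.get("spark.app.name"))          // conf-demo
    println(conf.get("spark.demo.flag"))         // on (picked up from system properties)
    println(conf.get("spark.executor.memory"))   // 2g
    println(conf.getOption("spark.no.such.key")) // None
  }
}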
Source: 《深入理解Spark——核心思想与源码分析》 (*Understanding Spark in Depth: Core Ideas and Source Code Analysis*), Part 3, Chapter 3: The Initialization of SparkContext.
Original article: https://www.cnblogs.com/sunrunzhi/p/10169868.html