【发布时间】:2017-01-14 21:18:43
【问题描述】:
我想在提交 spark 作业时加载一个属性配置文件,这样我可以根据不同的环境加载正确的配置,例如测试环境或产品环境。但是我不知道属性文件放在哪里,这里是加载属性文件的代码:
object HbaseRDD {
  // Fallback ZooKeeper quorum used when hbase.properties cannot be loaded.
  val QUORUM_DEFAULT = "172.16.1.10,172.16.1.11,172.16.1.12"
  // Fallback timeout value (milliseconds, as a string) for the same case.
  val TIMEOUT_DEFAULT = "120000"

  // Loads (quorum, timeout) from "hbase.properties" in the JVM's current
  // working directory. When submitting to a cluster, ship the file with
  // `spark-submit --files hbase.properties` so it is placed in each
  // executor's working directory. Any failure is captured by Try and the
  // caller falls back to the defaults above.
  val config = Try {
    val prop = new Properties()
    val in = new FileInputStream("hbase.properties")
    try {
      prop.load(in)
    } finally {
      in.close() // FIX: the input stream was previously never closed (leak)
    }
    (
      prop.getProperty("hbase.zookeeper.quorum", QUORUM_DEFAULT),
      prop.getProperty("timeout", TIMEOUT_DEFAULT)
    )
  }

  /**
   * Creates an RDD over the rows of an HBase table.
   *
   * @param tableName name of the HBase table to scan
   * @param appName   Spark application name (default "test")
   * @param master    Spark master URL (default "spark://node0:7077")
   * @return an RDD of (ImmutableBytesWritable, Result) pairs from the table
   */
  def getHbaseRDD(tableName: String, appName: String = "test", master: String = "spark://node0:7077") = {
    val sparkConf = new SparkConf().setAppName(appName).setMaster(master)
    val sc = new SparkContext(sparkConf)
    val conf = HBaseConfiguration.create()
    config match {
      case Success((quorum, timeout)) =>
        conf.set("hbase.zookeeper.quorum", quorum)
        conf.set("timeout", timeout)
      case Failure(ex) =>
        ex.printStackTrace()
        // FIX: key was misspelled "hbase.zookeepr.quorum", so the fallback
        // quorum was silently ignored whenever the properties file failed
        // to load.
        conf.set("hbase.zookeeper.quorum", QUORUM_DEFAULT)
        conf.set("timeout", TIMEOUT_DEFAULT)
    }
    conf.set(TableInputFormat.INPUT_TABLE, tableName)
    val hbaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
    hbaseRDD
  }
}
问题是：我应该把 hbase.properties 文件放在哪里，Spark 才能找到并加载它？或者如何通过 spark-submit 指定该文件？
【问题讨论】:
-
那么什么参数技巧对你有用?你在哪里放置了属性文件?
标签: apache-spark