mongo-spark-读取不同的库数据和写入不同的库中
2024-08-20 11:25:13
mongo-spark-读取不同的库数据和写入不同的库中
package com.example.app
import com.mongodb.spark.config.{ReadConfig, WriteConfig}
import com.mongodb.spark.sql._
import org.apache.spark.sql.SparkSession
/**
 * Reads a DataFrame from each of two distinct MongoDB instances and writes
 * each one back (mode "overwrite") to the instance it came from, using a
 * separate ReadConfig/WriteConfig per database.
 *
 * Expected arguments:
 *   args(0) — host of the first MongoDB instance
 *   args(1) — host of the second MongoDB instance
 *   args(2) — Spark master URI
 */
object App {
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 3, "usage: App <mongoHost1> <mongoHost2> <sparkMasterUri>")
    val mongoHost1 = args(0)
    val mongoHost2 = args(1)
    // NOTE(review): read but never passed to the builder — confirm whether
    // .master(sparkMasterUri) was intended, or whether the master is supplied
    // externally via spark-submit.
    val sparkMasterUri = args(2)

    /** Builds a MongoDB connection string of the form <uri>/<database>.<collection>. */
    def makeMongoURI(uri: String, database: String, collection: String): String =
      s"$uri/$database.$collection"

    val mongoURI1 = s"mongodb://$mongoHost1:27017"
    val mongoURI2 = s"mongodb://$mongoHost2:27017"

    // Fixed: the original calls had unbalanced quotes ("MyColletion1,"df")
    // and did not compile; database and collection are now separate literals.
    val confDb1 = makeMongoURI(mongoURI1, "MyColletion1", "df")
    val confDb2 = makeMongoURI(mongoURI2, "MyColletion2", "df")

    // One read/write config pair per MongoDB instance.
    val writeDb1: WriteConfig = WriteConfig(Map("uri" -> confDb1))
    val readDb1: ReadConfig = ReadConfig(Map("uri" -> confDb1))
    val writeDb2: WriteConfig = WriteConfig(Map("uri" -> confDb2))
    val readDb2: ReadConfig = ReadConfig(Map("uri" -> confDb2))

    val spark = SparkSession
      .builder
      .appName("AppMongo")
      .config("spark.worker.cleanup.enabled", "true")
      .config("spark.scheduler.mode", "FAIR")
      .getOrCreate()

    try {
      // Read each DataFrame from its own instance, then overwrite it in place.
      val df1 = spark.read.mongo(readDb1)
      val df2 = spark.read.mongo(readDb2)
      df1.write.mode("overwrite").mongo(writeDb1)
      df2.write.mode("overwrite").mongo(writeDb2)
    } finally {
      // Release the session even if a read/write fails (was missing originally).
      spark.stop()
    }
  }
}
最新文章
- 【转】Java并发编程:Lock
- [NHibernate]一对多关系(关联查询)
- Python--过滤Mysql慢日志
- com.alibaba.fastjson.JSONObject学习
- expect入门--自动化linux交互式命令
- vc判断文件是否存在
- BZOJ 1043 HAOI2008 下落的圆盘 计算几何
- VIM 及其插件使用快捷键汇总
- Windows下载地址
- web开发与IC卡读卡器
- mysql数据库基本使用(增删改查)
- LeetCode专题-Python实现之第26题:Remove Duplicates from Sorted Array
- Java基础static的探究
- PXE自动装机
- keil_v5生成.bin文件方法
- [Hibernate] hibernate.cfg.xml 配置文件的一些设置
- Arcmap连接数据库需管理员获取许可——创建ArcSDE连接文件
- (转载)centos7启用端口
- Spring Boot 之 @Value("${xxx}") 注解获取配置文件内容
- multiMap遍历方法