import com.huinong.truffle.push.process.domain.common.constant.Constants;
import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.io.Serializable;
import java.util.*;
@Slf4j
public class SynchronizeData implements Serializable{
public static SparkSession getSparkSession(String mongoUrl ,String dbName ,String outputDabase ,String outputCollection){
SparkSession spark = null;
try{
spark = SparkSession.builder().master("local[*]")
.appName("SparkHive")
.config("spark.sql.warehouse.dir", Constants.WAREHOUSE_DIR).enableHiveSupport()
.config("spark.mongodb.output.uri", mongoUrl + dbName)
.config("spark.mongodb.output.database",outputDabase)
.config("spark.mongodb.output.collection",outputCollection)
.getOrCreate();
spark.sql("show databases").show();
spark.sql("show tables").show();
}catch (Exception e){
log.error("创建spark session失败",e);
}
return spark;
}
public static void sync(String sql ,String mongoUrl ,String dbName ,String outputDabase ,String outputCollection) throws Exception{
SparkSession spark = getSparkSession(mongoUrl ,dbName ,outputDabase ,outputCollection);
JavaSparkContext jc = new JavaSparkContext(spark.sparkContext());
System.out.println("===========================开始.........."+System.currentTimeMillis());
Dataset<Row> dataset = spark.sql(sql);
if (dataset != null && dataset.count() > 0){
MongoSpark.save(dataset);
}
System.out.println("===========================结束.........."+System.currentTimeMillis());
jc.close();
}
}
调用:
private String mongoUrl = "mongodb://10.10.3.241:27017/";
public void synchronizeUserinfos() throws Exception{
String sql = "select * from hn_application.push_userinfos";
SynchronizeData.sync(sql , mongoUrl, "push_userinfos" ,"push" ,"push_userinfos");
}
public static final String WAREHOUSE_DIR="/user/hive/warehouse";
参考资料:
https://www.cnblogs.com/kaiwen1/p/9179035.html
资料说要把集群三个配置文件放到resource目录下,我这边只放hive-site.xml文件没有问题。
本文介绍了一种使用Apache Spark进行数据处理并与MongoDB进行数据同步的方法。通过具体代码示例,展示了如何配置SparkSession连接MongoDB,执行SQL查询,并将结果集保存到MongoDB中。特别关注了在本地环境中配置SparkSession的过程,以及如何处理可能发生的异常。
4244

被折叠的评论
为什么被折叠?



