Problem connecting to MongoDB using Spark and Java

Asked: 2017-03-12 19:57:04

Tags: java mongodb apache-spark

I have been trying to connect to Mongo using Spark.

My Spring config is:

@Bean
public SparkConf sparkConf() {
    return new SparkConf()
            .setMaster("local[*]")
            .setAppName("test")
            .set("spark.app.id", "test")
            .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.input.database", "myDataBase")
            .set("spark.mongodb.output.database", "myDataBase");
}

@Bean
public JavaSparkContext javaSparkContext() {
    return new JavaSparkContext(sparkConf());
}

@Bean
public SQLContext sqlContext() {
    return new SQLContext(SparkSession
            .builder()
            .appName("eat")
            .master("local[*]")
            .config(sparkConf())
            .getOrCreate());
}

I am using the mongo-spark connector:

<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.0.0</version>
</dependency>

When I try to retrieve data from Mongo in my Service class like this -

ReadConfig readConfig = ReadConfig.create(sparkContext)
        .withOption("spark.mongodb.output.collection", "myCollection");
JavaRDD<Document> rdd = MongoSpark.load(sparkContext, readConfig);

I get an exception -

"Missing collection name. Set via the 'spark.mongodb.input.uri'
 or 'spark.mongodb.input.collection' property"

But if I set the SparkConf in the Spring configuration like this -

SparkConf()
        .setMaster("local[*]")
        .setAppName("test")
        .set("spark.app.id", "test")
        .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/myDataBase.myCollection")
        .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/myDataBase.myCollection")
everything works fine. But I want to work with different collections. What am I doing wrong?

------------------------- UPDATE -------------------------

Solved. The Spark context has to be created with some default collection; data can then be read from that collection as-is, and other collections can be reached by overriding the collection option on the ReadConfig. Example:

@SpringBootConfiguration
public class SparkConfiguration {

    private final String MONGO_PREFIX = "mongodb://";
    private final String MONGO_INPUT_COLLECTION = "faqs";

    @Value(value = "${spring.data.mongodb.name}")
    private String mongoName;

    @Value(value = "${spring.data.mongodb.net.bindIp}")
    private String mongoHost;

    @Bean
    public SparkSession sparkSession() {
        return SparkSession.builder()
                .master("local[*]")
                .appName("eat-spark-cluster")
                .config("spark.app.id", "Eat")
                .config("spark.mongodb.input.uri", MONGO_PREFIX.concat(mongoHost).concat("/"))
                .config("spark.mongodb.input.database", mongoName)
                .config("spark.mongodb.input.collection", MONGO_INPUT_COLLECTION)
                .getOrCreate();
    }

    @Bean
    public JavaSparkContext javaSparkContext() {
        return JavaSparkContext.fromSparkContext(sparkSession().sparkContext());
    }
}


ReadConfig readConfig = ReadConfig.create(getJavaSparkContext()).withOption("collection", "my_collection");
JavaMongoRDD<Document> placesRdd = MongoSpark.load(getJavaSparkContext(), readConfig);

return placesRdd.collect();
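
Since the original configuration also exposes an SQLContext, the same collection-specific RDD can be turned into a DataFrame and queried with SQL. This is only a minimal sketch, not part of the original solution: it assumes the SparkSession bean from the configuration above is injected as sparkSession, reuses the placeholder collection name "my_collection", and needs the org.apache.spark.sql.Dataset and org.apache.spark.sql.Row imports.

ReadConfig readConfig = ReadConfig.create(getJavaSparkContext()).withOption("collection", "my_collection");

// toDF() infers the schema by sampling documents from the collection.
Dataset<Row> df = MongoSpark.load(getJavaSparkContext(), readConfig).toDF();
df.createOrReplaceTempView("my_collection");

// Run an ad-hoc SQL query against the temporary view (sparkSession is the injected SparkSession bean).
sparkSession.sql("SELECT * FROM my_collection LIMIT 10").show();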

1 Answer:

Answer 0 (score: 0)

Mongo version 3.4.8, Spark version 2.2

package mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;

public class Connector {
    String db1 = "mongodb://127.0.0.1/";
    String db2 = "mongodb://192.168.4.180/";
    String dbUrl = db1;

    String user = "";
    String pass = "";
    String dbName = "test";
    String collName = "spark";


    public static void main(String[] args) {
        Connector con = new Connector();
        JavaSparkContext jsc = con.connection();
        // con.writeToMongo(jsc);
        con.readFromMongo(jsc);

        // Block on stdin so the application (and the Spark UI) stays alive until a key is pressed.
        Scanner sc = new Scanner(System.in);
        sc.next();
    }
    JavaSparkContext connection() {
        // Build a SparkSession with default input/output database and collection settings.
        SparkSession ss = SparkSession.builder()
                .master("local")
                .appName("MongoConnector")
                .config("spark.mongodb.input.uri", dbUrl + dbName)
                .config("spark.mongodb.output.uri", dbUrl + dbName)
                .config("spark.mongodb.output.collection", collName)
                .config("spark.mongodb.input.collection", collName)
                .getOrCreate();

        JavaSparkContext jsc = new JavaSparkContext(ss.sparkContext());
        return jsc;
        // jsc.close();
    }


    void readFromMongo(JavaSparkContext jsc) {
        // Loads the default collection configured via spark.mongodb.input.collection.
        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
        System.out.print(rdd.collect());
    }

    void writeToMongo(JavaSparkContext jsc) {
        // Build a small RDD of documents: {spark: 1}, {spark: 2}, {spark: 3}.
        JavaRDD<Document> rdd = jsc.parallelize(Arrays.asList(1, 2, 3))
                .map(x -> Document.parse("{spark: " + x + "}"));

        // Override the target collection and write concern for this write only.
        Map<String, String> writeconf = new HashMap<>();
        writeconf.put("collection", "spark");
        writeconf.put("writeConcern.w", "majority");

        WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeconf);
        MongoSpark.save(rdd, writeConfig);
    }
}
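
The readFromMongo method above always reads the default collection configured on the SparkSession. To read a different collection, a ReadConfig override can be passed to MongoSpark.load, mirroring the WriteConfig pattern in writeToMongo. Below is only a minimal sketch of an extra method that could be added to the Connector class; the collection parameter is a placeholder, and com.mongodb.spark.config.ReadConfig would need to be imported.

    // Sketch: read from an arbitrary collection instead of the default one.
    // The "collection" key overrides spark.mongodb.input.collection for this read only.
    void readFromMongo(JavaSparkContext jsc, String collection) {
        Map<String, String> readOverrides = new HashMap<>();
        readOverrides.put("collection", collection);

        ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);
        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc, readConfig);
        System.out.print(rdd.collect());
    }

For example, con.readFromMongo(jsc, "otherCollection") would read the (placeholder) collection "otherCollection" from the same test database.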