Can someone help me with this? I am trying to compile a Maven project that uses Java, Spark, and Kafka, but I get the error "no suitable method found for createDirectStream". I can find plenty of solutions for "no suitable method found" errors, but none of them match my case, so I decided to ask for help here. Any help would be appreciated.

Here is the error in more detail:
[ERROR] /home/geek-tech/play-ground/bigdata/iot-traffic-monitor/iot-spark-processor/src/main/java/com/iot/app/spark/processor/IoTDataProcessor.java:[68,86] no suitable method found for createDirectStream(org.apache.spark.streaming.api.java.JavaStreamingContext,java.lang.Class<java.lang.String>,java.lang.Class<com.iot.app.spark.vo.IoTData>,java.lang.Class<kafka.serializer.StringDecoder>,java.lang.Class<com.iot.app.spark.util.IoTDataDecoder>,java.util.Map<java.lang.String,java.lang.String>,java.util.Set<java.lang.String>)
    method org.apache.spark.streaming.kafka010.KafkaUtils.<K,V>createDirectStream(org.apache.spark.streaming.api.java.JavaStreamingContext,org.apache.spark.streaming.kafka010.LocationStrategy,org.apache.spark.streaming.kafka010.ConsumerStrategy<K,V>) is not applicable
      (cannot infer type-variable(s) K,V
        (actual and formal argument lists differ in length))
    method org.apache.spark.streaming.kafka010.KafkaUtils.<K,V>createDirectStream(org.apache.spark.streaming.StreamingContext,org.apache.spark.streaming.kafka010.LocationStrategy,org.apache.spark.streaming.kafka010.ConsumerStrategy<K,V>) is not applicable
      (cannot infer type-variable(s) K,V
        (actual and formal argument lists differ in length))
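From the error, it looks like the only createDirectStream overloads the compiler can see are the kafka010 ones, which take a LocationStrategy and a ConsumerStrategy instead of key/value classes and Decoder classes. If I read the Spark 2.0.2 javadoc correctly, a call matching that signature would look roughly like the sketch below (untested; kafkaObjectParams is a hypothetical Map<String, Object> holding the new-consumer settings such as bootstrap.servers and the deserializer classes, not my existing Map<String, String>):

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

// Untested sketch of the kafka010-style call matching the overload in the error.
// Note the return type is JavaInputDStream<ConsumerRecord<K, V>>, not
// JavaPairInputDStream<K, V>, and there are no Decoder class arguments.
JavaInputDStream<ConsumerRecord<String, IoTData>> stream =
        KafkaUtils.createDirectStream(
                jssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, IoTData>Subscribe(topicsSet, kafkaObjectParams));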
Here is the source code (IoTDataProcessor):
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function3;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.State;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaMapWithStateDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import com.google.common.base.Optional;
import com.iot.app.spark.util.IoTDataDecoder;
import com.iot.app.spark.util.PropertyFileReader;
import com.iot.app.spark.vo.IoTData;
import com.iot.app.spark.vo.POIData;
import kafka.serializer.StringDecoder;
import scala.Tuple2;
import scala.Tuple3;
/**
 * This class consumes Kafka IoT messages and creates stream for processing the IoT data.
 */
public class IoTDataProcessor {

    private static final Logger logger = Logger.getLogger(IoTDataProcessor.class);

    public static void main(String[] args) throws Exception {
        //read Spark and Cassandra properties and create SparkConf
        Properties prop = PropertyFileReader.readPropertyFile();
        SparkConf conf = new SparkConf()
                .setAppName(prop.getProperty("com.iot.app.spark.app.name"))
                .setMaster(prop.getProperty("com.iot.app.spark.master"))
                .set("spark.cassandra.connection.host", prop.getProperty("com.iot.app.cassandra.host"))
                .set("spark.cassandra.connection.port", prop.getProperty("com.iot.app.cassandra.port"))
                .set("spark.cassandra.connection.keep_alive_ms", prop.getProperty("com.iot.app.cassandra.keep_alive"));
        //batch interval of 5 seconds for incoming stream
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));
        //add check point directory
        jssc.checkpoint(prop.getProperty("com.iot.app.spark.checkpoint.dir"));
        //read and set Kafka properties
        Map<String, String> kafkaParams = new HashMap<String, String>();
        // or Map<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("zookeeper.connect", prop.getProperty("com.iot.app.kafka.zookeeper"));
        kafkaParams.put("metadata.broker.list", prop.getProperty("com.iot.app.kafka.brokerlist"));
        // kafkaParams.put("auto.offset.reset", "smallest");
        String topic = prop.getProperty("com.iot.app.kafka.topic");
        Set<String> topicsSet = new HashSet<String>();
        topicsSet.add(topic);
        //create direct kafka stream
        JavaPairInputDStream<String, IoTData> directKafkaStream = KafkaUtils.createDirectStream(
                jssc,
                String.class,
                IoTData.class,
                StringDecoder.class,
                IoTDataDecoder.class,
                kafkaParams,
                topicsSet
        );
        logger.info("Starting Stream Processing");
        //start context
        jssc.start();
        jssc.awaitTermination();
    }
}
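For comparison, the seven-argument, Decoder-based overload I am actually calling appears (as far as I can tell from the Spark 2.0.2 javadoc) to be declared in the older 0-8 integration, org.apache.spark.streaming.kafka.KafkaUtils, not in the kafka010 package I import above:

// My reading of the 0-8 overload (org.apache.spark.streaming.kafka.KafkaUtils,
// shipped in the spark-streaming-kafka-0-8 artifact), which matches my call site:
// static <K, V, KD extends kafka.serializer.Decoder<K>, VD extends kafka.serializer.Decoder<V>>
// JavaPairInputDStream<K, V> createDirectStream(
//         JavaStreamingContext jssc,
//         Class<K> keyClass, Class<V> valueClass,
//         Class<KD> keyDecoderClass, Class<VD> valueDecoderClass,
//         Map<String, String> kafkaParams, Set<String> topics)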
And here is the pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.iot.app.spark</groupId>
    <artifactId>iot-spark-processor</artifactId>
    <version>1.0.0</version>
    <name>IoT Spark Processor</name>
    <dependencies>
        <!-- Spark dependencies -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>2.0.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>2.0.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
            <version>2.0.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.10</artifactId>
            <version>2.0.2</version>
        </dependency>
        <!-- Spark cassandra -->
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>2.0.2</version>
        </dependency>
        <!-- other -->
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
    </dependencies>
    <build>
        <resources>
            <resource>
                <directory>${basedir}/src/main/resources</directory>
            </resource>
        </resources>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>Cp1252</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.4.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                    <resource>reference.conf</resource>
                                </transformer>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <mainClass>com.iot.app.spark.processor.IoTDataProcessor</mainClass>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
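One more thing I noticed while re-reading the pom: spark-streaming-kafka-0-10_2.11 is built for Scala 2.11, while every other Spark artifact here is _2.10. If the Decoder-based call in my code is the one I should keep, I am guessing the dependency would have to be the 0-8 artifact for the matching Scala version, something like this (my guess, not verified):

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-8_2.10</artifactId>
    <version>2.0.2</version>
</dependency>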