java.lang.ClassNotFoundException:org.apache.spark.streaming.kafka010.KafkaRDDPartition

时间:2018-06-21 14:43:37

标签: scala apache-spark apache-kafka spark-streaming

我有3个经纪人组成的spark和kafka集群,我想使用spark流从一个特定的主题读取数据,执行我的 App.scala 时出现此错误:

  

18/06/21 07:26:52信息DA计划程序:ResultStage 7(打印在   App.scala:51)在0.258 s内由于作业因阶段中止而失败   失败:阶段7.0中的任务0失败4次,最近一次失败:丢失   阶段7.0中的任务0.3(TID 31,192.168.57.1​​33,执行者1):   java.lang.ClassNotFoundException:   org.apache.spark.streaming.kafka010.KafkaRDDPartition

这是我的App.scala:

package com.company
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.kafka
import org.apache.spark.streaming.kafka010._
import org.apache.spark._


import org.apache.kafka.common.TopicPartition
import org.apache.spark.Partition
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka.KafkaRDDPartition


import org.apache.spark.streaming.StreamingContext._
import org.apache.spark.streaming.kafka010.KafkaUtils

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.LongDeserializer

import org.apache.spark.streaming.kafka010.LocationStrategies
import org.apache.spark.streaming.kafka010.ConsumerStrategies
//import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferBrokers

/**
 * @author ${user.name}
 */
object App {
  //def foo(x : Array[String]) = x.foldLeft("")((a,b) => a + b)

  def main(args : Array[String]) {
   val conf = new SparkConf().setMaster("spark://srv-rt001:7077").setAppName("App")
                conf.set("spark.testing.memory","472000000")
                 val ssc = new StreamingContext(conf, Seconds(1))
                    val kafkaParams = Map(
                            "bootstrap.servers" -> "192.168.57.129:9092",
                            "key.deserializer" -> classOf[LongDeserializer],
                            "value.deserializer" -> classOf[StringDeserializer],
                            "group.id" -> "spark-streaming-notes2",
                            "auto.offset.reset" -> "latest"
                            )
                    // List of topics you want to listen for from Kafka
                    val topics = List("fraude")
                    val lines = KafkaUtils.createDirectStream[String, String](ssc,
                            PreferBrokers,
                            ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
                            )

                    val word = lines.map(_.value())
                   word.print()
                   // insertIntoIndexes()
                    println("hellllllloloololohokhok okgokgog")
                        ssc.start()
                    ssc.awaitTermination()
  }

}

这是我的pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.compan</groupId>
  <artifactId>project</artifactId>
  <version>0.1-SNAPSHOT</version>
  <name>${project.artifactId}</name>
  <description>My wonderfull scala app</description>
  <inceptionYear>2018</inceptionYear>
  <licenses>
    <license>
      <name>My License</name>
      <url>http://....</url>
      <distribution>repo</distribution>
    </license>
  </licenses>

  <properties>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <encoding>UTF-8</encoding>
    <scala.version>2.11.8</scala.version>
    <scala.compat.version>2.11</scala.compat.version>
    <spec2.version>4.2.0</spec2.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>${scala.version}</version>
    </dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming -->


<!--jar file fih KafkaRDDPartition-->
<dependency>
   <groupId>org.apache.spark</groupId>
   <artifactId>spark-streaming-kafka_2.11</artifactId>
   <version>1.6.3</version>
</dependency>



<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming_2.11</artifactId>
    <version>2.3.0</version>
    <!--scope>provided</scope-->
</dependency>

<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>0.10.0.0</version>
</dependency>



<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka-0-10 -->
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
    <version>2.3.1</version>
</dependency>




    <!-- Test -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.12</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.compat.version}</artifactId>
      <version>3.0.5</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.specs2</groupId>
      <artifactId>specs2-core_${scala.compat.version}</artifactId>
      <version>${spec2.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.specs2</groupId>
      <artifactId>specs2-junit_${scala.compat.version}</artifactId>
      <version>${spec2.version}</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <sourceDirectory>src/main/scala</sourceDirectory>
    <testSourceDirectory>src/test/scala</testSourceDirectory>
    <plugins>
      <plugin>
        <!-- see http://davidb.github.com/scala-maven-plugin -->
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
        <version>3.3.2</version>
        <executions>
          <execution>
            <goals>
              <goal>compile</goal>
              <goal>testCompile</goal>
            </goals>
            <configuration>
              <args>
                <arg>-dependencyfile</arg>
                <arg>${project.build.directory}/.scala_dependencies</arg>
              </args>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>2.21.0</version>
        <configuration>
          <!-- Tests will be run with scalatest-maven-plugin instead -->
          <skipTests>true</skipTests>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest-maven-plugin</artifactId>
        <version>2.0.0</version>
        <configuration>
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
          <junitxml>.</junitxml>
          <filereports>TestSuiteReport.txt</filereports>
          <!-- Comma separated list of JUnit test class names to execute -->
          <jUnitClasses>samples.AppTest</jUnitClasses>
        </configuration>
        <executions>
          <execution>
            <id>test</id>
            <goals>
              <goal>test</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>

有人可以帮助我解决此问题吗? 提前致谢。

0 个答案:

没有答案