Akka/Scala example source code file (ActorDocSpec.scala)

本文档详细介绍了 Akka 框架中 Actor 的创建、配置与使用方法,包括 Actor 的基本行为定义、消息处理流程、状态切换、超时处理、子 Actor 监视、Actor 选择及 Actor 生命周期管理等核心概念。

from:http://alvinalexander.com/java/jwarehouse/akka-2.3/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala.shtml

/**
 * Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
 */
package docs.actor

import language.postfixOps

//#imports1
import akka.actor.Actor
import akka.actor.Props
import akka.event.Logging

//#imports1

import scala.concurrent.Future
import akka.actor.{ ActorRef, ActorSystem, PoisonPill, Terminated, ActorLogging }
import org.scalatest.{ BeforeAndAfterAll, WordSpec }
import org.scalatest.Matchers
import akka.testkit._
import akka.util._
import scala.concurrent.duration._
import scala.concurrent.Await

//#my-actor
/** Minimal actor that logs every message it receives. */
class MyActor extends Actor {
  // Logger bound to this actor instance within its actor system.
  val log = Logging(context.system, this)
  def receive = {
    case message =>
      if (message == "test") log.info("received test")
      else log.info("received unknown message")
  }
}
//#my-actor

// Messages used by the samples below; `final` keeps the case classes closed.
// NOTE(review): `ImmutableMessage` is declared elsewhere in the docs project.
final case class DoIt(msg: ImmutableMessage)
final case class Message(s: String)

//#context-actorOf
/** Creates one child actor and echoes every message back to its sender. */
class FirstActor extends Actor {
  // Child is created in this actor's context, so it is supervised by FirstActor.
  val child = context.actorOf(Props[MyActor], name = "myChild")
  //#plus-some-behavior
  def receive = {
    case msg => sender() ! msg
  }
  //#plus-some-behavior
}
//#context-actorOf

/** Actor with a constructor argument; accepts and discards every message. */
class ActorWithArgs(arg: String) extends Actor {
  def receive = {
    case _ => ()
  }
}

// Wrapper actor used only to scope the props-factory documentation snippet.
class DemoActorWrapper extends Actor {
  //#props-factory
  object DemoActor {
    /**
     * Create Props for an actor of this type.
     * @param magicNumber The magic number to be passed to this actor's constructor.
     * @return a Props for creating this actor, which can then be further configured
     *         (e.g. calling `.withDispatcher()` on it)
     */
    def props(magicNumber: Int): Props = Props(new DemoActor(magicNumber))
  }

  class DemoActor(magicNumber: Int) extends Actor {
    def receive = {
      case x: Int => sender() ! (x + magicNumber)
    }
  }

  class SomeOtherActor extends Actor {
    // Props(new DemoActor(42)) would not be safe
    context.actorOf(DemoActor.props(42), "demo")
    // ...
    //#props-factory
    def receive = {
      case msg =>
    }
    //#props-factory
  }
  //#props-factory

  def receive = Actor.emptyBehavior
}

/**
 * Demonstrates lifecycle hooks: the child is created in `preStart` and
 * resources would be released in `postStop`.
 */
class Hook extends Actor {
  var child: ActorRef = _
  //#preStart
  // Explicit `: Unit =` result type: procedure syntax (`def f() { ... }`)
  // is deprecated in newer Scala versions.
  override def preStart(): Unit = {
    child = context.actorOf(Props[MyActor], "child")
  }
  //#preStart
  def receive = Actor.emptyBehavior
  //#postStop
  override def postStop(): Unit = {
    //#clean-up-some-resources
    ()
    //#clean-up-some-resources
  }
  //#postStop
}

// Shows how to report a failure back to the sender while still letting
// supervision handle the exception.
class ReplyException extends Actor {
  def receive = {
    case _ =>
      //#reply-exception
      try {
        val result = operation()
        sender() ! result
      } catch {
        case e: Exception =>
          // Completes an `ask` future on the sender side with this failure,
          // then rethrows so the supervisor strategy is still applied.
          sender() ! akka.actor.Status.Failure(e)
          throw e
      }
    //#reply-exception
  }

  // Stand-in for real work whose result is replied to the sender.
  def operation(): String = { "Hi" }

}

//#gracefulStop-actor
// Messages understood by Manager.
object Manager {
  case object Shutdown
}

/**
 * Supervises a single Cruncher worker and demonstrates graceful shutdown:
 * on Shutdown it poisons the worker, rejects further jobs, and stops itself
 * once the watched worker's Terminated message arrives.
 */
class Manager extends Actor {
  import Manager._
  val worker = context.watch(context.actorOf(Props[Cruncher], "worker"))

  def receive = {
    case "job" => worker ! "crunch"
    case Shutdown =>
      // PoisonPill is queued like a normal message, so jobs already in the
      // worker's mailbox are processed before it stops.
      worker ! PoisonPill
      context become shuttingDown
  }

  def shuttingDown: Receive = {
    case "job" => sender() ! "service unavailable, shutting down"
    case Terminated(`worker`) =>
      context stop self
  }
}
//#gracefulStop-actor

/** Worker actor: reacts only to "crunch" requests; all else is unhandled. */
class Cruncher extends Actor {
  def receive = {
    case "crunch" => () // crunch...
  }
}

//#swapper
// Message that toggles the Swapper between its two logging behaviors.
case object Swap
class Swapper extends Actor {
  import context._
  val log = Logging(system, this)

  def receive = {
    case Swap =>
      log.info("Hi")
      become({
        case Swap =>
          log.info("Ho")
          unbecome() // resets the latest 'become' (just for fun)
      }, discardOld = false) // push on top instead of replace
  }
}

// Alternates between the "Hi" and "Ho" behaviors via the behavior stack.
// NOTE(review): the ActorSystem is never terminated, so this sample app
// does not exit on its own — acceptable for a doc sample only.
object SwapperApp extends App {
  val system = ActorSystem("SwapperSystem")
  val swap = system.actorOf(Props[Swapper], name = "swapper")
  swap ! Swap // logs Hi
  swap ! Swap // logs Ho
  swap ! Swap // logs Hi
  swap ! Swap // logs Ho
  swap ! Swap // logs Hi
  swap ! Swap // logs Ho
}
//#swapper

//#receive-orElse

// Reusable producer half of the protocol: hands out a thing on request.
trait ProducerBehavior {
  this: Actor =>

  val producerBehavior: Receive = {
    case GiveMeThings =>
      sender() ! Give("thing")
  }
}

// Reusable consumer half: asks a given actor for things and logs what it gets.
trait ConsumerBehavior {
  this: Actor with ActorLogging =>

  val consumerBehavior: Receive = {
    case ref: ActorRef =>
      ref ! GiveMeThings

    case Give(thing) =>
      log.info("Got a thing! It's {}", thing)
  }
}

class Producer extends Actor with ProducerBehavior {
  def receive = producerBehavior
}

class Consumer extends Actor with ActorLogging with ConsumerBehavior {
  def receive = consumerBehavior
}

// Combines both behaviors; producer cases are tried before consumer cases.
class ProducerConsumer extends Actor with ActorLogging
  with ProducerBehavior with ConsumerBehavior {

  def receive = producerBehavior orElse consumerBehavior
}

// protocol
case object GiveMeThings
final case class Give(thing: Any)

//#receive-orElse

/**
 * Documentation sample spec exercising the actor creation, messaging,
 * hot-swap, stash, watch, selection, identify and graceful-stop snippets
 * defined above. Runs in an AkkaSpec test actor system with INFO-level
 * logging so the log-based assertions below can observe actor output.
 * The `//#...` markers delimit regions extracted verbatim into the docs.
 */
class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {

  "import context" in {
    new AnyRef {
      //#import-context
      class FirstActor extends Actor {
        import context._
        val myActor = actorOf(Props[MyActor], name = "myactor")
        def receive = {
          case x => myActor ! x
        }
      }
      //#import-context

      val first = system.actorOf(Props(classOf[FirstActor], this), name = "first")
      system.stop(first)
    }
  }

  "creating actor with system.actorOf" in {
    val myActor = system.actorOf(Props[MyActor])

    // testing the actor

    // TODO: convert docs to AkkaSpec(Map(...))
    // Mute the expected Info events and route them to the testActor so the
    // expectMsgPF assertions below can observe MyActor's log output.
    val filter = EventFilter.custom {
      case e: Logging.Info => true
      case _               => false
    }
    system.eventStream.publish(TestEvent.Mute(filter))
    system.eventStream.subscribe(testActor, classOf[Logging.Info])

    myActor ! "test"
    expectMsgPF(1 second) { case Logging.Info(_, _, "received test") => true }

    myActor ! "unknown"
    expectMsgPF(1 second) { case Logging.Info(_, _, "received unknown message") => true }

    system.eventStream.unsubscribe(testActor)
    system.eventStream.publish(TestEvent.UnMute(filter))

    system.stop(myActor)
  }

  "creating a Props config" in {
    //#creating-props
    import akka.actor.Props

    val props1 = Props[MyActor]
    val props2 = Props(new ActorWithArgs("arg")) // careful, see below
    val props3 = Props(classOf[ActorWithArgs], "arg")
    //#creating-props

    //#creating-props-deprecated
    // NOT RECOMMENDED within another actor:
    // encourages to close over enclosing class
    val props7 = Props(new MyActor)
    //#creating-props-deprecated
  }

  "creating actor with Props" in {
    //#system-actorOf
    import akka.actor.ActorSystem

    // ActorSystem is a heavy object: create only one per application
    val system = ActorSystem("mySystem")
    val myActor = system.actorOf(Props[MyActor], "myactor2")
    //#system-actorOf
    shutdown(system)
  }

  "creating actor with IndirectActorProducer" in {
    class Echo(name: String) extends Actor {
      def receive = {
        case n: Int => sender() ! name
        case message =>
          val target = testActor
          //#forward
          target forward message
        //#forward
      }
    }

    // Structural type keeps `actorRef` accessible outside the anonymous class.
    val a: { def actorRef: ActorRef } = new AnyRef {
      val applicationContext = this

      //#creating-indirectly
      import akka.actor.IndirectActorProducer

      class DependencyInjector(applicationContext: AnyRef, beanName: String)
        extends IndirectActorProducer {

        override def actorClass = classOf[Actor]
        override def produce =
          //#obtain-fresh-Actor-instance-from-DI-framework
          new Echo(beanName)

        def this(beanName: String) = this("", beanName)
        //#obtain-fresh-Actor-instance-from-DI-framework
      }

      val actorRef = system.actorOf(
        Props(classOf[DependencyInjector], applicationContext, "hello"),
        "helloBean")
      //#creating-indirectly
    }
    val actorRef = {
      import scala.language.reflectiveCalls
      a.actorRef
    }

    val message = 42
    implicit val self = testActor
    //#tell
    actorRef ! message
    //#tell
    expectMsg("hello")
    actorRef ! "huhu"
    expectMsg("huhu")
  }

  "using implicit timeout" in {
    val myActor = system.actorOf(Props[FirstActor])
    //#using-implicit-timeout
    import scala.concurrent.duration._
    import akka.util.Timeout
    import akka.pattern.ask
    implicit val timeout = Timeout(5 seconds)
    val future = myActor ? "hello"
    //#using-implicit-timeout
    Await.result(future, timeout.duration) should be("hello")

  }

  "using explicit timeout" in {
    val myActor = system.actorOf(Props[FirstActor])
    //#using-explicit-timeout
    import scala.concurrent.duration._
    import akka.pattern.ask
    val future = myActor.ask("hello")(5 seconds)
    //#using-explicit-timeout
    Await.result(future, 5 seconds) should be("hello")
  }

  "using receiveTimeout" in {
    //#receive-timeout
    import akka.actor.ReceiveTimeout
    import scala.concurrent.duration._
    class MyActor extends Actor {
      // To set an initial delay
      context.setReceiveTimeout(30 milliseconds)
      def receive = {
        case "Hello" =>
          // To set in a response to a message
          context.setReceiveTimeout(100 milliseconds)
        case ReceiveTimeout =>
          // To turn it off
          context.setReceiveTimeout(Duration.Undefined)
          throw new RuntimeException("Receive timed out")
      }
    }
    //#receive-timeout
  }

  //#hot-swap-actor
  class HotSwapActor extends Actor {
    import context._
    def angry: Receive = {
      case "foo" => sender() ! "I am already angry?"
      case "bar" => become(happy)
    }

    def happy: Receive = {
      case "bar" => sender() ! "I am already happy :-)"
      case "foo" => become(angry)
    }

    def receive = {
      case "foo" => become(angry)
      case "bar" => become(happy)
    }
  }
  //#hot-swap-actor

  "using hot-swap" in {
    val actor = system.actorOf(Props(classOf[HotSwapActor], this), name = "hot")
  }

  "using Stash" in {
    //#stash
    import akka.actor.Stash
    class ActorWithProtocol extends Actor with Stash {
      def receive = {
        case "open" =>
          unstashAll()
          context.become({
            case "write" => // do writing...
            case "close" =>
              unstashAll()
              context.unbecome()
            case msg => stash()
          }, discardOld = false) // stack on top instead of replacing
        case msg => stash()
      }
    }
    //#stash
  }

  "using watch" in {
    new AnyRef {
      //#watch
      import akka.actor.{ Actor, Props, Terminated }

      class WatchActor extends Actor {
        val child = context.actorOf(Props.empty, "child")
        context.watch(child) // <-- this is the only call needed for registration
        var lastSender = system.deadLetters

        def receive = {
          case "kill" =>
            context.stop(child); lastSender = sender()
          case Terminated(`child`) => lastSender ! "finished"
        }
      }
      //#watch
      val a = system.actorOf(Props(classOf[WatchActor], this))
      implicit val sender = testActor
      a ! "kill"
      expectMsg("finished")
    }
  }

  "demonstrate ActorSelection" in {
    val context = system
    //#selection-local
    // will look up this absolute path
    context.actorSelection("/user/serviceA/aggregator")
    // will look up sibling beneath same supervisor
    context.actorSelection("../joe")
    //#selection-local
    //#selection-wildcard
    // will look all children to serviceB with names starting with worker
    context.actorSelection("/user/serviceB/worker*")
    // will look up all siblings beneath same supervisor
    context.actorSelection("../*")
    //#selection-wildcard
    //#selection-remote
    context.actorSelection("akka.tcp://app@otherhost:1234/user/serviceB")
    //#selection-remote
  }

  "using Identify" in {
    new AnyRef {
      //#identify
      import akka.actor.{ Actor, Props, Identify, ActorIdentity, Terminated }

      class Follower extends Actor {
        val identifyId = 1
        context.actorSelection("/user/another") ! Identify(identifyId)

        def receive = {
          case ActorIdentity(`identifyId`, Some(ref)) =>
            context.watch(ref)
            context.become(active(ref))
          case ActorIdentity(`identifyId`, None) => context.stop(self)

        }

        def active(another: ActorRef): Actor.Receive = {
          case Terminated(`another`) => context.stop(self)
        }
      }
      //#identify

      val a = system.actorOf(Props.empty)
      val b = system.actorOf(Props(classOf[Follower], this))
      watch(b)
      system.stop(a)
      expectMsgType[akka.actor.Terminated].actor should be(b)
    }
  }

  "using pattern gracefulStop" in {
    val actorRef = system.actorOf(Props[Manager])
    //#gracefulStop
    import akka.pattern.gracefulStop
    import scala.concurrent.Await

    try {
      val stopped: Future[Boolean] = gracefulStop(actorRef, 5 seconds, Manager.Shutdown)
      Await.result(stopped, 6 seconds)
      // the actor has been stopped
    } catch {
      // the actor wasn't stopped within 5 seconds
      case e: akka.pattern.AskTimeoutException =>
    }
    //#gracefulStop
  }

  "using pattern ask / pipeTo" in {
    val actorA, actorB, actorC, actorD = system.actorOf(Props.empty)
    //#ask-pipeTo
    import akka.pattern.{ ask, pipe }
    import system.dispatcher // The ExecutionContext that will be used
    final case class Result(x: Int, s: String, d: Double)
    case object Request

    implicit val timeout = Timeout(5 seconds) // needed for `?` below

    val f: Future[Result] =
      for {
        x <- ask(actorA, Request).mapTo[Int] // call pattern directly
        s <- (actorB ask Request).mapTo[String] // call by implicit conversion
        d <- (actorC ? Request).mapTo[Double] // call by symbolic name
      } yield Result(x, s, d)

    f pipeTo actorD // .. or ..
    pipe(f) to actorD
    //#ask-pipeTo
  }

  class Replier extends Actor {
    def receive = {
      case ref: ActorRef =>
        //#reply-with-sender
        sender().tell("reply", context.parent) // replies will go back to parent
        sender().!("reply")(context.parent) // alternative syntax (beware of the parens!)
      //#reply-with-sender
      case x =>
        //#reply-without-sender
        sender() ! x // replies will go to this actor
      //#reply-without-sender
    }
  }

  "replying with own or other sender" in {
    val actor = system.actorOf(Props(classOf[Replier], this))
    implicit val me = testActor
    actor ! 42
    expectMsg(42)
    lastSender should be(actor)
    actor ! me
    // Both replies were sent with context.parent as sender, i.e. the
    // user guardian at path "/user".
    expectMsg("reply")
    lastSender.path.toStringWithoutAddress should be("/user")
    expectMsg("reply")
    lastSender.path.toStringWithoutAddress should be("/user")
  }

  "using ActorDSL outside of akka.actor package" in {
    import akka.actor.ActorDSL._
    actor(new Act {
      superviseWith(OneForOneStrategy() { case _ => Stop; Restart; Resume; Escalate })
      superviseWith(AllForOneStrategy() { case _ => Stop; Restart; Resume; Escalate })
    })
  }

}

 

报D:\soft\biancheng\conda\az\envs\pyflink\python.exe D:/soft/biancheng/pycharm/pyflink代码练习/data_stream_api/kafka_data.py Traceback (most recent call last): File "D:/soft/biancheng/pycharm/pyflink代码练习/data_stream_api/kafka_data.py", line 34, in <module> env.execute("Kafka PyFlink Source Example") File "D:\soft\biancheng\conda\az\envs\pyflink\lib\site-packages\pyflink\datastream\stream_execution_environment.py", line 764, in execute return JobExecutionResult(self._j_stream_execution_environment.execute(j_stream_graph)) File "D:\soft\biancheng\conda\az\envs\pyflink\lib\site-packages\py4j\java_gateway.py", line 1321, in __call__ return_value = get_return_value( File "D:\soft\biancheng\conda\az\envs\pyflink\lib\site-packages\pyflink\util\exceptions.py", line 146, in deco return f(*a, **kw) File "D:\soft\biancheng\conda\az\envs\pyflink\lib\site-packages\py4j\protocol.py", line 326, in get_return_value raise Py4JJavaError( py4j.protocol.Py4JJavaError: An error occurred while calling o0.execute. : org.apache.flink.runtime.client.JobExecutionException: Job execution failed. 
at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144) at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:141) at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616) at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591) at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488) at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:268) at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774) at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750) at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488) at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1277) at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93) at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68) at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92) at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774) at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750) at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488) at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47) at akka.dispatch.OnComplete.internal(Future.scala:300) at akka.dispatch.OnComplete.internal(Future.scala:297) at 
akka.dispatch.japi$CallbackBridge.apply(Future.scala:224) at akka.dispatch.japi$CallbackBridge.apply(Future.scala:221) at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65) at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68) at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284) at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284) at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284) at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621) at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24) at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23) at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532) at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29) at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29) at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63) at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12) at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81) at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100) at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49) at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48) at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289) at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056) at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692) at 
java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:175) Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:139) at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:83) at org.apache.flink.runtime.scheduler.DefaultScheduler.recordTaskFailure(DefaultScheduler.java:256) at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:247) at org.apache.flink.runtime.scheduler.DefaultScheduler.onTaskFailed(DefaultScheduler.java:240) at org.apache.flink.runtime.scheduler.SchedulerBase.onTaskExecutionStateUpdate(SchedulerBase.java:738) at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:715) at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:78) at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:477) at sun.reflect.GeneratedMethodAccessor12.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:309) at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83) at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:307) at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:222) at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:84) at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:168) at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24) 
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20) at scala.PartialFunction.applyOrElse(PartialFunction.scala:123) at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20) at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at akka.actor.Actor.aroundReceive(Actor.scala:537) at akka.actor.Actor.aroundReceive$(Actor.scala:535) at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220) at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580) at akka.actor.ActorCell.invoke(ActorCell.scala:548) at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270) at akka.dispatch.Mailbox.run(Mailbox.scala:231) at akka.dispatch.Mailbox.exec(Mailbox.scala:243) ... 4 more Caused by: org.apache.kafka.common.KafkaException: Failed to construct kafka consumer at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:823) at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:665) at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:646) at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:626) at org.apache.flink.streaming.connectors.kafka.internals.KafkaPartitionDiscoverer.initializeConnections(KafkaPartitionDiscoverer.java:55) at org.apache.flink.streaming.connectors.kafka.internals.AbstractPartitionDiscoverer.open(AbstractPartitionDiscoverer.java:94) at org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase.open(FlinkKafkaConsumerBase.java:574) at org.apache.flink.api.common.functions.util.FunctionUtils.openFunction(FunctionUtils.java:34) at org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.open(AbstractUdfStreamOperator.java:100) at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.initializeStateAndOpenOperators(RegularOperatorChain.java:107) at org.apache.flink.streaming.runtime.tasks.StreamTask.restoreGates(StreamTask.java:726) at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.call(StreamTaskActionExecutor.java:100) at org.apache.flink.streaming.runtime.tasks.StreamTask.restoreInternal(StreamTask.java:702) at org.apache.flink.streaming.runtime.tasks.StreamTask.restore(StreamTask.java:669) at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:935) at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:904) at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:728) at org.apache.flink.runtime.taskmanager.Task.run(Task.java:550) at java.lang.Thread.run(Thread.java:750) Caused by: org.apache.kafka.common.KafkaException: class org.apache.kafka.common.serialization.ByteArrayDeserializer is not an instance of org.apache.kafka.common.serialization.Deserializer at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstance(AbstractConfig.java:399) at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstance(AbstractConfig.java:430) at org.apache.kafka.common.config.AbstractConfig.getConfiguredInstance(AbstractConfig.java:415) at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:709) ... 18 more Process finished with exit code 1
最新发布
07-31
/opt/jdk1.8.0_111/bin/java -javaagent:/zh/idea/lib/idea_rt.jar=36123:/zh/idea/bin -Dfile.encoding=UTF-8 -classpath /opt/jdk1.8.0_111/jre/lib/charsets.jar:/opt/jdk1.8.0_111/jre/lib/deploy.jar:/opt/jdk1.8.0_111/jre/lib/ext/cldrdata.jar:/opt/jdk1.8.0_111/jre/lib/ext/dnsns.jar:/opt/jdk1.8.0_111/jre/lib/ext/jaccess.jar:/opt/jdk1.8.0_111/jre/lib/ext/jfxrt.jar:/opt/jdk1.8.0_111/jre/lib/ext/localedata.jar:/opt/jdk1.8.0_111/jre/lib/ext/nashorn.jar:/opt/jdk1.8.0_111/jre/lib/ext/sunec.jar:/opt/jdk1.8.0_111/jre/lib/ext/sunjce_provider.jar:/opt/jdk1.8.0_111/jre/lib/ext/sunpkcs11.jar:/opt/jdk1.8.0_111/jre/lib/ext/zipfs.jar:/opt/jdk1.8.0_111/jre/lib/javaws.jar:/opt/jdk1.8.0_111/jre/lib/jce.jar:/opt/jdk1.8.0_111/jre/lib/jfr.jar:/opt/jdk1.8.0_111/jre/lib/jfxswt.jar:/opt/jdk1.8.0_111/jre/lib/jsse.jar:/opt/jdk1.8.0_111/jre/lib/management-agent.jar:/opt/jdk1.8.0_111/jre/lib/plugin.jar:/opt/jdk1.8.0_111/jre/lib/resources.jar:/opt/jdk1.8.0_111/jre/lib/rt.jar:/root/IdeaProjects/FinkDemo1/target/classes:/opt/scala-2.11.8/lib/scala-parser-combinators_2.11-1.0.4.jar:/opt/scala-2.11.8/lib/scala-library.jar:/opt/scala-2.11.8/lib/scala-actors-migration_2.11-1.1.0.jar:/opt/scala-2.11.8/lib/scala-xml_2.11-1.0.4.jar:/opt/scala-2.11.8/lib/scala-reflect.jar:/opt/scala-2.11.8/lib/scala-actors-2.11.0.jar:/opt/scala-2.11.8/lib/scala-swing_2.11-1.0.2.jar:/root/.m2/repository/org/apache/flink/flink-scala_2.11/1.13.0/flink-scala_2.11-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-core/1.13.0/flink-core-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-annotations/1.13.0/flink-annotations-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-metrics-core/1.13.0/flink-metrics-core-1.13.0.jar:/root/.m2/repository/org/apache/commons/commons-lang3/3.3.2/commons-lang3-3.3.2.jar:/root/.m2/repository/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/root/.m2/repository/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/root/.m2/repository/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/root/
.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/root/.m2/repository/org/apache/commons/commons-compress/1.20/commons-compress-1.20.jar:/root/.m2/repository/org/apache/flink/flink-shaded-guava/18.0-13.0/flink-shaded-guava-18.0-13.0.jar:/root/.m2/repository/org/apache/flink/flink-java/1.13.0/flink-java-1.13.0.jar:/root/.m2/repository/org/apache/commons/commons-math3/3.5/commons-math3-3.5.jar:/root/.m2/repository/org/apache/flink/flink-shaded-asm-7/7.1-13.0/flink-shaded-asm-7-7.1-13.0.jar:/root/.m2/repository/org/scala-lang/scala-reflect/2.11.12/scala-reflect-2.11.12.jar:/root/.m2/repository/org/scala-lang/scala-library/2.11.12/scala-library-2.11.12.jar:/root/.m2/repository/org/scala-lang/scala-compiler/2.11.12/scala-compiler-2.11.12.jar:/root/.m2/repository/org/scala-lang/modules/scala-xml_2.11/1.0.5/scala-xml_2.11-1.0.5.jar:/root/.m2/repository/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/root/.m2/repository/org/slf4j/slf4j-api/1.7.15/slf4j-api-1.7.15.jar:/root/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/root/.m2/repository/org/apache/flink/force-shading/1.13.0/force-shading-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-streaming-scala_2.11/1.13.0/flink-streaming-scala_2.11-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-streaming-java_2.11/1.13.0/flink-streaming-java_2.11-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-file-sink-common/1.13.0/flink-file-sink-common-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-clients_2.11/1.13.0/flink-clients_2.11-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-runtime_2.11/1.13.0/flink-runtime_2.11-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-queryable-state-client-java/1.13.0/flink-queryable-state-client-java-1.13.0.jar:/root/.m2/repository/org/apache/flink/flink-hadoop-fs/1.13.0/flink-hadoop-fs-1.13.0.jar:/root/.m2/repository/commons-io/commons-io/2.7/commo
ns-io-2.7.jar:/root/.m2/repository/org/apache/flink/flink-shaded-netty/4.1.49.Final-13.0/flink-shaded-netty-4.1.49.Final-13.0.jar:/root/.m2/repository/org/apache/flink/flink-shaded-jackson/2.12.1-13.0/flink-shaded-jackson-2.12.1-13.0.jar:/root/.m2/repository/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-13.0/flink-shaded-zookeeper-3-3.4.14-13.0.jar:/root/.m2/repository/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/root/.m2/repository/com/typesafe/akka/akka-actor_2.11/2.5.21/akka-actor_2.11-2.5.21.jar:/root/.m2/repository/com/typesafe/config/1.3.3/config-1.3.3.jar:/root/.m2/repository/org/scala-lang/modules/scala-java8-compat_2.11/0.7.0/scala-java8-compat_2.11-0.7.0.jar:/root/.m2/repository/com/typesafe/akka/akka-stream_2.11/2.5.21/akka-stream_2.11-2.5.21.jar:/root/.m2/repository/org/reactivestreams/reactive-streams/1.0.2/reactive-streams-1.0.2.jar:/root/.m2/repository/com/typesafe/ssl-config-core_2.11/0.3.7/ssl-config-core_2.11-0.3.7.jar:/root/.m2/repository/com/typesafe/akka/akka-protobuf_2.11/2.5.21/akka-protobuf_2.11-2.5.21.jar:/root/.m2/repository/com/typesafe/akka/akka-slf4j_2.11/2.5.21/akka-slf4j_2.11-2.5.21.jar:/root/.m2/repository/org/clapper/grizzled-slf4j_2.11/1.3.2/grizzled-slf4j_2.11-1.3.2.jar:/root/.m2/repository/com/github/scopt/scopt_2.11/3.5.0/scopt_2.11-3.5.0.jar:/root/.m2/repository/org/xerial/snappy/snappy-java/1.1.8.3/snappy-java-1.1.8.3.jar:/root/.m2/repository/com/twitter/chill_2.11/0.7.6/chill_2.11-0.7.6.jar:/root/.m2/repository/com/twitter/chill-java/0.7.6/chill-java-0.7.6.jar:/root/.m2/repository/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar:/root/.m2/repository/org/apache/flink/flink-optimizer_2.11/1.13.0/flink-optimizer_2.11-1.13.0.jar:/root/.m2/repository/commons-cli/commons-cli/1.3.1/commons-cli-1.3.1.jar output.WordCountJob SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". 
SLF4J: Defaulting to no-operation (NOP) logger implementation SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. Process finished with exit code 0 无结果怎么解决
07-03
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值