Using the fabric8 API to establish an events() watch on a Kubernetes cluster results in JsonMappingException: No resource type found for kind:Event

I have been experimenting with the fabric8 events() API, trying to find a workable solution to a question I asked a short while ago.

The answer to that question was "use the events() API!", which seemed like a good idea... but when I try to use the API I run into a strange error, even with the most basic usage I can imagine. I have included (in the Sample Program section below) a small program that illustrates the problem. Here is an excerpt of the most relevant code:

  val eventWatcher: Watcher[Event] = new Watcher[Event]() {
    def eventReceived(action: Action, event: Event) {
      logger.info("notified!")
    }
  }

   ... 
  kube = getConnection  // get client connection to k8s server
  kube.events().watch(eventWatcher)

After setting up the event watch I create a pod. The pod is created just fine. However, instead of the 'eventReceived' callback being invoked, I get the error and stack trace listed in the Error section below. I never reach the eventReceived method (and the string 'notified' never appears in the log).

Any tips, suggestions, or guidance would be much appreciated! - Chris

Sample Program

import com.fasterxml.jackson.databind.ObjectMapper
import scala.collection.JavaConverters._
import com.ning.http.client.ws.WebSocket
import com.typesafe.scalalogging.StrictLogging
import io.fabric8.kubernetes.api.model._
import io.fabric8.kubernetes.client.DefaultKubernetesClient.ConfigBuilder
import io.fabric8.kubernetes.client.Watcher.Action
import io.fabric8.kubernetes.client.dsl.Resource
import io.fabric8.kubernetes.client.{DefaultKubernetesClient, Watcher}

object ErrorTest extends App with StrictLogging {
  // corresponds to --insecure-skip-tls-verify=true, according to io.fabric8.kubernetes.api.model.Cluster
  val trustCerts = true
  val k8sUrl = "http://localhost:8080"
  val namespaceName = "default" // replace this with name of a namespace that you know exists
  val imageName: String = "nginx" //  make image name to load to pod a variable so we can experiment with err conditions

  val eventWatcher: Watcher[Event] = new Watcher[Event]() {
    def eventReceived(action: Action, event: Event) {
      logger.info("notified!")
    }
  }

  def go(): Unit = {
    val kube = getConnection
    dumpNamespaces(kube)
    kube.events().watch(eventWatcher)
    deployPodWithWatch(kube, getPod(image = imageName))
  }

  def deployPodWithWatch(kube: DefaultKubernetesClient, pod: Pod): Unit = {
    kube.pods().inNamespace(namespaceName).create(pod) /*  create the pod ! */
  }

  def getPod(image: String): Pod = {
    val jsonTemplate =
      """
    |{
    | "kind": "Pod",
    | "apiVersion": "v1",
    | "metadata": {
    |   "name": "podboy",
    |   "labels": {
    |     "app": "nginx"
    |   }
    | },
    | "spec": {
    |   "containers": [
    |     {
    |     "name": "podboy",
    |     "image": "<image>",
    |     "ports": [
    |       {
    |         "containerPort": 80,
    |         "protocol": "TCP"
    |       }
    |     ]
    |     }
    |   ]
    | }
    |}
      """.
    stripMargin
    val replacement: String = "image\": \"" + image
    val json = jsonTemplate.replaceAll("image\": \"<image>", replacement)
    System.out.println("json:" + json);
    new ObjectMapper().readValue(json, classOf[Pod])
  }

  def dumpNamespaces(kube: DefaultKubernetesClient): Unit = {
    val namespaceNames = kube.namespaces().list().getItems.asScala.map {
      (ns: Namespace) => ns.getMetadata.getName
    }
    System.out.println("namespaces are:" + namespaceNames);
  }

  def getConnection = {
    val configBuilder = new ConfigBuilder()
    val config = configBuilder.
      trustCerts(trustCerts).
      masterUrl(k8sUrl).
      build()
    new DefaultKubernetesClient(config)
  }

  go()
}

Error

    22:03:16.656 [New I/O worker #2] ERROR i.f.k.c.dsl.internal.BaseOperation - Could not deserialize watch event: {"type":"MODIFIED","object":{"kind":"Event","apiVersion":"v1","metadata":{"name":"spark-master-rc-zlxpp.144f898e0549644b","namespace":"dummyowner-workflow-dda91220-b63-1544803905","selfLink":"/api/v1/namespaces/dummyowner-workflow-dda91220-b63-1544803905/events/spark-master-rc-zlxpp.144f898e0549644b","resourceVersion":"204015294","creationTimestamp":null,"deletionTimestamp":"2016-05-18T06:03:06Z"},"involvedObject":{"kind":"Pod","namespace":"dummyowner-workflow-dda91220-b63-1544803905","name":"spark-master-rc-zlxpp","uid":"d413698c-19c7-11e6-9f26-74dbd1a09231","apiVersion":"v1","resourceVersion":"203578210","fieldPath":"spec.containers{spark-master}"},"reason":"Failed","message":"Failed to pull image \"ecr.vip.ebayc3.com/krylov/spark-1.5.1:${env}\": image pull failed for ecr.vip.ebayc3.com/krylov/spark-1.5.1:${env}, this may be because there are no credentials on this request.  details: (Tag ${env} not found in repository ecr.vip.ebayc3.com/krylov/spark-1.5.1)","source":{"component":"kubelet","host":"kubernetes-minion-105-4040.slc01.dev.ebayc3.com"},"firstTimestamp":"2016-05-18T02:47:26Z","lastTimestamp":"2016-05-18T05:03:05Z","count":815}}
    com.fasterxml.jackson.databind.JsonMappingException: No resource type found for kind:Event
     at [Source: {"type":"MODIFIED","object":{"kind":"Event","apiVersion":"v1","metadata":{"name":"spark-master-rc-zlxpp.144f898e0549644b","namespace":"dummyowner-workflow-dda91220-b63-1544803905","selfLink":"/api/v1/namespaces/dummyowner-workflow-dda91220-b63-1544803905/events/spark-master-rc-zlxpp.144f898e0549644b","resourceVersion":"204015294","creationTimestamp":null,"deletionTimestamp":"2016-05-18T06:03:06Z"},"involvedObject":{"kind":"Pod","namespace":"dummyowner-workflow-dda91220-b63-1544803905","name":"spark-master-rc-zlxpp","uid":"d413698c-19c7-11e6-9f26-74dbd1a09231","apiVersion":"v1","resourceVersion":"203578210","fieldPath":"spec.containers{spark-master}"},"reason":"Failed","message":"Failed to pull image \"ecr.vip.ebayc3.com/krylov/spark-1.5.1:${env}\": image pull failed for ecr.vip.ebayc3.com/krylov/spark-1.5.1:${env}, this may be because there are no credentials on this request.  details: (Tag ${env} not found in repository ecr.vip.ebayc3.com/krylov/spark-1.5.1)","source":{"component":"kubelet","host":"kubernetes-minion-105-4040.slc01.dev.ebayc3.com"},"firstTimestamp":"2016-05-18T02:47:26Z","lastTimestamp":"2016-05-18T05:03:05Z","count":815}}; line: 1, column: 1156] (through reference chain: io.fabric8.kubernetes.api.model.WatchEvent["object"])
            at com.fasterxml.jackson.databind.JsonMappingException.from(JsonMappingException.java:164) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.DeserializationContext.mappingException(DeserializationContext.java:757) ~[jackson-databind-2.4.1.jar:2.4.1]
            at io.fabric8.kubernetes.internal.KubernetesDeserializer.deserialize(KubernetesDeserializer.java:41) ~[kubernetes-model-1.0.3.jar:1.0.3]
            at io.fabric8.kubernetes.internal.KubernetesDeserializer.deserialize(KubernetesDeserializer.java:29) ~[kubernetes-model-1.0.3.jar:1.0.3]
            at com.fasterxml.jackson.databind.deser.SettableBeanProperty.deserialize(SettableBeanProperty.java:538) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.deser.impl.MethodProperty.deserializeAndSet(MethodProperty.java:99) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:242) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:118) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.ObjectReader._bindAndClose(ObjectReader.java:1269) ~[jackson-databind-2.4.1.jar:2.4.1]
            at com.fasterxml.jackson.databind.ObjectReader.readValue(ObjectReader.java:896) ~[jackson-databind-2.4.1.jar:2.4.1]
            at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.onMessage(BaseOperation.java:422) ~[kubernetes-client-1.2.2.jar:na]
            at com.ning.http.client.providers.netty.ws.NettyWebSocket.notifyTextListeners(NettyWebSocket.java:240) [async-http-client-1.9.29.jar:na]
            at com.ning.http.client.providers.netty.ws.NettyWebSocket.onTextFragment(NettyWebSocket.java:281) [async-http-client-1.9.29.jar:na]
            at com.ning.http.client.providers.netty.handler.WebSocketProtocol.handle(WebSocketProtocol.java:162) [async-http-client-1.9.29.jar:na]
            at com.ning.http.client.providers.netty.handler.Processor.messageReceived(Processor.java:88) [async-http-client-1.9.29.jar:na]
            at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.handler.codec.oneone.OneToOneDecoder.handleUpstream(OneToOneDecoder.java:68) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:296) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.handler.codec.frame.FrameDecoder.unfoldAndFireMessageReceived(FrameDecoder.java:462) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.handler.codec.replay.ReplayingDecoder.callDecode(ReplayingDecoder.java:536) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.handler.codec.replay.ReplayingDecoder.messageReceived(ReplayingDecoder.java:435) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:559) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:268) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:255) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.socket.nio.NioWorker.read(NioWorker.java:88) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.socket.nio.AbstractNioWorker.process(AbstractNioWorker.java:108) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.socket.nio.AbstractNioWorker.run(AbstractNioWorker.java:89) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.channel.socket.nio.NioWorker.run(NioWorker.java:178) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108) [netty-3.10.3.Final.jar:na]
            at org.jboss.netty.util.internal.DeadLockProofWorker.run(DeadLockProofWorker.java:42) [netty-3.10.3.Final.jar:na]
            at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_45]
            at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_45]
            at java.lang.Thread.run(Thread.java:745) [na:1.8.0_45]

Looking at the stack trace, it appears you are using an old version of the fabric8 Kubernetes client (we migrated from async-http-client to okhttp a while ago). The first thing to do is upgrade to the latest version (io.fabric8:kubernetes-client:1.3.90 at the time of writing). Here is a snippet for watching events:

Watch watch = client.events().inAnyNamespace().watch(new Watcher<Event>() {
    @Override
    public void eventReceived(Action action, Event resource) {
        logger.info("{}: {}", action, resource);
    }

    @Override
    public void onClose(KubernetesClientException e) {
        if (e != null) {
            e.printStackTrace();
            logger.error(e.getMessage(), e);
        }
    }
});

Unless you want to watch a specific namespace, in which case you would use client.events().inNamespace(...).watch(...).
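
For reference (not part of the original answer), here is a minimal sketch of the same watch in Scala, the language of the question's sample program, assuming the upgraded io.fabric8:kubernetes-client:1.3.90 (or later) is on the classpath; the object name EventWatchSketch and the no-arg client constructor are just for illustration:

// Minimal Scala sketch (an assumption, not the answerer's code).
// Upgrade the client first, e.g. in sbt:
//   libraryDependencies += "io.fabric8" % "kubernetes-client" % "1.3.90"
import io.fabric8.kubernetes.api.model.Event
import io.fabric8.kubernetes.client.Watcher.Action
import io.fabric8.kubernetes.client.{DefaultKubernetesClient, KubernetesClientException, Watcher}

object EventWatchSketch extends App {
  // no-arg constructor auto-configures the client (or pass a Config built as in the question)
  val client = new DefaultKubernetesClient()

  // mirror the Java snippet above: implement both Watcher callbacks
  val watch = client.events().inAnyNamespace().watch(new Watcher[Event] {
    override def eventReceived(action: Action, event: Event): Unit =
      println(s"$action: ${event.getMessage}")

    override def onClose(e: KubernetesClientException): Unit =
      if (e != null) e.printStackTrace()
  })

  // close the watch and the client when finished:
  // watch.close()
  // client.close()
}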