
How to solve a Kerberos authentication problem with a Hive source?

Background: The Flink job uses a Kerberos-secured Hive as both source and sink. The same code passes Kerberos authentication when tested locally and can query and insert data into Hive, but once the job is submitted to the cluster it fails with a Kerberos authentication error.

Flink: 1.9.1 (flink-1.9.1/lib/ contains flink-dist_2.11-1.9.1.jar, flink-shaded-hadoop-2-uber-2.7.5-7.0.jar, log4j-1.2.17.jar, slf4j-log4j12-1.7.15.jar)
Hive: 2.1.1

Main jars the Flink job depends on:

[INFO] +- org.apache.flink:flink-table-api-java:jar:flink-1.9.1:compile
[INFO] |  +- org.apache.flink:flink-table-common:jar:flink-1.9.1:compile
[INFO] |  |  - org.apache.flink:flink-core:jar:flink-1.9.1:compile
[INFO] |  |  +- org.apache.flink:flink-annotations:jar:flink-1.9.1:compile
[INFO] |  |  +- org.apache.flink:flink-metrics-core:jar:flink-1.9.1:compile
[INFO] |  |  - com.esotericsoftware.kryo:kryo:jar:2.24.0:compile
[INFO] |  |  +- com.esotericsoftware.minlog:minlog:jar:1.2:compile
[INFO] |  |  - org.objenesis:objenesis:jar:2.1:compile
[INFO] |  +- com.google.code.findbugs:jsr305:jar:1.3.9:compile
[INFO] |  - org.apache.flink:force-shading:jar:1.9.1:compile
[INFO] +- org.apache.flink:flink-table-planner-blink_2.11:jar:flink-1.9.1:compile
[INFO] |  +- org.apache.flink:flink-table-api-scala_2.11:jar:flink-1.9.1:compile
[INFO] |  |  +- org.scala-lang:scala-reflect:jar:2.11.12:compile
[INFO] |  |  - org.scala-lang:scala-compiler:jar:2.11.12:compile
[INFO] |  +- org.apache.flink:flink-table-api-java-bridge_2.11:jar:flink-1.9.1:compile
[INFO] |  |  +- org.apache.flink:flink-java:jar:flink-1.9.1:compile
[INFO] |  |  - org.apache.flink:flink-streaming-java_2.11:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-table-api-scala-bridge_2.11:jar:flink-1.9.1:compile
[INFO] |  |  - org.apache.flink:flink-scala_2.11:jar:flink-1.9.1:compile
[INFO] |  +- org.apache.flink:flink-table-runtime-blink_2.11:jar:flink-1.9.1:compile
[INFO] |  |  +- org.codehaus.janino:janino:jar:3.0.9:compile
[INFO] |  |  - org.apache.calcite.avatica:avatica-core:jar:1.15.0:compile
[INFO] |  - org.reflections:reflections:jar:0.9.10:compile
[INFO] +- org.apache.flink:flink-table-planner_2.11:jar:flink-1.9.1:compile
[INFO] +- org.apache.commons:commons-lang3:jar:3.9:compile
[INFO] +- com.typesafe.akka:akka-actor_2.11:jar:2.5.21:compile
[INFO] |  +- org.scala-lang:scala-library:jar:2.11.8:compile
[INFO] |  +- com.typesafe:config:jar:1.3.3:compile
[INFO] |  - org.scala-lang.modules:scala-java8-compat_2.11:jar:0.7.0:compile
[INFO] +- org.apache.flink:flink-sql-client_2.11:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-clients_2.11:jar:1.9.1:compile
[INFO] |  |  - org.apache.flink:flink-optimizer_2.11:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-streaming-scala_2.11:jar:1.9.1:compile
[INFO] |  +- log4j:log4j:jar:1.2.17:compile
[INFO] |  - org.apache.flink:flink-shaded-jackson:jar:2.9.8-7.0:compile
[INFO] +- org.apache.flink:flink-json:jar:1.9.1:compile
[INFO] +- org.apache.flink:flink-csv:jar:1.9.1:compile
[INFO] +- org.apache.flink:flink-hbase_2.11:jar:1.9.1:compile
[INFO] +- org.apache.hbase:hbase-server:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase.thirdparty:hbase-shaded-protobuf:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase.thirdparty:hbase-shaded-netty:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase.thirdparty:hbase-shaded-miscellaneous:jar:2.2.1:compile
[INFO] |  |  - com.google.errorprone:error_prone_annotations:jar:2.3.3:compile
[INFO] |  +- org.apache.hbase:hbase-common:jar:2.2.1:compile
[INFO] |  |  - com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1:compile
[INFO] |  +- org.apache.hbase:hbase-http:jar:2.2.1:compile
[INFO] |  |  +- org.eclipse.jetty:jetty-util:jar:9.3.27.v20190418:compile
[INFO] |  |  +- org.eclipse.jetty:jetty-util-ajax:jar:9.3.27.v20190418:compile
[INFO] |  |  +- org.eclipse.jetty:jetty-http:jar:9.3.27.v20190418:compile
[INFO] |  |  +- org.eclipse.jetty:jetty-security:jar:9.3.27.v20190418:compile
[INFO] |  |  +- org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile
[INFO] |  |  |  +- org.glassfish.jersey.core:jersey-common:jar:2.25.1:compile
[INFO] |  |  |  |  +- org.glassfish.jersey.bundles.repackaged:jersey-guava:jar:2.25.1:compile
[INFO] |  |  |  |  - org.glassfish.hk2:osgi-resource-locator:jar:1.0.1:compile
[INFO] |  |  |  +- org.glassfish.jersey.core:jersey-client:jar:2.25.1:compile
[INFO] |  |  |  +- org.glassfish.jersey.media:jersey-media-jaxb:jar:2.25.1:compile
[INFO] |  |  |  +- javax.annotation:javax.annotation-api:jar:1.2:compile
[INFO] |  |  |  +- org.glassfish.hk2:hk2-api:jar:2.5.0-b32:compile
[INFO] |  |  |  |  +- org.glassfish.hk2:hk2-utils:jar:2.5.0-b32:compile
[INFO] |  |  |  |  - org.glassfish.hk2.external:aopalliance-repackaged:jar:2.5.0-b32:compile
[INFO] |  |  |  +- org.glassfish.hk2.external:javax.inject:jar:2.5.0-b32:compile
[INFO] |  |  |  - org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile
[INFO] |  |  +- org.glassfish.jersey.containers:jersey-container-servlet-core:jar:2.25.1:compile
[INFO] |  |  - javax.ws.rs:javax.ws.rs-api:jar:2.0.1:compile
[INFO] |  +- org.apache.hbase:hbase-protocol:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-protocol-shaded:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-procedure:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-client:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-zookeeper:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-replication:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-metrics-api:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-metrics:jar:2.2.1:compile
[INFO] |  +- commons-codec:commons-codec:jar:1.10:compile
[INFO] |  +- org.apache.hbase:hbase-hadoop-compat:jar:2.2.1:compile
[INFO] |  +- org.apache.hbase:hbase-hadoop2-compat:jar:2.2.1:compile
[INFO] |  +- org.eclipse.jetty:jetty-server:jar:9.3.27.v20190418:compile
[INFO] |  |  - org.eclipse.jetty:jetty-io:jar:9.3.27.v20190418:compile
[INFO] |  +- org.eclipse.jetty:jetty-servlet:jar:9.3.27.v20190418:compile
[INFO] |  +- org.eclipse.jetty:jetty-webapp:jar:9.3.27.v20190418:compile
[INFO] |  |  - org.eclipse.jetty:jetty-xml:jar:9.3.27.v20190418:compile
[INFO] |  +- org.glassfish.web:javax.servlet.jsp:jar:2.3.2:compile
[INFO] |  |  - org.glassfish:javax.el:jar:3.0.1-b11:compile (version selected from constraint [3.0.0,))
[INFO] |  +- javax.servlet.jsp:javax.servlet.jsp-api:jar:2.3.1:compile
[INFO] |  +- io.dropwizard.metrics:metrics-core:jar:3.2.6:compile
[INFO] |  +- commons-io:commons-io:jar:2.5:compile
[INFO] |  +- org.apache.commons:commons-math3:jar:3.6.1:compile
[INFO] |  +- org.apache.zookeeper:zookeeper:jar:3.4.10:compile
[INFO] |  +- javax.servlet:javax.servlet-api:jar:3.1.0:compile
[INFO] |  +- org.apache.htrace:htrace-core4:jar:4.2.0-incubating:compile
[INFO] |  +- com.lmax:disruptor:jar:3.3.6:compile
[INFO] |  +- commons-logging:commons-logging:jar:1.2:compile
[INFO] |  +- org.apache.commons:commons-crypto:jar:1.0.0:compile
[INFO] |  +- org.apache.hadoop:hadoop-distcp:jar:2.8.5:compile
[INFO] |  - org.apache.yetus:audience-annotations:jar:0.5.0:compile
[INFO] +- com.google.protobuf:protobuf-java:jar:2.5.0:compile
[INFO] +- mysql:mysql-connector-java:jar:8.0.18:compile
[INFO] +- org.apache.flink:flink-connector-hive_2.11:jar:1.9.1:compile
[INFO] +- org.apache.flink:flink-hadoop-compatibility_2.11:jar:1.9.1:compile
[INFO] +- org.apache.flink:flink-shaded-hadoop-2-uber:jar:2.7.5-7.0:provided
[INFO] +- org.apache.hive:hive-exec:jar:2.1.1:compile
[INFO] |  +- org.apache.hive:hive-ant:jar:2.1.1:compile
[INFO] |  |  - org.apache.velocity:velocity:jar:1.5:compile
[INFO] |  |  - oro:oro:jar:2.0.8:compile
[INFO] |  +- org.apache.hive:hive-llap-tez:jar:2.1.1:compile
[INFO] |  |  +- org.apache.hive:hive-common:jar:2.1.1:compile
[INFO] |  |  |  +- org.apache.hive:hive-storage-api:jar:2.1.1:compile
[INFO] |  |  |  +- org.apache.hive:hive-orc:jar:2.1.1:compile
[INFO] |  |  |  |  - org.iq80.snappy:snappy:jar:0.2:compile
[INFO] |  |  |  +- org.eclipse.jetty.aggregate:jetty-all:jar:7.6.0.v20120127:compile
[INFO] |  |  |  |  +- org.apache.geronimo.specs:geronimo-jta_1.1_spec:jar:1.1.1:compile
[INFO] |  |  |  |  +- javax.mail:mail:jar:1.4.1:compile
[INFO] |  |  |  |  +- javax.activation:activation:jar:1.1:compile
[INFO] |  |  |  |  +- org.apache.geronimo.specs:geronimo-jaspic_1.0_spec:jar:1.0:compile
[INFO] |  |  |  |  +- org.apache.geronimo.specs:geronimo-annotation_1.0_spec:jar:1.1.1:compile
[INFO] |  |  |  |  - asm:asm-commons:jar:3.1:compile
[INFO] |  |  |  |  - asm:asm-tree:jar:3.1:compile
[INFO] |  |  |  |  - asm:asm:jar:3.1:compile
[INFO] |  |  |  +- org.eclipse.jetty.orbit:javax.servlet:jar:3.0.0.v201112011016:compile
[INFO] |  |  |  +- joda-time:joda-time:jar:2.8.1:compile
[INFO] |  |  |  +- org.json:json:jar:20160810:compile
[INFO] |  |  |  +- io.dropwizard.metrics:metrics-jvm:jar:3.1.0:compile
[INFO] |  |  |  +- io.dropwizard.metrics:metrics-json:jar:3.1.0:compile
[INFO] |  |  |  - com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter:jar:0.1.2:compile
[INFO] |  |  - org.apache.hive:hive-llap-client:jar:2.1.1:compile
[INFO] |  |  - org.apache.hive:hive-llap-common:jar:2.1.1:compile
[INFO] |  |  - org.apache.hive:hive-serde:jar:2.1.1:compile
[INFO] |  |  +- org.apache.hive:hive-service-rpc:jar:2.1.1:compile
[INFO] |  |  |  +- tomcat:jasper-compiler:jar:5.5.23:compile
[INFO] |  |  |  |  +- javax.servlet:jsp-api:jar:2.0:compile
[INFO] |  |  |  |  - ant:ant:jar:1.6.5:compile
[INFO] |  |  |  +- tomcat:jasper-runtime:jar:5.5.23:compile
[INFO] |  |  |  |  +- javax.servlet:servlet-api:jar:2.4:compile
[INFO] |  |  |  |  - commons-el:commons-el:jar:1.0:compile
[INFO] |  |  |  - org.apache.thrift:libfb303:jar:0.9.3:compile
[INFO] |  |  +- org.apache.avro:avro:jar:1.7.7:compile
[INFO] |  |  |  - com.thoughtworks.paranamer:paranamer:jar:2.3:compile
[INFO] |  |  +- net.sf.opencsv:opencsv:jar:2.3:compile
[INFO] |  |  - org.apache.parquet:parquet-hadoop-bundle:jar:1.8.1:compile
[INFO] |  +- org.apache.hive:hive-shims:jar:2.1.1:compile
[INFO] |  |  +- org.apache.hive.shims:hive-shims-common:jar:2.1.1:compile
[INFO] |  |  |  - org.apache.thrift:libthrift:jar:0.9.3:compile
[INFO] |  |  +- org.apache.hive.shims:hive-shims-0.23:jar:2.1.1:runtime
[INFO] |  |  |  - org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.6.1:runtime
[INFO] |  |  |  +- org.apache.hadoop:hadoop-annotations:jar:2.6.1:runtime
[INFO] |  |  |  +- com.google.inject.extensions:guice-servlet:jar:3.0:runtime
[INFO] |  |  |  +- com.google.inject:guice:jar:3.0:runtime
[INFO] |  |  |  |  +- javax.inject:javax.inject:jar:1:runtime
[INFO] |  |  |  |  - aopalliance:aopalliance:jar:1.0:runtime
[INFO] |  |  |  +- com.sun.jersey:jersey-json:jar:1.9:runtime
[INFO] |  |  |  |  +- com.sun.xml.bind:jaxb-impl:jar:2.2.3-1:runtime
[INFO] |  |  |  |  +- org.codehaus.jackson:jackson-core-asl:jar:1.8.3:compile
[INFO] |  |  |  |  +- org.codehaus.jackson:jackson-mapper-asl:jar:1.8.3:compile
[INFO] |  |  |  |  +- org.codehaus.jackson:jackson-jaxrs:jar:1.8.3:runtime
[INFO] |  |  |  |  - org.codehaus.jackson:jackson-xc:jar:1.8.3:runtime
[INFO] |  |  |  +- com.sun.jersey.contribs:jersey-guice:jar:1.9:runtime
[INFO] |  |  |  |  - com.sun.jersey:jersey-server:jar:1.9:runtime
[INFO] |  |  |  +- org.apache.hadoop:hadoop-yarn-common:jar:2.6.1:runtime
[INFO] |  |  |  +- org.apache.hadoop:hadoop-yarn-api:jar:2.6.1:runtime
[INFO] |  |  |  +- javax.xml.bind:jaxb-api:jar:2.2.2:runtime
[INFO] |  |  |  |  - javax.xml.stream:stax-api:jar:1.0-2:runtime
[INFO] |  |  |  +- org.codehaus.jettison:jettison:jar:1.1:runtime
[INFO] |  |  |  +- com.sun.jersey:jersey-core:jar:1.9:runtime
[INFO] |  |  |  +- com.sun.jersey:jersey-client:jar:1.9:runtime
[INFO] |  |  |  +- org.mortbay.jetty:jetty-util:jar:6.1.26:runtime
[INFO] |  |  |  +- org.apache.hadoop:hadoop-yarn-server-common:jar:2.6.1:runtime
[INFO] |  |  |  |  - org.fusesource.leveldbjni:leveldbjni-all:jar:1.8:runtime
[INFO] |  |  |  +- org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice:jar:2.6.1:runtime
[INFO] |  |  |  - org.apache.hadoop:hadoop-yarn-server-web-proxy:jar:2.6.1:runtime
[INFO] |  |  |  - org.mortbay.jetty:jetty:jar:6.1.26:runtime
[INFO] |  |  - org.apache.hive.shims:hive-shims-scheduler:jar:2.1.1:runtime
[INFO] |  +- commons-httpclient:commons-httpclient:jar:3.0.1:compile
[INFO] |  +- org.antlr:antlr-runtime:jar:3.4:compile
[INFO] |  |  +- org.antlr:stringtemplate:jar:3.2.1:compile
[INFO] |  |  - antlr:antlr:jar:2.7.7:compile
[INFO] |  +- org.antlr:ST4:jar:4.0.4:compile
[INFO] |  +- org.apache.ant:ant:jar:1.9.1:compile
[INFO] |  |  - org.apache.ant:ant-launcher:jar:1.9.1:compile
[INFO] |  +- org.apache.commons:commons-compress:jar:1.10:compile
[INFO] |  +- org.apache.ivy:ivy:jar:2.4.0:compile
[INFO] |  +- org.apache.curator:curator-framework:jar:2.6.0:compile
[INFO] |  |  - org.apache.curator:curator-client:jar:2.6.0:compile
[INFO] |  +- org.apache.curator:apache-curator:pom:2.6.0:compile
[INFO] |  +- org.codehaus.groovy:groovy-all:jar:2.4.4:compile
[INFO] |  +- org.apache.calcite:calcite-core:jar:1.6.0:compile
[INFO] |  |  +- org.apache.calcite:calcite-linq4j:jar:1.6.0:compile
[INFO] |  |  +- commons-dbcp:commons-dbcp:jar:1.4:compile
[INFO] |  |  |  - commons-pool:commons-pool:jar:1.5.4:compile
[INFO] |  |  +- net.hydromatic:aggdesigner-algorithm:jar:6.0:compile
[INFO] |  |  - org.codehaus.janino:commons-compiler:jar:2.7.6:compile
[INFO] |  +- org.apache.calcite:calcite-avatica:jar:1.6.0:compile
[INFO] |  +- stax:stax-api:jar:1.0.1:compile
[INFO] |  - jline:jline:jar:2.12:compile
[INFO] +- org.datanucleus:datanucleus-core:jar:4.1.6:compile
[INFO] +- org.datanucleus:datanucleus-api-jdo:jar:4.2.4:compile
[INFO] +- org.datanucleus:javax.jdo:jar:3.2.0-m3:compile
[INFO] |  - javax.transaction:transaction-api:jar:1.1:compile
[INFO] +- org.datanucleus:datanucleus-rdbms:jar:4.1.9:compile
[INFO] +- hadoop-lzo:hadoop-lzo:jar:0.4.14:compile
[INFO] - org.apache.flink:flink-runtime-web_2.11:jar:1.9.1:provided
[INFO] +- org.apache.flink:flink-runtime_2.11:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-queryable-state-client-java:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-hadoop-fs:jar:1.9.1:compile
[INFO] |  +- org.apache.flink:flink-shaded-asm-6:jar:6.2.1-7.0:compile
[INFO] |  +- com.typesafe.akka:akka-stream_2.11:jar:2.5.21:compile
[INFO] |  |  +- org.reactivestreams:reactive-streams:jar:1.0.2:compile
[INFO] |  |  - com.typesafe:ssl-config-core_2.11:jar:0.3.7:compile
[INFO] |  +- com.typesafe.akka:akka-protobuf_2.11:jar:2.5.21:compile
[INFO] |  +- com.typesafe.akka:akka-slf4j_2.11:jar:2.4.11:compile
[INFO] |  +- org.clapper:grizzled-slf4j_2.11:jar:1.3.2:compile
[INFO] |  +- com.github.scopt:scopt_2.11:jar:3.5.0:compile
[INFO] |  +- org.xerial.snappy:snappy-java:jar:1.1.4:compile
[INFO] |  - com.twitter:chill_2.11:jar:0.7.6:compile
[INFO] |  - com.twitter:chill-java:jar:0.7.6:compile
[INFO] +- org.apache.flink:flink-shaded-netty:jar:4.1.32.Final-7.0:compile
[INFO] +- org.apache.flink:flink-shaded-guava:jar:18.0-7.0:compile
[INFO] - org.javassist:javassist:jar:3.19.0-GA:compile
[INFO] ————————————————————————————————————

Log:

2020-02-28 17:17:07,890 INFO org.apache.hadoop.security.UserGroupInformation - Login successful for user /dev@***.COM using keytab file /home/***/key.keytab

The line above comes from the Flink log; it shows that the Kerberos login itself succeeds. Even so, the following exception is still thrown:

2020-02-28 17:17:08,658 INFO org.apache.hadoop.hive.metastore.ObjectStore - Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,Database,Type,FieldSchema,Order"
2020-02-28 17:17:09,280 INFO org.apache.hadoop.hive.metastore.MetaStoreDirectSql - Using direct SQL, underlying DB is MYSQL
2020-02-28 17:17:09,283 INFO org.apache.hadoop.hive.metastore.ObjectStore - Initialized ObjectStore
2020-02-28 17:17:09,450 INFO org.apache.hadoop.hive.metastore.HiveMetaStore - Added admin role in metastore
2020-02-28 17:17:09,452 INFO org.apache.hadoop.hive.metastore.HiveMetaStore - Added public role in metastore
2020-02-28 17:17:09,474 INFO org.apache.hadoop.hive.metastore.HiveMetaStore - No user is added in admin role, since config is empty
2020-02-28 17:17:09,634 INFO org.apache.flink.table.catalog.hive.HiveCatalog - Connected to Hive metastore
2020-02-28 17:17:09,635 INFO org.apache.hadoop.hive.metastore.HiveMetaStore - 0: get_database: ***
2020-02-28 17:17:09,637 INFO org.apache.hadoop.hive.metastore.HiveMetaStore.audit - ugi= ip=unknown-ip-addr cmd=get_database: ***
2020-02-28 17:17:09,658 INFO org.apache.hadoop.hive.ql.metadata.HiveUtils - Adding metastore authorization provider: org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
2020-02-28 17:17:10,166 WARN org.apache.hadoop.hdfs.shortcircuit.DomainSocketFactory - The short-circuit local reads feature cannot be used because libhadoop cannot be loaded.
2020-02-28 17:17:10,391 WARN org.apache.hadoop.ipc.Client - Exception encountered while connecting to the server : org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
2020-02-28 17:17:10,397 WARN org.apache.hadoop.ipc.Client - Exception encountered while connecting to the server : org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
2020-02-28 17:17:10,398 INFO org.apache.hadoop.io.retry.RetryInvocationHandler - Exception while invoking getFileInfo of class ClientNamenodeProtocolTranslatorPB over ******.org/***.***.***.***:8020 after 1 fail over attempts. Trying to fail over immediately.
java.io.IOException: Failed on local exception: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]; Host Details : local host is: "***.***.***.org/***.***.***.***"; destination host is: "******.org":8020;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:776)
at org.apache.hadoop.ipc.Client.call(Client.java:1480)
at org.apache.hadoop.ipc.Client.call(Client.java:1413)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy41.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:776)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy42.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2117)
at org.apache.hadoop.hdfs.DistributedFileSystem$22.doCall(DistributedFileSystem.java:1305)
at org.apache.hadoop.hdfs.DistributedFileSystem$22.doCall(DistributedFileSystem.java:1301)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1317)
at org.apache.hadoop.hive.common.FileUtils.getFileStatusOrNull(FileUtils.java:770)
at org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider.checkPermissions(StorageBasedAuthorizationProvider.java:368)
at org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider.authorize(StorageBasedAuthorizationProvider.java:343)
at org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider.authorize(StorageBasedAuthorizationProvider.java:152)
at org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener.authorizeReadDatabase(AuthorizationPreEventListener.java:204)
at org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener.onEvent(AuthorizationPreEventListener.java:152)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.firePreEvent(HiveMetaStore.java:2153)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_database(HiveMetaStore.java:932)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:140)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:99)
at com.sun.proxy.$Proxy35.get_database(Unknown Source)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabase(HiveMetaStoreClient.java:1280)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:150)
at com.sun.proxy.$Proxy36.getDatabase(Unknown Source)
at org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.getDatabase(HiveMetastoreClientWrapper.java:102)
at org.apache.flink.table.catalog.hive.HiveCatalog.databaseExists(HiveCatalog.java:347)
at org.apache.flink.table.catalog.hive.HiveCatalog.open(HiveCatalog.java:244)
at org.apache.flink.table.catalog.CatalogManager.registerCatalog(CatalogManager.java:153)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.registerCatalog(TableEnvironmentImpl.java:170)
... (in the frames elided here, the code calls UserGroupInformation.loginUserFromKeytab(principal, keytab); and the authentication succeeds)
at this is my code.main(MyMainClass.java:24)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:576)
at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:438)
at org.apache.flink.client.program.OptimizerPlanEnvironment.getOptimizedPlan(OptimizerPlanEnvironment.java:83)
at org.apache.flink.client.program.PackagedProgramUtils.createJobGraph(PackagedProgramUtils.java:80)
at org.apache.flink.client.program.PackagedProgramUtils.createJobGraph(PackagedProgramUtils.java:122)
at org.apache.flink.client.cli.CliFrontend.runProgram(CliFrontend.java:227)
at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:205)
at org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:1010)
at org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:1083)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
at org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1083)
Caused by: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:688)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:651)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:738)
at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:376)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1529)
at org.apache.hadoop.ipc.Client.call(Client.java:1452)
... 67 more
Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
at org.apache.hadoop.security.SaslRpcClient.selectSaslClient(SaslRpcClient.java:172)
at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:396)
at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:561)
at org.apache.hadoop.ipc.Client$Connection.access$1900(Client.java:376)
at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:730)
at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:726)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:726)
... 70 more

My current diagnosis is that the job jar looks polluted (conflicting bundled dependencies). Any pointers would be much appreciated. Thanks!
*From the volunteer-compiled Flink mailing list archive
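For reference, below is a minimal sketch of the kind of client-side keytab login the question describes (the principal and keytab path are placeholders; the real values are redacted above). Whether the logged-in UGI actually takes effect depends on the Hadoop Configuration the surrounding code loads, which is why getting the right conf onto the classpath matters here.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosLogin {
    public static void login() throws java.io.IOException {
        // Placeholder values; the real principal and keytab are redacted.
        String principal = "dev@EXAMPLE.COM";
        String keytab = "/home/user/key.keytab";

        Configuration conf = new Configuration();
        // Without kerberos auth mode, UGI stays in "simple" mode and the
        // keytab login has no effect; normally this comes from core-site.xml.
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
    }
}
```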

玛丽莲梦嘉 2021-12-02 16:27:50
1 answer
  • From the log you posted, it looks like an embedded metastore is being created. Could you check whether the HiveCatalog is reading an incorrect hive conf? Also, are all of the Maven dependencies you listed actually packaged into your Flink job's jar? Dependencies such as datanucleus should not be needed. *From the volunteer-compiled Flink mailing list archive
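To illustrate the first suggestion: in Flink 1.9 the HiveCatalog constructor takes an explicit hiveConfDir, so you can verify that it loads a hive-site.xml pointing at the remote metastore (hive.metastore.uris) rather than silently falling back to an embedded one. A minimal sketch, with placeholder catalog name and path:

```java
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class RegisterHiveCatalog {
    public static void main(String[] args) {
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inBatchMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // hiveConfDir must contain the hive-site.xml that sets
        // hive.metastore.uris; if the wrong conf is picked up, HiveCatalog
        // can end up starting an embedded metastore, as the log suggests.
        HiveCatalog hive = new HiveCatalog(
                "myhive",          // catalog name (placeholder)
                "default",         // default database (placeholder)
                "/etc/hive/conf",  // hiveConfDir (placeholder path)
                "2.1.1");          // Hive version from the question
        tableEnv.registerCatalog("myhive", hive);
        tableEnv.useCatalog("myhive");
    }
}
```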

    2021-12-02 17:22:04
