Spark 2.2 throws an exception when reading Hadoop 3.0 data
Exception in thread "main" java.lang.IllegalAccessError: class org.apache.hadoop.hdfs.web.HftpFileSystem cannot access its superinterface org.apache.hadoop.hdfs.web.TokenAspect$TokenManagementDelegator
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
    at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:370)
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
    at java.util.ServiceLoader$1.next(ServiceLoader.java:480)
    at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:3202)
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3247)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3286)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3337)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3305)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:476)
    at org.apache.spark.util.Utils$.getHadoopFileSystem(Utils.scala:1857)
    at org.apache.spark.scheduler.EventLoggingListener.<init>(EventLoggingListener.scala:68)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:532)
    at com.izhonghong.utils.SparkContextHelper$.sc$lzycompute$1(SparkContextHelper.scala:46)
    at com.izhonghong.utils.SparkContextHelper$.sc$1(SparkContextHelper.scala:46)
    at com.izhonghong.utils.SparkContextHelper$.getContext(SparkContextHelper.scala:47)
    at com.izhonghong.data.filter.WeChatDataFilter$.<init>(WeChatDataFilter.scala:18)
    at com.izhonghong.data.filter.WeChatDataFilter$.<clinit>(WeChatDataFilter.scala)
    at com.izhonghong.data.filter.WeChatDataFilter.main(WeChatDataFilter.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Solution: Spark 2.2 pulls in the Hadoop 2.6.* jars by default. Remove the 2.6 jars from the dependencies, reference the Hadoop 3.0 jars explicitly, and relaunch the job; it then starts successfully.
The Maven configuration is as follows:
<properties>
    <hbase.version>2.0.0</hbase.version>
    <spark.version>2.2.0</spark.version>
    <scala.version>2.11.8</scala.version>
    <hadoop.version>3.0.0</hadoop.version>
</properties>
<dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jackson-databind</artifactId>
                <groupId>com.fasterxml.jackson.core</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jackson-databind</artifactId>
                <groupId>com.fasterxml.jackson.core</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>${spark.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>hadoop-client</artifactId>
                <groupId>org.apache.hadoop</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.11</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-it</artifactId>
        <version>${hbase.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>jackson-databind</artifactId>
                <groupId>com.fasterxml.jackson.core</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-server</artifactId>
        <version>${hbase.version}</version>
    </dependency>
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>${scala.version}</version>
    </dependency>
</dependencies>
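After swapping the jars, a quick sanity check is to print the Hadoop client version that the driver actually loads and to resolve the default FileSystem, which is the same ServiceLoader path that threw the IllegalAccessError above. The following is only a minimal Scala sketch under these assumptions; the object name HdfsVersionCheck and the app name are hypothetical and not part of the original project.

import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.util.VersionInfo
import org.apache.spark.sql.SparkSession

// Hypothetical sanity-check job: prints the Hadoop client version on the
// classpath and forces FileSystem resolution, which is where the
// IllegalAccessError surfaced before the 2.6 jars were removed.
object HdfsVersionCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("hdfs-version-check")
      .getOrCreate()

    // Version of the Hadoop client jars actually loaded by the driver;
    // it should report 3.0.0 once only the Hadoop 3.0 jars are referenced.
    println(s"Hadoop client version: ${VersionInfo.getVersion}")

    // Resolving the default FileSystem triggers the same ServiceLoader
    // scan shown in the stack trace above.
    val fs = FileSystem.get(spark.sparkContext.hadoopConfiguration)
    println(s"Default filesystem: ${fs.getUri}")

    spark.stop()
  }
}

If this job prints the expected 3.0.0 version and a valid filesystem URI instead of failing during FileSystem.loadFileSystems, the classpath no longer mixes the two Hadoop versions.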