hadoop - EC2 cluster created by spark-ec2 uses old HDFS 1.0 instead of 2.5
I created a cluster using the spark-ec2 script, which installs HDFS version 1.0. I want to use this cluster to connect to Hive installed on a Cloudera CDH 5.3 cluster, but I get the following error:
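For context, here is a minimal sketch of the kind of launch command involved (the cluster name, key pair, identity file, and slave count are hypothetical placeholders). The spark-ec2 script's --hadoop-major-version flag controls which Hadoop/HDFS build it installs, and it defaults to the 1.x line:

    # Hypothetical spark-ec2 launch; key pair, identity file, slave count,
    # and cluster name are placeholders. Omitting --hadoop-major-version
    # leaves the default Hadoop 1.x HDFS; setting it to "2" selects a
    # Hadoop 2 build instead.
    ./spark-ec2 \
      --key-pair=my-keypair \
      --identity-file=/path/to/my-keypair.pem \
      --slaves=2 \
      --hadoop-major-version=2 \
      launch my-spark-cluster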
    org.apache.hadoop.ipc.RemoteException: Server IPC version 9 cannot communicate with client version 4
        at org.apache.hadoop.ipc.Client.call(Client.java:1070)
        at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:225)
        at com.sun.proxy.$Proxy10.getProtocolVersion(Unknown Source)
        at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:396)
        at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:379)
        at org.apache.hadoop.hdfs.DFSClient.createRPCNamenode(DFSClient.java:119)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:238)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:203)
        at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:89)
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:1386)
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:66)
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1404)
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:254)
        at org.apache.hadoop.fs.Path.getFileSystem(Path.java:187)
        at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:176)
        at org.apache.hadoop.mapred.SequenceFileInputFormat.listStatus(SequenceFileInputFormat.java:40)
        at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:208)
        at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:203)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
        at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
        at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
        at org.apache.spark.rdd.UnionRDD$$anonfun$1.apply(UnionRDD.scala:66)
        at org.apache.spark.rdd.UnionRDD$$anonfun$1.apply(UnionRDD.scala:66)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
        at scala.collection.AbstractTraversable.map(Traversable.scala:105)
        at org.apache.spark.rdd.UnionRDD.getPartitions(UnionRDD.scala:66)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
        at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1511)
        at org.apache.spark.rdd.RDD.collect(RDD.scala:813)
        at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:83)
        at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:815)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:23)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:28)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
        at $iwC$$iwC$$iwC.<init>(<console>:36)
        at $iwC$$iwC.<init>(<console>:38)
        at $iwC.<init>(<console>:40)
        at <init>(<console>:42)
        at .<init>(<console>:46)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:901)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:813)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:656)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:664)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:669)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:996)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:944)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1058)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
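The message itself points at the mismatch: IPC version 9 is the protocol spoken by Hadoop 2.x NameNodes (CDH 5.3 ships Hadoop 2.5), while client version 4 is the Hadoop 1.x client that spark-ec2 installed. One way to confirm which client the Spark cluster is actually running (the path below follows the default spark-ec2 layout and is an assumption; adjust if your install differs):

    # On the spark-ec2 master node: print the bundled Hadoop client version.
    # A 1.0.x release here confirms the old IPC protocol (client version 4).
    /root/ephemeral-hdfs/bin/hadoop version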