maven - Error in running Spark program "java.lang.IllegalArgumentException: Unsupported type: com.spark.example.main.App$Product"
I am trying to run a sample Spark-Cassandra program in Java that creates tables, joins and computes data, and stores the results back into a table. The program fails with this error:

java.lang.IllegalArgumentException: Unsupported type: com.spark.example.main.App$Product

Here is the program:
package com.spark.example.main;

import com.datastax.driver.core.Session;
import com.datastax.spark.connector.cql.CassandraConnector;
import com.google.common.base.Optional;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import scala.Tuple2;

import static com.datastax.spark.connector.japi.CassandraJavaUtil.*;

import java.io.Serializable;
import java.math.BigDecimal;
import java.text.MessageFormat;
import java.util.*;

//import static com.datastax.spark.connector.CassandraJavaUtil.*;

public class App implements Serializable {
    private transient SparkConf conf;

    private App(SparkConf conf) {
        this.conf = conf;
    }

    private void run() {
        JavaSparkContext sc = new JavaSparkContext(conf);
        generateData(sc);
        compute(sc);
        // showResults(sc);
        sc.stop();
    }

    private void generateData(JavaSparkContext sc) {
        CassandraConnector connector = CassandraConnector.apply(sc.getConf());

        // Prepare the schema
        try (Session session = connector.openSession()) {
            session.execute("DROP KEYSPACE IF EXISTS java_api");
            session.execute("CREATE KEYSPACE java_api WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
            session.execute("CREATE TABLE java_api.products (id INT PRIMARY KEY, name TEXT, parents LIST<INT>)");
            session.execute("CREATE TABLE java_api.sales (id UUID PRIMARY KEY, product INT, price DECIMAL)");
            session.execute("CREATE TABLE java_api.summaries (product INT PRIMARY KEY, summary DECIMAL)");
        }

        // Prepare the products hierarchy
        List<Product> products = Arrays.asList(
                new Product(0, "All products", Collections.<Integer>emptyList()),
                new Product(1, "Product A", Arrays.asList(0)),
                new Product(4, "Product A1", Arrays.asList(0, 1)),
                new Product(5, "Product A2", Arrays.asList(0, 1)),
                new Product(2, "Product B", Arrays.asList(0)),
                new Product(6, "Product B1", Arrays.asList(0, 2)),
                new Product(7, "Product B2", Arrays.asList(0, 2)),
                new Product(3, "Product C", Arrays.asList(0)),
                new Product(8, "Product C1", Arrays.asList(0, 3)),
                new Product(9, "Product C2", Arrays.asList(0, 3)));

        JavaRDD<Product> productsRDD = sc.parallelize(products);
        javaFunctions(productsRDD).writerBuilder("java_api", "products", mapToRow(Product.class)).saveToCassandra();

        JavaRDD<Sale> salesRDD = productsRDD.filter(new Function<Product, Boolean>() {
            @Override
            public Boolean call(Product product) throws Exception {
                return product.getParents().size() == 2;
            }
        }).flatMap(new FlatMapFunction<Product, Sale>() {
            @Override
            public Iterable<Sale> call(Product product) throws Exception {
                Random random = new Random();
                List<Sale> sales = new ArrayList<>(1000);
                for (int i = 0; i < 1000; i++) {
                    sales.add(new Sale(UUID.randomUUID(), product.getId(), BigDecimal.valueOf(random.nextDouble())));
                }
                return sales;
            }
        });

        javaFunctions(salesRDD).writerBuilder("java_api", "sales", mapToRow(Sale.class)).saveToCassandra();
    }

    private void compute(JavaSparkContext sc) {
        JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
                .cassandraTable("java_api", "products", mapColumnTo(Product.class))
                .keyBy(new Function<Product, Integer>() {
                    @Override
                    public Integer call(Product product) throws Exception {
                        return product.getId();
                    }
                });

        JavaPairRDD<Integer, Sale> salesRDD = javaFunctions(sc)
                .cassandraTable("java_api", "sales", mapColumnTo(Sale.class))
                .keyBy(new Function<Sale, Integer>() {
                    @Override
                    public Integer call(Sale sale) throws Exception {
                        return sale.getProduct();
                    }
                });

        JavaPairRDD<Integer, Tuple2<Sale, Product>> joinedRDD = salesRDD.join(productsRDD);

        JavaPairRDD<Integer, BigDecimal> allSalesRDD = joinedRDD
                .flatMap(new PairFlatMapFunction<Tuple2<Integer, Tuple2<Sale, Product>>, Integer, BigDecimal>() {
                    @Override
                    public Iterable<Tuple2<Integer, BigDecimal>> call(Tuple2<Integer, Tuple2<Sale, Product>> input) throws Exception {
                        Tuple2<Sale, Product> saleWithProduct = input._2();
                        List<Tuple2<Integer, BigDecimal>> allSales = new ArrayList<>(saleWithProduct._2().getParents().size() + 1);
                        allSales.add(new Tuple2<>(saleWithProduct._1().getProduct(), saleWithProduct._1().getPrice()));
                        for (Integer parentProduct : saleWithProduct._2().getParents()) {
                            allSales.add(new Tuple2<>(parentProduct, saleWithProduct._1().getPrice()));
                        }
                        return allSales;
                    }
                });

        JavaRDD<Summary> summariesRDD = allSalesRDD.reduceByKey(new Function2<BigDecimal, BigDecimal, BigDecimal>() {
            @Override
            public BigDecimal call(BigDecimal v1, BigDecimal v2) throws Exception {
                return v1.add(v2);
            }
        }).map(new Function<Tuple2<Integer, BigDecimal>, Summary>() {
            @Override
            public Summary call(Tuple2<Integer, BigDecimal> input) throws Exception {
                return new Summary(input._1(), input._2());
            }
        });

        javaFunctions(summariesRDD).writerBuilder("java_api", "summaries", mapToRow(Summary.class)).saveToCassandra();
    }

    private void showResults(JavaSparkContext sc) {
        JavaPairRDD<Integer, Summary> summariesRDD = javaFunctions(sc)
                .cassandraTable("java_api", "summaries", mapColumnTo(Summary.class))
                .keyBy(new Function<Summary, Integer>() {
                    @Override
                    public Integer call(Summary summary) throws Exception {
                        return summary.getProduct();
                    }
                });

        JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
                .cassandraTable("java_api", "products", mapColumnTo(Product.class))
                .keyBy(new Function<Product, Integer>() {
                    @Override
                    public Integer call(Product product) throws Exception {
                        return product.getId();
                    }
                });

        List<Tuple2<Product, Optional<Summary>>> results = productsRDD.leftOuterJoin(summariesRDD).values().toArray();

        for (Tuple2<Product, Optional<Summary>> result : results) {
            System.out.println(result);
        }
    }

    public static void main(String[] args) {
        String master = "";
        String host = "";
        if (args.length != 2) {
            System.err.println("Syntax: com.datastax.spark.demo.JavaDemo <Spark Master URL> <Cassandra contact point>");
            master = "local[4]";
            host = "localhost";
        } else {
            master = args[0];
            host = args[1];
        }

        SparkConf conf = new SparkConf();
        conf.setAppName("Java API demo");
        conf.setMaster(master);
        conf.set("spark.cassandra.connection.host", host);

        App app = new App(conf);
        app.run();
    }

    public static class Product implements Serializable {
        private Integer id;
        private String name;
        private List<Integer> parents;

        public Product() { }

        public Product(Integer id, String name, List<Integer> parents) {
            this.id = id;
            this.name = name;
            this.parents = parents;
        }

        public Integer getId() { return id; }
        public void setId(Integer id) { this.id = id; }

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }

        public List<Integer> getParents() { return parents; }
        public void setParents(List<Integer> parents) { this.parents = parents; }

        @Override
        public String toString() {
            return MessageFormat.format("Product'{'id={0}, name=''{1}'', parents={2}'}'", id, name, parents);
        }
    }

    public static class Sale implements Serializable {
        private UUID id;
        private Integer product;
        private BigDecimal price;

        public Sale() { }

        public Sale(UUID id, Integer product, BigDecimal price) {
            this.id = id;
            this.product = product;
            this.price = price;
        }

        public UUID getId() { return id; }
        public void setId(UUID id) { this.id = id; }

        public Integer getProduct() { return product; }
        public void setProduct(Integer product) { this.product = product; }

        public BigDecimal getPrice() { return price; }
        public void setPrice(BigDecimal price) { this.price = price; }

        @Override
        public String toString() {
            return MessageFormat.format("Sale'{'id={0}, product={1}, price={2}'}'", id, product, price);
        }
    }

    public static class Summary implements Serializable {
        private Integer product;
        private BigDecimal summary;

        public Summary() { }

        public Summary(Integer product, BigDecimal summary) {
            this.product = product;
            this.summary = summary;
        }

        public Integer getProduct() { return product; }
        public void setProduct(Integer product) { this.product = product; }

        public BigDecimal getSummary() { return summary; }
        public void setSummary(BigDecimal summary) { this.summary = summary; }

        @Override
        public String toString() {
            return MessageFormat.format("Summary'{'product={0}, summary={1}'}'", product, summary);
        }
    }
}
The pom.xml is:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>spark</groupId>
    <artifactId>wm</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>0.9.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>0.9.2</version>
        </dependency>
        <dependency>
            <groupId>com.datastax.cassandra</groupId>
            <artifactId>cassandra-driver-core</artifactId>
            <version>2.1.4</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.thrift</groupId>
                    <artifactId>libthrift</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.10.3</version>
        </dependency>
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector-java_2.10</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>0.9.1</version>
        </dependency>
    </dependencies>
</project>
And the error is:
Exception in thread "main" java.lang.IllegalArgumentException: Unsupported type: com.spark.example.main.App$Product
    at com.datastax.spark.connector.types.TypeConverter$.forCollectionType(TypeConverter.scala:755)
    at com.datastax.spark.connector.types.TypeConverter$.forType(TypeConverter.scala:767)
    at com.datastax.spark.connector.types.TypeConverter$.forType(TypeConverter.scala:776)
    at com.datastax.spark.connector.japi.CassandraJavaUtil.typeConverter(CassandraJavaUtil.java:181)
    at com.datastax.spark.connector.japi.CassandraJavaUtil.mapColumnTo(CassandraJavaUtil.java:218)
    at com.spark.example.main.App.compute(App.java:91)
    at com.spark.example.main.App.run(App.java:37)
    at com.spark.example.main.App.main(App.java:182)
15/03/20 14:30:38 INFO cql.CassandraConnector: Disconnected from Cassandra cluster: Test Cluster
Please help me.
Try changing this:
JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
        .cassandraTable("java_api", "products", mapColumnTo(Product.class))
        .keyBy(new Function<Product, Integer>() {
            @Override
            public Integer call(Product product) throws Exception {
                return product.getId();
            }
        });
to this:
JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
        .cassandraTable("java_api", "products", mapRowTo(Product.class))
        .keyBy(new Function<Product, Integer>() {
            @Override
            public Integer call(Product product) throws Exception {
                return product.getId();
            }
        });
That is, use mapRowTo() instead of mapColumnTo(). mapColumnTo() is meant for reading a single column into a simple type, while mapRowTo() maps an entire row onto a JavaBean like your Product class.
All of the other table reads need to be changed in the same way, as sketched below.
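For reference, here is roughly how the remaining reads in your compute() and showResults() methods would look with the same change applied. This is only a sketch based on the code in the question, with mapColumnTo swapped for mapRowTo; the surrounding class and imports are assumed to be the same as in your App.

// In compute(): map whole rows of java_api.sales onto Sale beans.
JavaPairRDD<Integer, Sale> salesRDD = javaFunctions(sc)
        .cassandraTable("java_api", "sales", mapRowTo(Sale.class))
        .keyBy(new Function<Sale, Integer>() {
            @Override
            public Integer call(Sale sale) throws Exception {
                return sale.getProduct();
            }
        });

// In showResults(): map whole rows of java_api.summaries onto Summary beans.
JavaPairRDD<Integer, Summary> summariesRDD = javaFunctions(sc)
        .cassandraTable("java_api", "summaries", mapRowTo(Summary.class))
        .keyBy(new Function<Summary, Integer>() {
            @Override
            public Integer call(Summary summary) throws Exception {
                return summary.getProduct();
            }
        });

The products read in showResults() gets the same treatment as the one shown above the sketch.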