I have installed and configured DSE 4.8.5 and am trying to run some Spark queries against it. The queries do return the expected results, but each one is followed by a dump of a Java stack error:
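(For context, the rdd below was created in the DSE pyspark shell roughly as follows; the keyspace and table names are hypothetical, inferred from the Row(key=1, value=u'abc') output rather than taken from the original post:)

>>> rdd = sc.cassandraTable("test", "kv")   # hypothetical keyspace/table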
>>> rdd.first()
WARN 2016-05-03 16:14:14,245 com.datastax.driver.core.NettyUtil: Found Netty's native epoll transport in the classpath, but epoll is not available. Using NIO instead.
java.lang.UnsatisfiedLinkError: /tmp/libnetty-transport-native-epoll3810431011156928603.so: /lib64/libc.so.6: version `GLIBC_2.10' not found (required by /tmp/libnetty-transport-native-epoll3810431011156928603.so)
at java.lang.ClassLoader$NativeLibrary.load(Native Method) ~[na:1.8.0_77]
at java.lang.ClassLoader.loadLibrary0(ClassLoader.java:1941) ~[na:1.8.0_77]
at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1824) ~[na:1.8.0_77]
at java.lang.Runtime.load0(Runtime.java:809) ~[na:1.8.0_77]
at java.lang.System.load(System.java:1086) ~[na:1.8.0_77]
at io.netty.util.internal.NativeLibraryLoader.load(NativeLibraryLoader.java:193) ~[netty-all-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.epoll.Native.<clinit>(Native.java:48) ~[netty-all-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.epoll.Epoll.<clinit>(Epoll.java:32) ~[netty-all-4.0.34.Final.jar:4.0.34.Final]
at java.lang.Class.forName0(Native Method) ~[na:1.8.0_77]
at java.lang.Class.forName(Class.java:264) ~[na:1.8.0_77]
at com.datastax.driver.core.NettyUtil.<clinit>(NettyUtil.java:68) ~[cassandra-driver-core-2.1.7.1.jar:na]
at com.datastax.driver.core.NettyOptions.eventLoopGroup(NettyOptions.java:101) [cassandra-driver-core-2.1.7.1.jar:na]
at com.datastax.driver.core.Connection$Factory.<init>(Connection.java:695) [cassandra-driver-core-2.1.7.1.jar:na]
at com.datastax.driver.core.Cluster$Manager.init(Cluster.java:1286) [cassandra-driver-core-2.1.7.1.jar:na]
at com.datastax.driver.core.Cluster.getMetadata(Cluster.java:339) [cassandra-driver-core-2.1.7.1.jar:na]
at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:157) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:150) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:150) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:81) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:241) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:150) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143) [spark-cassandra-connector_2.10-1.4.2.jar:1.4.2]
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at scala.Option.getOrElse(Option.scala:120) [scala-library-2.10.5.jar:na]
at org.apache.spark.rdd.RDD.partitions(RDD.scala:217) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at scala.Option.getOrElse(Option.scala:120) [scala-library-2.10.5.jar:na]
at org.apache.spark.rdd.RDD.partitions(RDD.scala:217) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.api.java.JavaRDDLike$class.partitions(JavaRDDLike.scala:65) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at org.apache.spark.api.java.AbstractJavaRDDLike.partitions(JavaRDDLike.scala:47) [spark-core_2.10-1.4.2.2.jar:1.4.2.2]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_77]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_77]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_77]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_77]
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231) [py4j-0.8.1.jar:na]
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379) [py4j-0.8.1.jar:na]
at py4j.Gateway.invoke(Gateway.java:259) [py4j-0.8.1.jar:na]
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) [py4j-0.8.1.jar:na]
at py4j.commands.CallCommand.execute(CallCommand.java:79) [py4j-0.8.1.jar:na]
at py4j.GatewayConnection.run(GatewayConnection.java:207) [py4j-0.8.1.jar:na]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_77]
Row(key=1, value=u'abc')

I have tried searching Google for this, but could not find anything related to the problem. Could you guide me on how to resolve it?
Thanks.
Posted on 2016-05-03 20:16:43
The fix was to recompile the updated Java driver against a glibc version matching the one installed on the system. The bundled Netty native epoll transport (libnetty-transport-native-epoll...so) was built against glibc 2.10 or newer, while this host's libc is older, so the native library fails to load; the driver then falls back to NIO, which is why the query itself still succeeds.
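A quick way to confirm the mismatch is to ask glibc for its own version from Python; a minimal sketch, assuming a Linux host running glibc (gnu_get_libc_version is a glibc-specific function):

import ctypes

# Ask the running C library for its version string.
libc = ctypes.CDLL("libc.so.6")
libc.gnu_get_libc_version.restype = ctypes.c_char_p  # returns a C string
print(libc.gnu_get_libc_version())  # anything below 2.10 cannot satisfy the bundled epoll .so

Since the loader complains about GLIBC_2.10, any system glibc older than 2.10 (for example the 2.5 shipped with RHEL/CentOS 5) will reproduce this UnsatisfiedLinkError.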
https://stackoverflow.com/questions/37002764
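Note that the driver already falls back to NIO (the WARN line and the successful Row(key=1, value=u'abc') confirm this), so the trace is noisy rather than fatal. If rebuilding is not practical, the DataStax Java driver (2.1.7 and later) also honors the com.datastax.driver.FORCE_NIO system property, which skips the native epoll probe entirely. A sketch of passing it through Spark, untested against DSE 4.8.5:

from pyspark import SparkConf

# Force the Cassandra Java driver to use NIO and skip loading the epoll .so.
# Property name is from the DataStax Java driver docs; unverified on this DSE build.
conf = (SparkConf()
        .set("spark.driver.extraJavaOptions", "-Dcom.datastax.driver.FORCE_NIO=true")
        .set("spark.executor.extraJavaOptions", "-Dcom.datastax.driver.FORCE_NIO=true"))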