I am writing a Spark program that puts a record into HBase. Spark version 1.3.1, HBase 1.0.0-cdh5.4.2. The program is as follows.
import org.apache.hadoop.hbase.client.{HBaseAdmin, HTable, Put}
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor}

object Employee {
  def main(args: Array[String]) {
    if (args.length != 1) {
      System.err.println("Incorrect number of arguments: " + args.length)
      System.out.println("Please provide correct arguments.")
      System.exit(1)
    }

    // HBase configuration: target table plus the ZooKeeper quorum of the quickstart VM.
    val hbaseConf = HBaseConfiguration.create()
    val tableName = "employee"
    hbaseConf.set(TableOutputFormat.OUTPUT_TABLE, tableName)
    hbaseConf.set("hbase.zookeeper.quorum", "quickstart.cloudera")
    hbaseConf.set("hbase.zookeeper.property.client.port", "2181")

    val admin = new HBaseAdmin(hbaseConf)
    val cfProfessionalData = Bytes.toBytes("professional_data")
    val cfPersonalData = Bytes.toBytes("personal_data")

    // Create the table with both column families if it is not available yet.
    if (!admin.isTableAvailable(tableName)) {
      val tableDesc = new HTableDescriptor(tableName)
      tableDesc.addFamily(new HColumnDescriptor(cfProfessionalData))
      tableDesc.addFamily(new HColumnDescriptor(cfPersonalData))
      admin.createTable(tableDesc)
    }

    val hTable = new HTable(hbaseConf, tableName)
    //val records = sc.textFile(args(0))

    // Build a single Put for row key "e_1" and write it.
    val put = new Put(Bytes.toBytes("e_1"))
    val eId = Bytes.toBytes("Emp_id")
    val name = Bytes.toBytes("Name")
    val dsgtn = Bytes.toBytes("Designation")
    val doj = Bytes.toBytes("DOJ")
    val addr = Bytes.toBytes("Address")
    val phn = Bytes.toBytes("Phone")
    val dob = Bytes.toBytes("DOB")
    put.add(cfProfessionalData, eId, Bytes.toBytes(1))
    put.add(cfProfessionalData, name, Bytes.toBytes("Mridul Gohain"))
    put.add(cfProfessionalData, dsgtn, Bytes.toBytes("SE"))
    put.add(cfProfessionalData, doj, Bytes.toBytes("15-07-2015"))
    put.add(cfPersonalData, addr, Bytes.toBytes("Chabua"))
    put.add(cfPersonalData, phn, Bytes.toBytes("9859559606"))
    put.add(cfPersonalData, dob, Bytes.toBytes("04-10-1991"))
    hTable.put(put)
    hTable.close()
  }
}
But when I execute the code, I get the following exception:
16/08/03 04:09:00 INFO zookeeper.ZooKeeper: Client environment:user.home=/home/cloudera
16/08/03 04:09:00 INFO zookeeper.ZooKeeper: Client environment:user.dir=/home/cloudera/tars
16/08/03 04:09:00 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=quickstart.cloudera:2181 sessionTimeout=90000 watcher=hconnection-0x3fa7e1a80x0, quorum=quickstart.cloudera:2181, baseZNode=/hbase
Exception in thread "main" java.io.IOException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:240)
at org.apache.hadoop.hbase.client.ConnectionManager.createConnection(ConnectionManager.java:414)
at org.apache.hadoop.hbase.client.ConnectionManager.createConnection(ConnectionManager.java:407)
at org.apache.hadoop.hbase.client.ConnectionManager.getConnectionInternal(ConnectionManager.java:285)
at org.apache.hadoop.hbase.client.HBaseAdmin.<init>(HBaseAdmin.java:207)
at com.zaloni.mgohain.sparkHbaseIntegration.services.Employee$.main(Employee.scala:20)
at com.zaloni.mgohain.sparkHbaseIntegration.services.Employee.main(Employee.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:238)
... 15 more
Caused by: java.lang.NoClassDefFoundError: org/apache/htrace/Trace
at org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper.exists(RecoverableZooKeeper.java:218)
at org.apache.hadoop.hbase.zookeeper.ZKUtil.checkExists(ZKUtil.java:481)
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:65)
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:86)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.retrieveClusterId(ConnectionManager.java:850)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.<init>(ConnectionManager.java:635)
... 20 more
Caused by: java.lang.ClassNotFoundException: org.apache.htrace.Trace
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 26 more
16/08/03 04:09:00 INFO zookeeper.ClientCnxn: Opening socket connection to server quickstart.cloudera/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
Can you shed some light on this?
1 Answer
unguejic1#
Pay attention to the HBase version you are trying to access, and in particular to the htrace version the HBase client will pull in. I set up my pom.xml file as follows:
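As a rough sketch (not the answerer's exact file), assuming a Maven build resolving HBase 1.0.0-cdh5.4.2 from the Cloudera repository, the relevant parts would look something like this; the htrace-core artifact is what supplies the missing org.apache.htrace.Trace class:

    <!-- Sketch only: versions assumed for Spark 1.3.1 / HBase 1.0.0-cdh5.4.2. -->
    <repositories>
      <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
      </repository>
    </repositories>

    <dependencies>
      <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.3.1</version>
      </dependency>
      <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.0.0-cdh5.4.2</version>
      </dependency>
      <!-- TableOutputFormat lives in hbase-server for HBase 1.0. -->
      <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-server</artifactId>
        <version>1.0.0-cdh5.4.2</version>
      </dependency>
      <!-- Provides org.apache.htrace.Trace, the class missing at runtime. -->
      <dependency>
        <groupId>org.apache.htrace</groupId>
        <artifactId>htrace-core</artifactId>
        <version>3.1.0-incubating</version>
      </dependency>
    </dependencies>

The same jar also has to be on the classpath when the job runs, for example by passing it with spark-submit --jars or by building a fat jar, assuming the program is launched through spark-submit.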
That did the trick.