package demo.hive;

import org.apache.hadoop.hive.service.ThriftHive;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;

import java.util.List;

/**
 * Minimal Thrift client for HiveServer: opens a socket to the server,
 * executes a single HQL statement ("desc job51") and prints every row
 * of the result to stdout.
 *
 * Created by Administrator on 2017/8/24.
 */
public class HiveThriftclient {

    /** Host of the HiveServer Thrift endpoint. */
    private static final String HIVE_HOST = "192.168.0.108";
    /** Port of the HiveServer Thrift endpoint (default HiveServer port). */
    private static final int HIVE_PORT = 10000;

    public static void main(String[] args) throws Exception {
        // Raw TCP transport to the HiveServer endpoint.
        final TSocket tSocket = new TSocket(HIVE_HOST, HIVE_PORT);
        // Thrift binary wire protocol layered over the socket.
        final TProtocol tProtocol = new TBinaryProtocol(tSocket);
        // Hive Thrift client bound to that protocol.
        final ThriftHive.Client client = new ThriftHive.Client(tProtocol);

        // Open the connection; throws TTransportException if the server
        // is not listening (e.g. "Connection refused").
        tSocket.open();
        try {
            // Run the HQL statement on the server.
            client.execute("desc job51");
            // Fetch all result rows and print them.
            final List<String> rows = client.fetchAll();
            for (String row : rows) {
                System.out.println(row);
            }
        } finally {
            // Always release the socket, even when execute/fetchAll throws —
            // the original code leaked the connection on any exception.
            tSocket.close();
        }
    }
}
运行这段代码时的报错信息如下：
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
Exception in thread "main" org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused: connect
at org.apache.thrift.transport.TSocket.open(TSocket.java:187)
at demo.hive.HiveThriftclient.main(HiveThriftclient.java:23)
Caused by: java.net.ConnectException: Connection refused: connect
at java.net.DualStackPlainSocketImpl.connect0(Native Method)
at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:79)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
at java.net.Socket.connect(Socket.java:589)
at org.apache.thrift.transport.TSocket.open(TSocket.java:182)
... 1 more
hiveserver2 启动时的输出信息如下：
hiveserver2
ls: cannot access /root/app/spark-2.1.0-bin-2.6.0-cdh5.7.0/lib/spark-assembly-*.jar: No such file or directory
which: no hbase in (/root/app/sqoop-1.4.6.bin__hadoop-0.23/bin:/root/app/spark-2.1.0-bin-2.6.0-cdh5.7.0/bin:/root/app/hive-1.1.0-cdh5.7.0/bin:/root/app/hadoop-2.6.0-cdh5.7.0/bin:/root/app/scala-2.11.8/bin:/root/app/apache-maven-3.3.9/bin:/root/app/jdk1.7.0_51/bin:/usr/lib64/qt-3.3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin)
请问各位：客户端连接 10000 端口时报 Connection refused，应该如何排查 hiveserver2 是否正常启动并监听了该端口？