Error: thrift.transport.TTransport.TTransportException, raised from message=("Could not start SASL: %s" % self.sasl.getError())

This post describes how to fix a connection error that occurs when using PyHive to connect to Hive on Windows. A single command that adjusts a registry setting resolves the problem.

Connecting to Hive with PyHive on Windows fails with the error above; it can be fixed with the following command:

FOR /F "usebackq delims=" %A IN (python -c “from importlib import util;import os;print(os.path.join(os.path.dirname(util.find_spec(‘sasl’).origin),‘sasl2’))”) DO ( REG ADD "HKEY_LOCAL_MACHINE\SOFTWARE\Carnegie Mellon\Project Cyrus\SASL Library" /v SearchPath /t REG_SZ /d "%A" )

Run the command from a Command Prompt opened as Administrator.

[screenshot]

Result:

[screenshots]
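Once the registry value is in place, a quick PyHive query is enough to verify the connection. This is a minimal sketch; the host node1, port 10000, and username hadoop are placeholders for your own HiveServer2 endpoint, and it assumes the server accepts SASL PLAIN (username) authentication.

# Minimal PyHive connection test (host, port, and username are placeholders).
from pyhive import hive

conn = hive.Connection(host="node1", port=10000, username="hadoop", database="default")
cursor = conn.cursor()
cursor.execute("SHOW DATABASES")  # any lightweight query confirms the transport works
print(cursor.fetchall())
cursor.close()
conn.close()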
