April 10 Journal (the s01 string)

This post works through an algorithm for generating a particular string, implemented with recursion and string manipulation. It compares several approaches, including a recursive pass over a char array, string::insert, and a std::list with iterators, offering more than one way to approach problems of this kind.

Problem 1: the s01 string

Problem statement

[image: problem statement]

I suspect the problem's sample data contains some errors:

[image: the sample data in question]
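Judging from the code below (the problem images don't survive in this copy, so treat this as an inference), the rule appears to be: start from the string 0 and, on each of n rounds, rewrite every 0 as 1 and every 1 as 01. The first few strings would then be:

0 → 1 → 01 → 101 → 01101 → 10101101 → ...

whose lengths 1, 1, 2, 3, 5, 8, ... follow the Fibonacci sequence.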
My own code (a straightforward implementation of the problem):

#include <iostream>
#include <cstdio>
using namespace std;

char s[100000];   // note: the length grows like Fibonacci, so this buffer
                  // only survives roughly n <= 24 rounds
int n;

// One call performs one full rewrite pass over the string
// ('0' -> '1', '1' -> '01'); the recursion just repeats the pass n times.
void dfs(char s[], int len, int count) {
    if (count == n) {
        for (int i = 0; i < len; i++) {
            printf("%c", s[i]);
        }
        return;
    }
    for (int i = 0; i < len; i++) {
        if (s[i] == '0') {
            s[i] = '1';                      // 0 -> 1
        } else {                             // s[i] == '1': 1 -> 01
            s[i] = '0';
            for (int j = len; j > i; j--) {  // shift the tail right by one
                s[j] = s[j - 1];
            }
            s[i + 1] = '1';
            len++;
            i++;                             // skip the '1' we just wrote
        }
    }
    dfs(s, len, count + 1);
}

int main(int argc, char** argv) {
    cin >> n;
    s[0] = '0';      // the starting string is "0"
    dfs(s, 1, 0);
    return 0;
}
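A side note I'm adding here (not from the original post): the fixed 100000-char buffer is the weak point. If zeros and ones count the characters before a pass, then after the pass zeros' = ones and ones' = zeros + ones, so the total length grows like the Fibonacci sequence and the buffer overflows somewhere around n = 25. A minimal sketch that tabulates the length per round without building the string:

#include <cstdio>

// Sketch (assuming the rule 0 -> 1, 1 -> 01): track character counts
// instead of the string itself to see how fast the length grows.
int main() {
    long long zeros = 1, ones = 0;        // the start string is "0"
    for (int n = 1; n <= 30; n++) {
        long long nz = ones;              // every '1' produces one '0'
        long long no = zeros + ones;      // every character produces one '1'
        zeros = nz;
        ones = no;
        printf("n=%2d  len=%lld\n", n, zeros + ones);
    }
    return 0;
}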

After looking at others' code, I found string::insert makes this much simpler:

#include <iostream>
#include <string>
using namespace std;

int main(int argc, char** argv) {
    int n;
    cin >> n;
    string s = "0";
    while (n--) {                             // one rewrite pass per iteration
        for (string::size_type i = 0; i < s.length(); i++) {
            if (s[i] == '0') {
                s[i] = '1';                   // 0 -> 1
            } else {                          // 1 -> 01
                s.insert(i, "0");             // put a '0' in front of the '1'
                i++;                          // skip the '1', now at i + 1
            }
        }
    }
    cout << s;
    return 0;
}
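One caveat with this version: string::insert shifts the tail on every call, so a single pass can cost O(len²). A variant I added for comparison (not from the original post) builds a fresh string per pass, keeping each pass linear in the current length:

#include <iostream>
#include <string>
using namespace std;

// Sketch of a linear-time pass: append into a new string instead of
// inserting into the old one in place.
int main() {
    int n;
    cin >> n;
    string s = "0";
    while (n--) {
        string t;
        t.reserve(s.size() * 2);              // a pass can at most double the length
        for (char c : s)
            t += (c == '0') ? "1" : "01";     // rule: 0 -> 1, 1 -> 01
        s.swap(t);
    }
    cout << s;
    return 0;
}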
There is also a std::list (plus iterator) approach, much the same as the one above:

#include <iostream>
#include <list>
using namespace std;

list<int> l;
typedef list<int>::iterator iter;

int main(int argc, char** argv) {
    int n;
    cin >> n;
    l.push_back(0);                           // the starting string is "0"
    while (n--) {                             // one rewrite pass per iteration
        for (iter i = l.begin(); i != l.end(); i++) {
            if (*i == 0) {
                *i = 1;                       // 0 -> 1
            } else {                          // 1 -> 01
                l.insert(i, 0);               // insert before i; i stays on the 1
            }
        }
    }
    for (iter i = l.begin(); i != l.end(); i++) {
        cout << *i;
    }
    return 0;
}
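One detail worth noting about the list version (my own observation): it does not need the manual i++ that the string version did. std::list::insert places the new element before the iterator and never invalidates existing iterators, so i keeps pointing at the 1 just handled and the loop's own i++ steps past it. With string::insert, by contrast, the 1 is shifted to position i + 1, so without the extra increment the loop would land on it again.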