spark-core_03: org.apache.spark.launcher.Main源码分析

本文深入解析 Spark Submit 脚本的工作原理,特别是如何解析并处理命令行参数,为 Spark 应用程序提供必要的配置。

承接上文“spark-core_02: spark-submit、spark-class脚本分析”。

launcher.Main主要作用就是检测,注入spark-submit参数到spark环境中,然后返回SparkSubmit需要执行的参数,给spark-class脚本中的exec "${CMD[@]}"进行执行

class Main {
  public static void main(String[] argsArray) throws Exception {
    checkArgument(argsArray.length > 0, "Not enough arguments: missing class name.");
/**
 * java -cp spark_home/lib/spark-assembly-1.6.0-hadoop2.6.0.jar org.apache.spark.launcher.Main  org.apache.spark.deploy.SparkSubmit
 *      --class org.apache.spark.repl.Main --name "Spark shell" --master spark://luyl152:7077
    这个main方法最终会将org.apache.spark.deploy.SparkSubmit --class org.apache.spark.repl.Main 
--name "Spark shell" --master spark://luyl152:7077
    给spark-class的 exec "${CMD[@]}"执行
 */
    List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
    String className = args.remove(0);
//可以在spark-class或别的配置文件中 export SPARK_PRINT_LAUNCH_COMMAND=任何值,只要不为空即可
//可以用它来打印cmd,也就是spark-class的 exec "${CMD[@]}"中的值


    boolean
printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
   
AbstractCommandBuilder builder;//创建命令解析器
    //spark-shell执行时第1个参数就是SparkSubmit
    if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
     
try {
       
//将参数解析到spark对应的变量中,如 --class的值 放到mainClass变量中。
        //如果有多出来的参数则将该参数放到SparkSubmitCommandBuilder成员sparkArgs这个集合中

       
builder = new SparkSubmitCommandBuilder(args);

一、分析new SparkSubmitCommandBuilder(args)的代码:

// args参数是这些:--class org.apache.spark.repl.Main --name "Spark shell" --master spark://luyl152:7077
 
SparkSubmitCommandBuilder(List<String> args) {
   
this.sparkArgs = new ArrayList<String>();
   
List<String> submitArgs = args;
   
//第一个参数值是pyspark-shell-main,如果python执行的
    if (args.size() > 0 && args.get(0).equals(PYSPARK_SHELL)) {
     
this.allowsMixedArguments = true;
     
appResource = PYSPARK_SHELL_RESOURCE;
     
submitArgs = args.subList(1, args.size());
     
//第一个参数值是:sparkr-shell-main,如果是R执行
    } else if (args.size() > 0 &&args.get(0).equals(SPARKR_SHELL)) {
     
this.allowsMixedArguments = true;
     
appResource = SPARKR_SHELL_RESOURCE;
     
submitArgs = args.subList(1, args.size());
   
} else{
     
//如果不是python或r,则allowsMixedArguments值是false
      this.allowsMixedArguments = false;
   
}

    OptionParser parser =
new OptionParser();
  
 //作用就是将spark-submit放进来的参数对应值赋到spark对应的变量中,如 --class的值 放到mainClass变量中
    parser.parse(submitArgs); //它的父类方法SparkSubmitOptionParser实现的
    this.printInfo = parser.infoRequested;
 
}

1,查看一下OptionParser的父类SparkSubmitOptionParser对parse的实现

/**
 * Parse a list of spark-submit commandline options.
 *
<p>
 
* See SparkSubmitArguments.scala for a more formaldescription of available options.
 *
 * @throws IllegalArgumentExceptionIf an error is found during parsing
 
* 参数是这些:--class org.apache.spark.repl.Main --name"Spark shell" --master spark://luyl152:7077.
 * 作用就是将spark-submit放进来的参数对应值赋到spark对应的变量中,如 --class的值 放到mainClass变量中

 */
protected finalvoid parse(List<String> args) {
 
 //spark-submit可以传sparkConf参数:--confPROP=VALUE ,参数可以看org.apache.spark.deploy.SparkSubmitArguments类最后面
  //或spark-submit-h就可以查看

 
Pattern eqSeparatedOpt = Pattern.compile("(--[^=]+)=(.+)");

  int
idx = 0;
  for
(idx = 0; idx < args.size(); idx++) {
    String arg = args.get(idx)
;
   
String value = null;
   
//当出现--conf PROP=VALUE这种类型的参数arg、value值变成if代码里面的值
    Matcher m = eqSeparatedOpt.matcher(arg);
    if
(m.matches()) {
      arg = m.group(
1); //--conf PROP
      value = m.group(2); //VALUE
    }

   
// Look for options with a value.
   //该方法主要是找到spark-submit后面的带有--参数,如args 放进"--class",和opts二维数组进行匹配
    //匹配到的还是返回--class,如果没有匹配到则null

   
String name = findCliOption(arg, opts);
    if
(name != null) {
     
if (value== null) {
       
if (idx== args.size() - 1) { //如果匹配了并且没有参数值则报错,如:只有 --class ,则size是1,idx此时0, 1-1=0
          throw new IllegalArgumentException(
              String.format(
"Missing argument for option'%s'.", arg));
       
}
        idx++
;
       
value = args.get(idx); //如果有值,则idx索引的下一位就是参数对应的值
      }
     
//name就是spark-submit的参数如--class,而value就是参数对应的值
      //在它的自身OptionParser做的实现,作用就是将spark-submit放进来的参数对应值赋到spark对应的变量中
      //如 --class的值放到mainClass变量中(里面实现很easy,就不写了)

      if (!handle(name, value)) {
       
break;
     
}
     
continue; //只有匹配到才会让idx再次加1
    }

  
 // Look for aswitch. 如果上面没有匹配到,会再去匹配一下是否有出现-verbose这样参数
    name = findCliOption(arg, switches);
    if
(name != null) {
     
if (!handle(name, null)) {
       
break;
     
}
     
continue;
   
}

   
if (!handleUnknown(arg)){
     
break;
   
}
  }

 
if (idx< args.size()) {
    idx++
;
 
}
 
 // 将多出来的参数加到 SparkSubmitCommandBuilder() {his.sparkArgs = new ArrayList<String>();..}
  handleExtraArgs(args.subList(idx, args.size()));
}

===》上面handle(name,value)在OptionParser的实现如下

/**
 *作用就是将spark-submit放进来的参数对应值赋到spark对应的变量中
   */

@Override
protected boolean handle(String opt, String value) {
 
if (opt.equals(MASTER)) {
   
master =value;
 
} elseif (opt.equals(DEPLOY_MODE)) {
   
deployMode =value;
 
} elseif (opt.equals(PROPERTIES_FILE)) {
   
propertiesFile = value;
 
} elseif (opt.equals(DRIVER_MEMORY)) {
   
conf.put(SparkLauncher.DRIVER_MEMORY, value);
 
} elseif (opt.equals(DRIVER_JAVA_OPTIONS)) {
   
conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, value);
 
} elseif (opt.equals(DRIVER_LIBRARY_PATH)) {
   
conf.put(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH, value);。。。。。


2,此时就从new SparkSubmitCommandBuilder(args)主构造方法返回了,接着launcher.Main$main()下面的方法进行分析:

                                          
     
} catch(IllegalArgumentException e) {

//初始化SparkSubmitCommandBuilder出现异常的容错代码
        printLaunchCommand =
false;
       
System.err.println("Error: " + e.getMessage());
       
System.err.println();
       
MainClassOptionParser parser = new MainClassOptionParser();
        try
{
          parser.parse(args)
;
       
} catch(Exception ignored) {
         
// Ignore parsing exceptions.
       
}
        List<String> help =
new ArrayList<String>();
        if
(parser.className!= null) {
          help.add(parser.
CLASS);
         
help.add(parser.className);
       
}
        help.add(parser.
USAGE_ERROR);
       
builder = new SparkSubmitCommandBuilder(help);
      
}
    }
else {
    
 //第一个参数如果不是:org.apache.spark.deploy.SparkSubmit,则使用SparkClassCommandBuilder,解析器
     
builder = new SparkClassCommandBuilder(className, args);
   
}

    Map<String
, String>env = new HashMap<String, String>();
   //将所有和jvm及SparkSubmit相关的参数返回
   
List<String> cmd = builder.buildCommand(env);

二、此处看一下SparkSubmitOptionParser.buildCommand(Map)这个方法

@Override
public List<String> buildCommand(Map<String, String> env) throws IOException{
 
//PYSPARK_SHELL_RESOURCE表示python,SPARKR_SHELL_RESOURCE表示r语言
  if (PYSPARK_SHELL_RESOURCE.equals(appResource)&& !printInfo) {
   
return buildPySparkShellCommand(env);
 
} elseif (SPARKR_SHELL_RESOURCE.equals(appResource) && !printInfo) {
   
return buildSparkRCommand(env);
 
} else{
   
//这个env就是一个空的Map,会调用buildSparkSubmitCommand()方法
   
return buildSparkSubmitCommand(env);
 
}
}

1,查看一下buildSparkSubmitCommand(env)


private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOException {
 
// Load the properties file and check whether spark-submitwill be running the app's driver
  // or just launching a cluster app.When running the driver, the JVM's argument will be
  // modified to cover the driver'sconfiguration.
 
//加载属性文件,并检查spark-submit是否正在运行driver的应用程序或仅启动集群应用程序。
  // 在运行驱动程序时,JVM的参数将被修改以涵盖驱动程序的配置。

  Map<String, String> config = getEffectiveConfig();

  boolean isClientMode = isClientMode(config)

//默认如果standalone不匹配--deploy-mode cluster就是client,所以这个值是true
  //这个DRIVER_EXTRA_CLASSPATH在client模式是不能直接在SparkConf中设置的,因为driver的JVM已经被Spark-submit通过反射启动起来了
  // 而是通过参数:--driver-class-path来设置的

  //这个DRIVER_EXTRA_CLASSPATH在client模式是不能直接在SparkConf中设置的,因为driver的JVM已经被Spark-submit通过反射启动起来了,应该通过参数:--driver-class-path来设置的
  String extraClassPath = isClientMode ? config.get(SparkLauncher. DRIVER_EXTRA_CLASSPATH ) : null;

 
List<String> cmd = buildJavaCommand(extraClassPath) ;
 
// Take Thrift Server as daemon
 
if (isThriftServer( mainClass )) {
    addOptionString(cmd
, System.getenv( "SPARK_DAEMON_JAVA_OPTS" )) ;
 
}
 
//SPARK_SUBMIT_OPTS就是在spark-shell中提到的,需要将java的classpath手动设置到scala中 SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS-Dscala.usejavacp=true"
  addOptionString(cmd , System.getenv( "SPARK_SUBMIT_OPTS" )) ;
 
addOptionString(cmd , System.getenv( "SPARK_JAVA_OPTS" )) ;

  if
(isClientMode) {
   
// Figuring out where the memory value come from is alittle tricky due to precedence.
    // Precedence is observed in thefollowing order:
   // - explicit configuration (setConf()), which also covers--driver-memory cli argument.
    // - properties file.
    // - SPARK_DRIVER_MEMORY env variable
    // - SPARK_MEM env variable
    // - default value (1g)
    // Take Thrift Server as daemon
   
String tsMemory =
      isThriftServer(
mainClass ) ? System.getenv( "SPARK_DAEMON_MEMORY" ) : null;
   
String memory = firstNonEmpty(tsMemory , config.get(SparkLauncher. DRIVER_MEMORY ) ,
     
System.getenv( "SPARK_DRIVER_MEMORY" ) , System.getenv( "SPARK_MEM" ) , DEFAULT_MEM ) ;
   
cmd.add( "-Xms" + memory) ; // 最大、小堆内存默认是1g
    cmd.add( "-Xmx" + memory) ;
   
addOptionString(cmd , config.get(SparkLauncher. DRIVER_EXTRA_JAVA_OPTIONS )) ;
   
mergeEnvPathList (env , getLibPathEnvName () ,
     
config.get(SparkLauncher. DRIVER_EXTRA_LIBRARY_PATH )) ;
 
}

  addPermGenSizeOpt(cmd)
;
 
cmd.add( "org.apache.spark.deploy.SparkSubmit" ) ;
 
 //buildSparkSubmitArgs()返回list将上面spark-submit参数注入进来的参数及对应值取出来
  cmd.addAll(buildSparkSubmitArgs()) ;
  return
cmd ;
}

==》此时将要执行的cmd参数返回,接着launcher.Main$main()下面的方法进行分析


    if
(printLaunchCommand) {
      System.
err.println("Spark Command: " + join(" ", cmd));
     
System.err.println("========================================");
   
}

   
if (isWindows()){
      System.
out.println(prepareWindowsCommand(cmd, env));
   
} else{
     
// In bash, use NULL as the arg separator since it cannotbe used in an argument.
      //
返回有效的参数,会通过打印的方式给spark-class的 exec "${CMD[@]}"执行
  
    /**
       * '\0'
和空格不是同一个概念。
         '\0'表示字符串结束符,代表字符串结束,而空格是一个普通字符,显示在文本中可以选中。
         '\0'的ASCII码为0,空格的ASCII码为32,两个不是同一个字符
          在计算机程序中通常使用'\0'表示字符串结束,空格为文本字符,二者完全不同
       */

     
List<String> bashCmd = prepareBashCommand(cmd, env);
      for
(String c : bashCmd) {
        System.
out.print(c);
       
System.out.print('\0');
     
}
    }
  }

 

CMD的内容如下

/usr/local/java/jdk1.8.0_91/bin/java-cp

/data/spark-1.6.0-bin-hadoop2.6/conf/:/data/spark-1.6.0-bin-hadoop2.6/lib/spark-assembly-1.6.0-hadoop2.6.0.jar:/data/spark-1.6.0-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar:/data/spark-1.6.0-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar:/data/spark-1.6.0-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar:/data/hadoop-2.6.5/etc/hadoop/

-Xms1g-Xmx1g -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005

org.apache.spark.deploy.SparkSubmit

--classorg.apache.spark.repl.Main

--nameSpark shell

--masterspark://luyl152:7077,luyl153:7077,luyl154:7077

--verbose/tool/jarDir/maven_scala-1.0-SNAPSHOT.jar

接下来分析一下“org.apache.spark.deploy.SparkSubmit”源码
/home/hadoopmaster/jdk1.8.0_161/bin/java -javaagent:/home/hadoopmaster/idea-IC-221.6008.13/lib/idea_rt.jar=34515:/home/hadoopmaster/idea-IC-221.6008.13/bin -Dfile.encoding=UTF-8 -classpath /home/hadoopmaster/jdk1.8.0_161/jre/lib/charsets.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/deploy.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/cldrdata.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/dnsns.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/jaccess.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/jfxrt.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/localedata.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/nashorn.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunec.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunjce_provider.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunpkcs11.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/zipfs.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/javaws.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jce.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jfr.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jfxswt.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jsse.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/management-agent.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/plugin.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/resources.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/rt.jar:/root/IdeaProjects/kkk/out/production/kkk:/home/hadoopmaster/scala-2.12.15/lib/scala-reflect.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-xml_2.12-1.0.6.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-parser-combinators_2.12-1.0.7.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-swing_2.12-2.0.3.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-library.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xz-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jta-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jpam-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/ST4-4.0.4.jar:/home/hadoopm
aster/spark-3.2.1-bin-hadoop2.7/jars/guice-3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/ivy-2.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/oro-2.0.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/blas-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/core-1.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/gson-2.2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/tink-1.6.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jsp-api-2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/okio-1.14.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/opencsv-2.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/shims-0.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xmlenc-0.52.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arpack-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/guava-14.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jline-2.14.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jsr305-3.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/lapack-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/log4j-1.2.17.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/minlog-1.3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/stream-2.9.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/velocity-1.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/generex-1.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-api-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/janino-3.0.16.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jdo-api-3.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/objenesis-2.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/paranamer-2.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/py4j-0.10.9.3.jar:/home/hadoopmaster/
spark-3.2.1-bin-hadoop2.7/jars/pyrolite-4.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/HikariCP-2.5.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-io-2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-cli-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javax.inject-1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/libfb303-0.9.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/lz4-java-1.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/okhttp-3.12.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/snakeyaml-1.27.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/stax-api-1.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/JTransforms-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aopalliance-1.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-ipc-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/breeze_2.12-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-cli-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-net-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/derby-10.14.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-jdbc-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-utils-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/httpcore-4.4.14.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jaxb-api-2.2.11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-hk2-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jodd-core-3.5.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-core-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/super-csv-2.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xml-apis-1.4.01.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zookeeper-3.6.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/JLargeArrays-1.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/activation-1.1.1.ja
r:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/automaton-1.11-8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-dbcp-1.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-lang-2.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-text-1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-serde-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javolution-5.5.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/libthrift-0.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-shims-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/slf4j-api-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zjsonpatch-0.3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zstd-jni-1.5.0-4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/chill-java-0.10.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/chill_2.12-0.10.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/guice-servlet-3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-auth-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-hdfs-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-locator-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/httpclient-4.5.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-xc-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-util-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/joda-time-2.10.10.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kryo-shaded-4.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-jmx-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-jvm-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/rocksdbjni-6.20.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire_2.12-0.17.
0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xercesImpl-2.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aircompressor-0.21.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/algebra_2.12-2.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/annotations-17.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/antlr4-runtime-4.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/api-util-1.0.0-M20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-format-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-vector-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-mapred-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-codec-1.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-pool-1.5.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/compress-lzf-1.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-beeline-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javax.jdo-3.2.0-m3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jaxb-runtime-2.3.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-client-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-common-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-server-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/leveldbjni-all-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-core-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-json-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/RoaringBitmap-0.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/antlr-runtime-3.5.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-math3-3.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-client-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-core-2.12.3.jar:/home/hadoopmaster
/spark-3.2.1-bin-hadoop2.7/jars/javassist-3.25.0-GA.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jul-to-slf4j-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/protobuf-java-2.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/snappy-java-1.1.8.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/transaction-api-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/bonecp-0.8.0.RELEASE.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-crypto-1.1.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-digester-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-lang3-3.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-client-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-exec-2.3.9-core.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-metastore-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-jaxrs-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.inject-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-mapreduce-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-xml_2.12-1.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/shapeless_2.12-2.3.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/slf4j-log4j12-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-sql_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/threeten-extra-1.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zookeeper-jute-3.6.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-compress-1.21.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-logging-1.1.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-recipes-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-api-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-0.23-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jcl-over-slf4j-1.7
.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-column-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-common-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-hadoop-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-library-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-reflect-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-core_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-hive_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-repl_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-tags_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-yarn_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/api-asn1-api-1.0.0-M20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/breeze-macros_2.12-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/cats-kernel_2.12-2.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-httpclient-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/flatbuffers-java-1.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-llap-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-service-rpc-3.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-storage-api-2.7.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-sslengine-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-graphite-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/netty-all-4.1.68.Final.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-jackson-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-compiler-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mesos_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mllib_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire-util_2.12-0.17.0.jar:
/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xbean-asm9-shaded-4.20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/apacheds-i18n-2.0.0-M15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arpack_combined_all-0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-memory-core-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-beanutils-1.9.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-compiler-3.0.16.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-framework-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-core-4.1.17.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-core-asl-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-databind-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.ws.rs-api-2.1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-client-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/macro-compat_2.12-1.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-encoding-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-graphx_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-sketch_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-unsafe_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/univocity-parsers-2.9.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-memory-netty-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-rdbms-4.1.19.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-annotations-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-client-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-kvstore_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/
jars/spire-macros_2.12-0.17.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-collections-3.2.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-configuration-1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-api-jdo-4.2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-mapper-asl-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.servlet-api-4.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-ast_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-catalyst_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-launcher_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/audience-annotations-0.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-scheduler-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-vector-code-gen-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-annotations-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.xml.bind-api-2.3.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-core_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-streaming_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire-platform_2.12-0.17.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-apps-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-core-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-node-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-rbac-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/logging-interceptor-3.12.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/mesos-1.4.0-shaded-protobuf.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/osgi-resource-locator-1.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-kubernetes_2.12-3.2.1.jar:/home/hadoopmaster/spa
rk-3.2.1-bin-hadoop2.7/jars/spark-tags_2.12-3.2.1-tests.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aopalliance-repackaged-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/htrace-core-3.1.0-incubating.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/istack-commons-runtime-3.0.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.annotation-api-1.3.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.validation-api-2.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-scalap_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-batch-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mllib-local_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-container-servlet-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-jackson_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-common-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-events-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-policy-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-dataformat-yaml-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-datatype-jsr310-2.11.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-metrics-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-server-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-network-common_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-module-scala_2.12-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-discovery-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-format-structures-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-network-shuffle_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/apacheds-kerberos-codec-2.0.0-M
15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-app-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-extensions-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-networking-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-scheduling-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-core-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-server-web-proxy-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-container-servlet-core-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-autoscaling-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-flowcontrol-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-collection-compat_2.12-2.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-hive-thriftserver_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-certificates-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-coordination-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-storageclass-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-parser-combinators_2.12-1.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-apiextensions-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-shuffle-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-jobclient-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-admissionregistration-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar kkk.WordCount Using Spark's default log4j profile: 
org/apache/spark/log4j-defaults.properties 25/06/02 20:17:03 INFO SparkContext: Running Spark version 3.2.1 25/06/02 20:17:04 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/06/02 20:17:04 INFO ResourceUtils: ============================================================== 25/06/02 20:17:04 INFO ResourceUtils: No custom resources configured for spark.driver. 25/06/02 20:17:04 INFO ResourceUtils: ============================================================== 25/06/02 20:17:04 INFO SparkContext: Submitted application: WordCount 25/06/02 20:17:04 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/06/02 20:17:04 INFO ResourceProfile: Limiting resource is cpu 25/06/02 20:17:04 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/06/02 20:17:04 INFO SecurityManager: Changing view acls to: root 25/06/02 20:17:04 INFO SecurityManager: Changing modify acls to: root 25/06/02 20:17:04 INFO SecurityManager: Changing view acls groups to: 25/06/02 20:17:04 INFO SecurityManager: Changing modify acls groups to: 25/06/02 20:17:04 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/06/02 20:17:05 INFO Utils: Successfully started service 'sparkDriver' on port 41615. 
25/06/02 20:17:05 INFO SparkEnv: Registering MapOutputTracker 25/06/02 20:17:05 INFO SparkEnv: Registering BlockManagerMaster 25/06/02 20:17:05 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/06/02 20:17:05 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/06/02 20:17:05 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/06/02 20:17:05 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-68486311-3f69-48e8-8f69-e7afffcf5979 25/06/02 20:17:05 INFO MemoryStore: MemoryStore started with capacity 258.5 MiB 25/06/02 20:17:05 INFO SparkEnv: Registering OutputCommitCoordinator 25/06/02 20:17:06 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/06/02 20:17:06 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://hadoopmaster:4040 25/06/02 20:17:06 INFO Executor: Starting executor ID driver on host hadoopmaster 25/06/02 20:17:06 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41907. 
25/06/02 20:17:06 INFO NettyBlockTransferService: Server created on hadoopmaster:41907 25/06/02 20:17:06 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/06/02 20:17:06 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManagerMasterEndpoint: Registering block manager hadoopmaster:41907 with 258.5 MiB RAM, BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:08 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 244.0 KiB, free 258.2 MiB) 25/06/02 20:17:09 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 23.4 KiB, free 258.2 MiB) 25/06/02 20:17:09 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on hadoopmaster:41907 (size: 23.4 KiB, free: 258.5 MiB) 25/06/02 20:17:09 INFO SparkContext: Created broadcast 0 from textFile at WordCount.scala:9 Exception in thread "main" org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/hadoopmaster/words.txt at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:287) at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:229) at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:315) at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at 
scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.Partitioner$.$anonfun$defaultPartitioner$4(Partitioner.scala:78) at org.apache.spark.Partitioner$.$anonfun$defaultPartitioner$4$adapted(Partitioner.scala:78) at scala.collection.immutable.List.map(List.scala:293) at org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:78) at org.apache.spark.rdd.PairRDDFunctions.$anonfun$reduceByKey$4(PairRDDFunctions.scala:322) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) at org.apache.spark.rdd.RDD.withScope(RDD.scala:414) at org.apache.spark.rdd.PairRDDFunctions.reduceByKey(PairRDDFunctions.scala:322) at kkk.WordCount$.main(WordCount.scala:10) at kkk.WordCount.main(WordCount.scala) 25/06/02 20:17:09 INFO SparkContext: Invoking stop() from shutdown hook 25/06/02 20:17:09 INFO SparkUI: Stopped Spark web UI at http://hadoopmaster:4040 25/06/02 20:17:09 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped! 25/06/02 20:17:09 INFO MemoryStore: MemoryStore cleared 25/06/02 20:17:09 INFO BlockManager: BlockManager stopped 25/06/02 20:17:09 INFO BlockManagerMaster: BlockManagerMaster stopped 25/06/02 20:17:09 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped! 
25/06/02 20:17:10 INFO SparkContext: Successfully stopped SparkContext 25/06/02 20:17:10 INFO ShutdownHookManager: Shutdown hook called 25/06/02 20:17:10 INFO ShutdownHookManager: Deleting directory /tmp/spark-213e3a91-227c-4e0a-8254-ab5c0f00786d Process finished with exit code 1
06-04
package gs8

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SparkSession, DataFrame}
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.RandomForestClassifier
import org.apache.spark.sql.functions.col
import java.util.Properties

/**
 * Trains a RandomForestClassifier on Hive table dwd.fact_machine_learning_data
 * (rows with a non-null machine_record_state), then scores
 * dwd.fact_machine_learning_data_test and writes the predictions to the MySQL
 * table ml_result via JDBC.
 */
object shujuwaqu2 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("RandomForestModel")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    Logger.getLogger("org").setLevel(Level.ERROR)
    import spark.implicits._

    // BUG FIX: Column.cast(to: String) expects a SQL/DDL type name such as
    // "long" or "bigint" — NOT the Catalyst class name "LongType", which makes
    // Spark throw a ParseException ("DataType longtype is not supported").
    val trainDF = spark.sql("SELECT * FROM dwd.fact_machine_learning_data WHERE machine_record_state IS NOT NULL")
      .withColumn("machine_record_id", $"machine_record_id".cast("long"))

    // Numeric sensor/state columns assembled into the feature vector.
    val featureCols = Array(
      "machine_record_mainshaft_speed", "machine_record_mainshaft_multiplerate",
      "machine_record_mainshaft_load", "machine_record_feed_speed",
      "machine_record_feed_multiplerate", "machine_record_pmc_code",
      "machine_record_circle_time", "machine_record_run_time",
      "machine_record_effective_shaft", "machine_record_amount_process",
      "machine_record_use_memory", "machine_record_free_memory",
      "machine_record_amount_use_code", "machine_record_amount_free_code"
    )
    val assembler = new VectorAssembler()
      .setInputCols(featureCols)
      .setOutputCol("features")

    // NOTE(review): RandomForestClassifier needs a numeric label column;
    // assumes machine_record_state is already numeric in Hive — confirm schema.
    val trainData = assembler.transform(trainDF)
      .select($"machine_record_state".alias("label"), $"features")

    val rf = new RandomForestClassifier()
      .setLabelCol("label")
      .setFeaturesCol("features")
      .setNumTrees(20)
      .setMaxDepth(8)
    val pipeline = new Pipeline().setStages(Array(rf))
    val model = pipeline.fit(trainData)
    println("随机森林模型训练完成!")

    // Same cast fix for the test table ("long", not "LongType").
    val testDF = spark.sql("SELECT * FROM dwd.fact_machine_learning_data_test")
      .withColumn("machine_record_id", $"machine_record_id".cast("long"))
    val testData = assembler.transform(testDF)
      .select($"machine_record_id", $"features")

    // Keep only the id and the predicted state for the JDBC write
    // (expression continues on the following source line).
    val predictions = model.transform(testData)
      .select($"machine_record_id",
$"prediction".alias("machine_record_state")) println("预测完成,前5条结果:") predictions.show(5, truncate = false) val url = "jdbc:mysql://xueai:3306/shtd_industry?useSSL=false&serverTimezone=UTC" val props = new Properties() props.put("user", "root") props.put("password", "admin") props.put("driver", "com.mysql.jdbc.Driver") predictions.write .mode("overwrite") .jdbc(url, "ml_result", props) println("预测结果已成功写入 MySQL 表 ml_result!") println("Hive 中 dwd.fact_machine_learning_data_test 预测完毕。") println("请在 MySQL 中执行以下查询语句查看结果:") println( """ |SELECT * FROM ml_result |WHERE machine_record_id IN (1,8,20,28,36); |""".stripMargin) spark.stop() } } 报错/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=37031:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackso
n/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2
.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-java8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/fl
ink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/flink/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local
/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/sl
f4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-base_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scal
a-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/
repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/org/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scala
p_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/met
rics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parqu
et-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/repo/org/datanucleus/javax.jdo
/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24
.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/usr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/s
rc/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:
/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/ec
lipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2
.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-reflect.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2 log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/09 15:41:11 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33) 25/10/09 15:41:11 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address 25/10/09 15:41:11 INFO SparkContext: Running Spark version 3.1.1 25/10/09 15:41:11 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/09 15:41:11 INFO ResourceUtils: ============================================================== 25/10/09 15:41:11 INFO ResourceUtils: No custom resources configured for spark.driver. 
25/10/09 15:41:11 INFO ResourceUtils: ============================================================== 25/10/09 15:41:11 INFO SparkContext: Submitted application: RandomForestModel 25/10/09 15:41:11 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/10/09 15:41:11 INFO ResourceProfile: Limiting resource is cpu 25/10/09 15:41:11 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/10/09 15:41:11 INFO SecurityManager: Changing view acls to: root 25/10/09 15:41:11 INFO SecurityManager: Changing modify acls to: root 25/10/09 15:41:11 INFO SecurityManager: Changing view acls groups to: 25/10/09 15:41:11 INFO SecurityManager: Changing modify acls groups to: 25/10/09 15:41:11 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/10/09 15:41:12 INFO Utils: Successfully started service 'sparkDriver' on port 45815. 
25/10/09 15:41:12 INFO SparkEnv: Registering MapOutputTracker 25/10/09 15:41:12 INFO SparkEnv: Registering BlockManagerMaster 25/10/09 15:41:12 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/09 15:41:12 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/10/09 15:41:12 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/10/09 15:41:12 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-5cf527aa-87b6-4cde-8bd4-fd7658d60a9a 25/10/09 15:41:12 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB 25/10/09 15:41:12 INFO SparkEnv: Registering OutputCommitCoordinator 25/10/09 15:41:12 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/10/09 15:41:12 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040 25/10/09 15:41:12 INFO Executor: Starting executor ID driver on host 192.168.75.3 25/10/09 15:41:12 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44405. 
25/10/09 15:41:12 INFO NettyBlockTransferService: Server created on 192.168.75.3:44405 25/10/09 15:41:12 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/10/09 15:41:12 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 44405, None) 25/10/09 15:41:12 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:44405 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 44405, None) 25/10/09 15:41:12 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 44405, None) 25/10/09 15:41:12 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 44405, None) 25/10/09 15:41:13 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:41:13 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:41:14 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:41:14 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:41:14 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored 25/10/09 15:41:14 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored 25/10/09 15:41:14 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored 25/10/09 15:41:14 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:41:14 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist Exception in thread "main" org.apache.spark.sql.catalyst.parser.ParseException: DataType longtype is not supported.(line 1, pos 0) == SQL == LongType ^^^ at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitPrimitiveDataType$1(AstBuilder.scala:2267) at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:124) at 
org.apache.spark.sql.catalyst.parser.AstBuilder.visitPrimitiveDataType(AstBuilder.scala:2242) at org.apache.spark.sql.catalyst.parser.AstBuilder.visitPrimitiveDataType(AstBuilder.scala:54) at org.apache.spark.sql.catalyst.parser.SqlBaseParser$PrimitiveDataTypeContext.accept(SqlBaseParser.java:17259) at org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:58) at org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitSingleDataType$1(AstBuilder.scala:98) at org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:124) at org.apache.spark.sql.catalyst.parser.AstBuilder.visitSingleDataType(AstBuilder.scala:98) at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.$anonfun$parseDataType$1(ParseDriver.scala:39) at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:107) at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parseDataType(ParseDriver.scala:38) at org.apache.spark.sql.Column.cast(Column.scala:1204) at gs8.shujuwaqu2$.main(shujuwaqu2.scala:24) at gs8.shujuwaqu2.main(shujuwaqu2.scala) Process finished with exit code 1
最新发布
10-10
/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=35029:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activ
ation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-java8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/
netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-
jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/flink/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/o
rg/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/slf4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-b
ase_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scala-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat
_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/or
g/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scalap_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jerse
y/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/
org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:
/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/repo/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/loc
al/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/u
sr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/src/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/
apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase
/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/java
x.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-reflect
.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2 log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/09 15:19:59 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33) 25/10/09 15:19:59 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address 25/10/09 15:19:59 INFO SparkContext: Running Spark version 3.1.1 25/10/09 15:20:00 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/09 15:20:00 INFO ResourceUtils: ============================================================== 25/10/09 15:20:00 INFO ResourceUtils: No custom resources configured for spark.driver. 25/10/09 15:20:00 INFO ResourceUtils: ============================================================== 25/10/09 15:20:00 INFO SparkContext: Submitted application: RandomForestModel 25/10/09 15:20:00 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/10/09 15:20:00 INFO ResourceProfile: Limiting resource is cpu 25/10/09 15:20:00 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/10/09 15:20:00 INFO SecurityManager: Changing view acls to: root 25/10/09 15:20:00 INFO SecurityManager: Changing modify acls to: root 25/10/09 15:20:00 INFO SecurityManager: Changing view acls groups to: 25/10/09 15:20:00 INFO SecurityManager: Changing modify acls groups to: 25/10/09 15:20:00 INFO SecurityManager: SecurityManager: authentication disabled; ui 
acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/10/09 15:20:00 INFO Utils: Successfully started service 'sparkDriver' on port 39167. 25/10/09 15:20:00 INFO SparkEnv: Registering MapOutputTracker 25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMaster 25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/10/09 15:20:00 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-7824d7b3-541f-4ec6-88e6-298ad263b86a 25/10/09 15:20:00 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB 25/10/09 15:20:00 INFO SparkEnv: Registering OutputCommitCoordinator 25/10/09 15:20:01 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/10/09 15:20:01 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040 25/10/09 15:20:01 INFO Executor: Starting executor ID driver on host 192.168.75.3 25/10/09 15:20:01 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 33353. 
25/10/09 15:20:01 INFO NettyBlockTransferService: Server created on 192.168.75.3:33353 25/10/09 15:20:01 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/10/09 15:20:01 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:33353 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:20:03 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored 25/10/09 15:20:03 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored 25/10/09 15:20:03 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored 25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 随机森林模型训练完成! 预测完成,前5条结果: +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ +-----------------+--------------------+ 预测结果已成功写入 MySQL 表 ml_result! 
Hive 中 dwd.fact_machine_learning_data_test 预测完毕。 请在 MySQL 中执行以下查询语句查看结果: SELECT * FROM ml_result WHERE machine_record_id IN (1,8,20,28,36); +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ +-----------------+--------------------+ 代码显示空值
10-10
/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=46819:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activ
ation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-java8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/
netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-
jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/flink/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/o
rg/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/slf4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-b
ase_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scala-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat
_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/or
g/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scalap_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jerse
y/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/
org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:
/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/repo/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/loc
al/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/u
sr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/src/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/
apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase
/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/java
x.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-reflect
.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2 log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/09 15:31:36 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33) 25/10/09 15:31:36 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address 25/10/09 15:31:36 INFO SparkContext: Running Spark version 3.1.1 25/10/09 15:31:36 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/09 15:31:36 INFO ResourceUtils: ============================================================== 25/10/09 15:31:36 INFO ResourceUtils: No custom resources configured for spark.driver. 25/10/09 15:31:36 INFO ResourceUtils: ============================================================== 25/10/09 15:31:36 INFO SparkContext: Submitted application: RandomForestModel 25/10/09 15:31:36 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/10/09 15:31:36 INFO ResourceProfile: Limiting resource is cpu 25/10/09 15:31:36 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/10/09 15:31:36 INFO SecurityManager: Changing view acls to: root 25/10/09 15:31:36 INFO SecurityManager: Changing modify acls to: root 25/10/09 15:31:36 INFO SecurityManager: Changing view acls groups to: 25/10/09 15:31:36 INFO SecurityManager: Changing modify acls groups to: 25/10/09 15:31:36 INFO SecurityManager: SecurityManager: authentication disabled; ui 
acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/10/09 15:31:37 INFO Utils: Successfully started service 'sparkDriver' on port 33909. 25/10/09 15:31:37 INFO SparkEnv: Registering MapOutputTracker 25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMaster 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/10/09 15:31:37 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-fc5a25f6-f8d7-41e2-8c52-61cc65b7fc90 25/10/09 15:31:37 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB 25/10/09 15:31:37 INFO SparkEnv: Registering OutputCommitCoordinator 25/10/09 15:31:37 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/10/09 15:31:37 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040 25/10/09 15:31:37 INFO Executor: Starting executor ID driver on host 192.168.75.3 25/10/09 15:31:37 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41227. 
25/10/09 15:31:37 INFO NettyBlockTransferService: Server created on 192.168.75.3:41227 25/10/09 15:31:37 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/10/09 15:31:37 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:41227 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:31:39 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored 25/10/09 15:31:39 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored 25/10/09 15:31:39 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 随机森林模型训练完成! 预测完成,前5条结果: +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ |1.4747628E7 |0.0 | |1.4747629E7 |0.0 | |1.474763E7 |0.0 | |1.4747631E7 |0.0 | |1.4747632E7 |0.0 | +-----------------+--------------------+ only showing top 5 rows c
10-10
"C:\Program Files\Java\jdk1.8.0_261\bin\java.exe" "-javaagent:C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2022.2.2\lib\idea_rt.jar=62562:C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2022.2.2\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.8.0_261\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\cldrdata.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\jfxrt.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\nashorn.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\sunpkcs11.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\jfxswt.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.8.0_261\jre\lib\resources.jar;C:\Program 
Files\Java\jdk1.8.0_261\jre\lib\rt.jar;D:\yunjisuan\lcy\target\classes;D:\yunjisuan\.m2\repository\org\apache\spark\spark-core_2.11\2.4.6\spark-core_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\com\thoughtworks\paranamer\paranamer\2.8\paranamer-2.8.jar;D:\yunjisuan\.m2\repository\org\apache\avro\avro\1.8.2\avro-1.8.2.jar;D:\yunjisuan\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;D:\yunjisuan\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;D:\yunjisuan\.m2\repository\org\apache\commons\commons-compress\1.8.1\commons-compress-1.8.1.jar;D:\yunjisuan\.m2\repository\org\tukaani\xz\1.5\xz-1.5.jar;D:\yunjisuan\.m2\repository\org\apache\avro\avro-mapred\1.8.2\avro-mapred-1.8.2-hadoop2.jar;D:\yunjisuan\.m2\repository\org\apache\avro\avro-ipc\1.8.2\avro-ipc-1.8.2.jar;D:\yunjisuan\.m2\repository\com\twitter\chill_2.11\0.9.3\chill_2.11-0.9.3.jar;D:\yunjisuan\.m2\repository\com\esotericsoftware\kryo-shaded\4.0.2\kryo-shaded-4.0.2.jar;D:\yunjisuan\.m2\repository\com\esotericsoftware\minlog\1.3.0\minlog-1.3.0.jar;D:\yunjisuan\.m2\repository\org\objenesis\objenesis\2.5.1\objenesis-2.5.1.jar;D:\yunjisuan\.m2\repository\com\twitter\chill-java\0.9.3\chill-java-0.9.3.jar;D:\yunjisuan\.m2\repository\org\apache\xbean\xbean-asm6-shaded\4.8\xbean-asm6-shaded-4.8.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-client\2.6.5\hadoop-client-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-common\2.6.5\hadoop-common-2.6.5.jar;D:\yunjisuan\.m2\repository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar;D:\yunjisuan\.m2\repository\xmlenc\xmlenc\0.52\xmlenc-0.52.jar;D:\yunjisuan\.m2\repository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;D:\yunjisuan\.m2\repository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;D:\yunjisuan\.m2\repository\commons-configuration\commons-configuration\1.6\commons-configuration-1.6.jar;D:\yunjisuan\.m2\repository
\commons-digester\commons-digester\1.8\commons-digester-1.8.jar;D:\yunjisuan\.m2\repository\commons-beanutils\commons-beanutils\1.7.0\commons-beanutils-1.7.0.jar;D:\yunjisuan\.m2\repository\com\google\code\gson\gson\2.2.4\gson-2.2.4.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-auth\2.6.5\hadoop-auth-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\httpcomponents\httpclient\4.2.5\httpclient-4.2.5.jar;D:\yunjisuan\.m2\repository\org\apache\httpcomponents\httpcore\4.2.4\httpcore-4.2.4.jar;D:\yunjisuan\.m2\repository\org\apache\directory\server\apacheds-kerberos-codec\2.0.0-M15\apacheds-kerberos-codec-2.0.0-M15.jar;D:\yunjisuan\.m2\repository\org\apache\directory\server\apacheds-i18n\2.0.0-M15\apacheds-i18n-2.0.0-M15.jar;D:\yunjisuan\.m2\repository\org\apache\directory\api\api-asn1-api\1.0.0-M20\api-asn1-api-1.0.0-M20.jar;D:\yunjisuan\.m2\repository\org\apache\directory\api\api-util\1.0.0-M20\api-util-1.0.0-M20.jar;D:\yunjisuan\.m2\repository\org\apache\curator\curator-client\2.6.0\curator-client-2.6.0.jar;D:\yunjisuan\.m2\repository\org\htrace\htrace-core\3.0.4\htrace-core-3.0.4.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-hdfs\2.6.5\hadoop-hdfs-2.6.5.jar;D:\yunjisuan\.m2\repository\org\mortbay\jetty\jetty-util\6.1.26\jetty-util-6.1.26.jar;D:\yunjisuan\.m2\repository\xerces\xercesImpl\2.9.1\xercesImpl-2.9.1.jar;D:\yunjisuan\.m2\repository\xml-apis\xml-apis\1.3.04\xml-apis-1.3.04.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-app\2.6.5\hadoop-mapreduce-client-app-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-common\2.6.5\hadoop-mapreduce-client-common-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-yarn-client\2.6.5\hadoop-yarn-client-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-yarn-server-common\2.6.5\hadoop-yarn-server-common-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-shuffle\2.6.5\hadoop-mapreduce-client-shuffle-2
.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-yarn-api\2.6.5\hadoop-yarn-api-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-core\2.6.5\hadoop-mapreduce-client-core-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-yarn-common\2.6.5\hadoop-yarn-common-2.6.5.jar;D:\yunjisuan\.m2\repository\javax\xml\bind\jaxb-api\2.2.2\jaxb-api-2.2.2.jar;D:\yunjisuan\.m2\repository\javax\xml\stream\stax-api\1.0-2\stax-api-1.0-2.jar;D:\yunjisuan\.m2\repository\org\codehaus\jackson\jackson-jaxrs\1.9.13\jackson-jaxrs-1.9.13.jar;D:\yunjisuan\.m2\repository\org\codehaus\jackson\jackson-xc\1.9.13\jackson-xc-1.9.13.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-jobclient\2.6.5\hadoop-mapreduce-client-jobclient-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\hadoop\hadoop-annotations\2.6.5\hadoop-annotations-2.6.5.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-launcher_2.11\2.4.6\spark-launcher_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-kvstore_2.11\2.4.6\spark-kvstore_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;D:\yunjisuan\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.6.7\jackson-core-2.6.7.jar;D:\yunjisuan\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.6.7\jackson-annotations-2.6.7.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-network-common_2.11\2.4.6\spark-network-common_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-network-shuffle_2.11\2.4.6\spark-network-shuffle_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-unsafe_2.11\2.4.6\spark-unsafe_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\javax\activation\activation\1.1.1\activation-1.1.1.jar;D:\yunjisuan\.m2\repository\org\apache\curator\curator-recipes\2.6.0\curator-recipes-2.6.0.jar;D:\yunjisuan\.m2\repository\org\apache\curator\curator-framework\2.6.0\curator-framework-2.6.0.jar;D
:\yunjisuan\.m2\repository\com\google\guava\guava\16.0.1\guava-16.0.1.jar;D:\yunjisuan\.m2\repository\org\apache\zookeeper\zookeeper\3.4.6\zookeeper-3.4.6.jar;D:\yunjisuan\.m2\repository\javax\servlet\javax.servlet-api\3.1.0\javax.servlet-api-3.1.0.jar;D:\yunjisuan\.m2\repository\org\apache\commons\commons-lang3\3.5\commons-lang3-3.5.jar;D:\yunjisuan\.m2\repository\org\apache\commons\commons-math3\3.4.1\commons-math3-3.4.1.jar;D:\yunjisuan\.m2\repository\com\google\code\findbugs\jsr305\1.3.9\jsr305-1.3.9.jar;D:\yunjisuan\.m2\repository\org\slf4j\slf4j-api\1.7.16\slf4j-api-1.7.16.jar;D:\yunjisuan\.m2\repository\org\slf4j\jul-to-slf4j\1.7.16\jul-to-slf4j-1.7.16.jar;D:\yunjisuan\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.16\jcl-over-slf4j-1.7.16.jar;D:\yunjisuan\.m2\repository\log4j\log4j\1.2.17\log4j-1.2.17.jar;D:\yunjisuan\.m2\repository\org\slf4j\slf4j-log4j12\1.7.16\slf4j-log4j12-1.7.16.jar;D:\yunjisuan\.m2\repository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;D:\yunjisuan\.m2\repository\org\xerial\snappy\snappy-java\1.1.7.5\snappy-java-1.1.7.5.jar;D:\yunjisuan\.m2\repository\org\lz4\lz4-java\1.4.0\lz4-java-1.4.0.jar;D:\yunjisuan\.m2\repository\com\github\luben\zstd-jni\1.3.2-2\zstd-jni-1.3.2-2.jar;D:\yunjisuan\.m2\repository\org\roaringbitmap\RoaringBitmap\0.7.45\RoaringBitmap-0.7.45.jar;D:\yunjisuan\.m2\repository\org\roaringbitmap\shims\0.7.45\shims-0.7.45.jar;D:\yunjisuan\.m2\repository\commons-net\commons-net\3.1\commons-net-3.1.jar;D:\yunjisuan\.m2\repository\org\json4s\json4s-jackson_2.11\3.5.3\json4s-jackson_2.11-3.5.3.jar;D:\yunjisuan\.m2\repository\org\json4s\json4s-core_2.11\3.5.3\json4s-core_2.11-3.5.3.jar;D:\yunjisuan\.m2\repository\org\json4s\json4s-ast_2.11\3.5.3\json4s-ast_2.11-3.5.3.jar;D:\yunjisuan\.m2\repository\org\json4s\json4s-scalap_2.11\3.5.3\json4s-scalap_2.11-3.5.3.jar;D:\yunjisuan\.m2\repository\org\scala-lang\modules\scala-xml_2.11\1.0.6\scala-xml_2.11-1.0.6.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\core\jersey-client
\2.22.2\jersey-client-2.22.2.jar;D:\yunjisuan\.m2\repository\javax\ws\rs\javax.ws.rs-api\2.0.1\javax.ws.rs-api-2.0.1.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\hk2-api\2.4.0-b34\hk2-api-2.4.0-b34.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\hk2-utils\2.4.0-b34\hk2-utils-2.4.0-b34.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\external\aopalliance-repackaged\2.4.0-b34\aopalliance-repackaged-2.4.0-b34.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\external\javax.inject\2.4.0-b34\javax.inject-2.4.0-b34.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\hk2-locator\2.4.0-b34\hk2-locator-2.4.0-b34.jar;D:\yunjisuan\.m2\repository\org\javassist\javassist\3.18.1-GA\javassist-3.18.1-GA.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\core\jersey-common\2.22.2\jersey-common-2.22.2.jar;D:\yunjisuan\.m2\repository\javax\annotation\javax.annotation-api\1.2\javax.annotation-api-1.2.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\bundles\repackaged\jersey-guava\2.22.2\jersey-guava-2.22.2.jar;D:\yunjisuan\.m2\repository\org\glassfish\hk2\osgi-resource-locator\1.0.1\osgi-resource-locator-1.0.1.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\core\jersey-server\2.22.2\jersey-server-2.22.2.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\media\jersey-media-jaxb\2.22.2\jersey-media-jaxb-2.22.2.jar;D:\yunjisuan\.m2\repository\javax\validation\validation-api\1.1.0.Final\validation-api-1.1.0.Final.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet\2.22.2\jersey-container-servlet-2.22.2.jar;D:\yunjisuan\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet-core\2.22.2\jersey-container-servlet-core-2.22.2.jar;D:\yunjisuan\.m2\repository\io\netty\netty-all\4.1.47.Final\netty-all-4.1.47.Final.jar;D:\yunjisuan\.m2\repository\io\netty\netty\3.9.9.Final\netty-3.9.9.Final.jar;D:\yunjisuan\.m2\repository\com\clearspring\analytics\stream\2.7.0\stream-2.7.0.jar;D:\yunjisuan\.m2\repository\io\dropwizard\metrics\
metrics-core\3.1.5\metrics-core-3.1.5.jar;D:\yunjisuan\.m2\repository\io\dropwizard\metrics\metrics-jvm\3.1.5\metrics-jvm-3.1.5.jar;D:\yunjisuan\.m2\repository\io\dropwizard\metrics\metrics-json\3.1.5\metrics-json-3.1.5.jar;D:\yunjisuan\.m2\repository\io\dropwizard\metrics\metrics-graphite\3.1.5\metrics-graphite-3.1.5.jar;D:\yunjisuan\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.6.7.3\jackson-databind-2.6.7.3.jar;D:\yunjisuan\.m2\repository\com\fasterxml\jackson\module\jackson-module-scala_2.11\2.6.7.1\jackson-module-scala_2.11-2.6.7.1.jar;D:\yunjisuan\.m2\repository\org\scala-lang\scala-reflect\2.11.8\scala-reflect-2.11.8.jar;D:\yunjisuan\.m2\repository\com\fasterxml\jackson\module\jackson-module-paranamer\2.7.9\jackson-module-paranamer-2.7.9.jar;D:\yunjisuan\.m2\repository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;D:\yunjisuan\.m2\repository\oro\oro\2.0.8\oro-2.0.8.jar;D:\yunjisuan\.m2\repository\net\razorvine\pyrolite\4.13\pyrolite-4.13.jar;D:\yunjisuan\.m2\repository\net\sf\py4j\py4j\0.10.7\py4j-0.10.7.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-tags_2.11\2.4.6\spark-tags_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\commons\commons-crypto\1.0.0\commons-crypto-1.0.0.jar;D:\yunjisuan\.m2\repository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-mllib_2.11\2.4.6\spark-mllib_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\scala-lang\modules\scala-parser-combinators_2.11\1.1.0\scala-parser-combinators_2.11-1.1.0.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-streaming_2.11\2.4.6\spark-streaming_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-graphx_2.11\2.4.6\spark-graphx_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\com\github\fommil\netlib\core\1.1.2\core-1.1.2.jar;D:\yunjisuan\.m2\repository\net\sourceforge\f2j\arpack_combined_all\0.1\arpack_combined_all-0.1.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-mllib-local_2.11\2.4.6\spark-mllib-local_
2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\scalanlp\breeze_2.11\0.13.2\breeze_2.11-0.13.2.jar;D:\yunjisuan\.m2\repository\org\scalanlp\breeze-macros_2.11\0.13.2\breeze-macros_2.11-0.13.2.jar;D:\yunjisuan\.m2\repository\net\sf\opencsv\opencsv\2.3\opencsv-2.3.jar;D:\yunjisuan\.m2\repository\com\github\rwl\jtransforms\2.4.0\jtransforms-2.4.0.jar;D:\yunjisuan\.m2\repository\org\spire-math\spire_2.11\0.13.0\spire_2.11-0.13.0.jar;D:\yunjisuan\.m2\repository\org\spire-math\spire-macros_2.11\0.13.0\spire-macros_2.11-0.13.0.jar;D:\yunjisuan\.m2\repository\org\typelevel\machinist_2.11\0.6.1\machinist_2.11-0.6.1.jar;D:\yunjisuan\.m2\repository\com\chuusai\shapeless_2.11\2.3.2\shapeless_2.11-2.3.2.jar;D:\yunjisuan\.m2\repository\org\typelevel\macro-compat_2.11\1.1.1\macro-compat_2.11-1.1.1.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-sql_2.11\2.4.6\spark-sql_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\com\univocity\univocity-parsers\2.7.3\univocity-parsers-2.7.3.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-sketch_2.11\2.4.6\spark-sketch_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\apache\spark\spark-catalyst_2.11\2.4.6\spark-catalyst_2.11-2.4.6.jar;D:\yunjisuan\.m2\repository\org\codehaus\janino\janino\3.0.16\janino-3.0.16.jar;D:\yunjisuan\.m2\repository\org\codehaus\janino\commons-compiler\3.0.16\commons-compiler-3.0.16.jar;D:\yunjisuan\.m2\repository\org\antlr\antlr4-runtime\4.7\antlr4-runtime-4.7.jar;D:\yunjisuan\.m2\repository\org\apache\orc\orc-core\1.5.5\orc-core-1.5.5-nohive.jar;D:\yunjisuan\.m2\repository\org\apache\orc\orc-shims\1.5.5\orc-shims-1.5.5.jar;D:\yunjisuan\.m2\repository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;D:\yunjisuan\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;D:\yunjisuan\.m2\repository\io\airlift\aircompressor\0.10\aircompressor-0.10.jar;D:\yunjisuan\.m2\repository\org\apache\orc\orc-mapreduce\1.5.5\orc-mapreduce-1.5.5-nohive.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parq
uet-column\1.10.1\parquet-column-1.10.1.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parquet-common\1.10.1\parquet-common-1.10.1.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parquet-encoding\1.10.1\parquet-encoding-1.10.1.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parquet-hadoop\1.10.1\parquet-hadoop-1.10.1.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parquet-format\2.4.0\parquet-format-2.4.0.jar;D:\yunjisuan\.m2\repository\org\apache\parquet\parquet-jackson\1.10.1\parquet-jackson-1.10.1.jar;D:\yunjisuan\.m2\repository\org\apache\arrow\arrow-vector\0.10.0\arrow-vector-0.10.0.jar;D:\yunjisuan\.m2\repository\org\apache\arrow\arrow-format\0.10.0\arrow-format-0.10.0.jar;D:\yunjisuan\.m2\repository\org\apache\arrow\arrow-memory\0.10.0\arrow-memory-0.10.0.jar;D:\yunjisuan\.m2\repository\joda-time\joda-time\2.9.9\joda-time-2.9.9.jar;D:\yunjisuan\.m2\repository\com\carrotsearch\hppc\0.7.2\hppc-0.7.2.jar;D:\yunjisuan\.m2\repository\com\vlkan\flatbuffers\1.2.0-3f79e055\flatbuffers-1.2.0-3f79e055.jar;D:\yunjisuan\.m2\repository\org\apache\maven\plugins\maven-assembly-plugin\3.0.0\maven-assembly-plugin-3.0.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-plugin-api\3.0\maven-plugin-api-3.0.jar;D:\yunjisuan\.m2\repository\org\sonatype\sisu\sisu-inject-plexus\1.4.2\sisu-inject-plexus-1.4.2.jar;D:\yunjisuan\.m2\repository\org\sonatype\sisu\sisu-inject-bean\1.4.2\sisu-inject-bean-1.4.2.jar;D:\yunjisuan\.m2\repository\org\sonatype\sisu\sisu-guice\2.1.7\sisu-guice-2.1.7-noaop.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-core\3.0\maven-core-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-settings\3.0\maven-settings-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-settings-builder\3.0\maven-settings-builder-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-repository-metadata\3.0\maven-repository-metadata-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-model-builder\3.0\maven-model-builder-3.0.jar;D:\y
unjisuan\.m2\repository\org\apache\maven\maven-aether-provider\3.0\maven-aether-provider-3.0.jar;D:\yunjisuan\.m2\repository\org\sonatype\aether\aether-impl\1.7\aether-impl-1.7.jar;D:\yunjisuan\.m2\repository\org\sonatype\aether\aether-spi\1.7\aether-spi-1.7.jar;D:\yunjisuan\.m2\repository\org\sonatype\aether\aether-api\1.7\aether-api-1.7.jar;D:\yunjisuan\.m2\repository\org\sonatype\aether\aether-util\1.7\aether-util-1.7.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-classworlds\2.2.3\plexus-classworlds-2.2.3.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-component-annotations\1.5.5\plexus-component-annotations-1.5.5.jar;D:\yunjisuan\.m2\repository\org\sonatype\plexus\plexus-sec-dispatcher\1.3\plexus-sec-dispatcher-1.3.jar;D:\yunjisuan\.m2\repository\org\sonatype\plexus\plexus-cipher\1.4\plexus-cipher-1.4.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-artifact\3.0\maven-artifact-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-model\3.0\maven-model-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\maven-common-artifact-filters\3.0.1\maven-common-artifact-filters-3.0.1.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\maven-shared-utils\3.1.0\maven-shared-utils-3.1.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\maven-artifact-transfer\0.9.0\maven-artifact-transfer-0.9.0.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-interpolation\1.24\plexus-interpolation-1.24.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-archiver\3.4\plexus-archiver-3.4.jar;D:\yunjisuan\.m2\repository\org\iq80\snappy\snappy\0.4\snappy-0.4.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\file-management\3.0.0\file-management-3.0.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\maven-shared-io\3.0.0\maven-shared-io-3.0.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-compat\3.0\maven-compat-3.0.jar;D:\yunjisuan\.m2\repository\org\apache\maven\wagon\wagon-provider-api\2.10\wagon-provide
r-api-2.10.jar;D:\yunjisuan\.m2\repository\commons-io\commons-io\2.5\commons-io-2.5.jar;D:\yunjisuan\.m2\repository\org\apache\maven\shared\maven-filtering\3.1.1\maven-filtering-3.1.1.jar;D:\yunjisuan\.m2\repository\org\sonatype\plexus\plexus-build-api\0.0.7\plexus-build-api-0.0.7.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-io\2.7.1\plexus-io-2.7.1.jar;D:\yunjisuan\.m2\repository\org\apache\maven\maven-archiver\3.1.1\maven-archiver-3.1.1.jar;D:\yunjisuan\.m2\repository\org\codehaus\plexus\plexus-utils\3.0.24\plexus-utils-3.0.24.jar;D:\yunjisuan\.m2\repository\commons-codec\commons-codec\1.6\commons-codec-1.6.jar;D:\yunjisuan\.m2\repository\org\scala-lang\scala-library\2.11.12\scala-library-2.11.12.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-actors-2.11.0.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-actors-migration_2.11-1.1.0.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-library.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-parser-combinators_2.11-1.0.4.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-reflect.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-swing_2.11-1.0.2.jar;D:\yunjisuan\scala-2.11.12\scala-2.11.12\lib\scala-xml_2.11-1.0.5.jar" wazi.waye Exception in thread "main" org.apache.spark.sql.AnalysisException: cannot resolve '`date`' given input columns: [货号, 规格, 日期, 出库];; 'Project [货号#10, 规格#11, 日期#12, 出库#13, to_date('date, Some(yyyy-MM-dd)) AS date#18] +- Relation[货号#10,规格#11,日期#12,出库#13] csv at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42) at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$3.applyOrElse(CheckAnalysis.scala:111) at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$3.applyOrElse(CheckAnalysis.scala:108) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:280) at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:280) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:279) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:93) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:93) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1.apply(QueryPlan.scala:105) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1.apply(QueryPlan.scala:105) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69) at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpression$1(QueryPlan.scala:104) at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:116) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1$2.apply(QueryPlan.scala:121) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.immutable.List.foreach(List.scala:392) at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) at 
scala.collection.immutable.List.map(List.scala:296) at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:121) at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$2.apply(QueryPlan.scala:126) at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) at org.apache.spark.sql.catalyst.plans.QueryPlan.mapExpressions(QueryPlan.scala:126) at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsUp(QueryPlan.scala:93) at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:108) at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:86) at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:126) at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:86) at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:95) at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:108) at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105) at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201) at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105) at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:58) at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:56) at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:48) at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:3412) at org.apache.spark.sql.Dataset.select(Dataset.scala:1340) at org.apache.spark.sql.Dataset.withColumns(Dataset.scala:2258) at 
org.apache.spark.sql.Dataset.withColumn(Dataset.scala:2225) at wazi.waye$.main(waye.scala:23) at wazi.waye.main(waye.scala) 进程已结束,退出代码1
05-25
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值