hadoop-spark 内存测试-version 3.1.1

1.

Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /___/ .__/\_,_/_/ /_/\_\   version 3.1.1
      /_/

Using Scala version 2.12.10 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_231)
Type in expressions to have them evaluated.
Type :help for more information.
 

3个节点  8gb(主节点)+4gb(1)+4gb(2)

[root@master huangtest]# cat 1gfortest.scala
import org.apache.spark.sql.SparkSession
import java.lang.management.ManagementFactory

object MemoryConsumer {
  /**
   * Allocates ~1GB of driver heap in 100MB chunks, printing heap / non-heap /
   * process-RSS statistics after every chunk, so driver-memory settings can be
   * observed interactively (run via spark-shell `:load`).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("MemoryConsumer")
      .config("spark.driver.memory", "2g")  // reserve extra headroom for Spark internals
      .getOrCreate()

    // JVM memory monitoring handles.
    val mxBean = ManagementFactory.getMemoryMXBean
    val runtime = Runtime.getRuntime

    // Target: consume 1GB in total (actual footprint will slightly exceed this).
    val targetBytes = 1024L * 1024 * 1024
    val chunkSize = 100 * 1024 * 1024  // allocate 100MB per iteration
    var totalAllocated = 0L
    val memoryHog = scala.collection.mutable.ArrayBuffer[Array[Byte]]()

    println(s"=== 开始内存消耗测试 ===")
    println(s"JVM最大内存: ${runtime.maxMemory() / 1024 / 1024}MB")
    println(s"初始堆内存: ${mxBean.getHeapMemoryUsage.getUsed / 1024 / 1024}MB")
    println(s"初始非堆内存: ${mxBean.getNonHeapMemoryUsage.getUsed / 1024 / 1024}MB")

    try {
      while (totalAllocated < targetBytes) {
        // Allocate a chunk and touch every byte so physical pages are committed.
        val chunk = new Array[Byte](chunkSize)
        java.util.Arrays.fill(chunk, 1.toByte)
        memoryHog += chunk
        totalAllocated += chunkSize

        // Report current memory state.
        val heapUsed = mxBean.getHeapMemoryUsage.getUsed
        val nonHeapUsed = mxBean.getNonHeapMemoryUsage.getUsed
        // Fix: getProcessRSS returns BYTES, so MB needs /1024/1024.
        // The original divided by 1024 only, printing KB labelled "MB"
        // (visible in the transcript as absurd values like "682240MB").
        println(f"[进度] 已分配: ${totalAllocated / 1024 / 1024}%4dMB | " +
                f"堆内存: ${heapUsed / 1024 / 1024}%4dMB | " +
                f"非堆: ${nonHeapUsed / 1024 / 1024}%3dMB | " +
                f"总RSS: ${getProcessRSS / 1024 / 1024}%5dMB")

        Thread.sleep(500)  // slow down so progress can be watched
      }
      println("=== 测试成功完成 ===")
    } catch {
      case e: OutOfMemoryError =>
        // Deliberately caught: an OOM is an expected outcome of this stress test.
        println(s"!!! 内存溢出 !!! 最终分配: ${totalAllocated / 1024 / 1024}MB")
    } finally {
      spark.stop()
    }
  }

  // Process resident set size (RSS) in BYTES, read from /proc/<pid>/statm (Linux only).
  def getProcessRSS: Long = {
    val pid = ManagementFactory.getRuntimeMXBean.getName.split("@")(0)
    val source = scala.io.Source.fromFile(s"/proc/$pid/statm")
    try {
      // Field 1 of statm is the resident page count.
      val rssPages = source.mkString.split(" ")(1).toLong
      rssPages * 4096L  // NOTE(review): assumes 4KB pages — true on typical x86-64 Linux
    } finally {
      source.close()  // fix: original leaked the file handle
    }
  }
}

// Run the test directly so `:load`-ing this script in spark-shell executes it.
MemoryConsumer.main(Array.empty)
[root@master huangtest]#

2.

修改 spark-shell 启动参数（为 driver 分配 2g 内存），然后在 shell 中加载脚本：

spark-shell --driver-memory 2g --conf spark.driver.maxResultSize=1g

:load /home/huangtest/1gfortest.scala

3.测试结果

scala> :load /home/huangtest/1gfortest.scala
Loading /home/huangtest/1gfortest.scala...
import org.apache.spark.sql.SparkSession
import java.lang.management.ManagementFactory
defined object MemoryConsumer
2025-08-06 15:20:13,922 WARN sql.SparkSession$Builder: Using an existing SparkSession; some spark core configurations may not take effect.
=== 开始内存消耗测试 ===
JVM最大内存: 1820MB
初始堆内存: 188MB
初始非堆内存: 137MB
[进度] 已分配:  100MB | 堆内存:  288MB | 非堆: 136MB | 总RSS: 682240MB
[进度] 已分配:  200MB | 堆内存:  299MB | 非堆: 136MB | 总RSS: 841036MB
[进度] 已分配:  300MB | 堆内存:  404MB | 非堆: 136MB | 总RSS: 841036MB
[进度] 已分配:  400MB | 堆内存:  491MB | 非堆: 136MB | 总RSS: 1043976MB
[进度] 已分配:  500MB | 堆内存:  596MB | 非堆: 135MB | 总RSS: 1043976MB
[进度] 已分配:  600MB | 堆内存:  691MB | 非堆: 135MB | 总RSS: 1248968MB
[进度] 已分配:  700MB | 堆内存:  797MB | 非堆: 135MB | 总RSS: 1248968MB
[进度] 已分配:  800MB | 堆内存:  892MB | 非堆: 135MB | 总RSS: 1461948MB
[进度] 已分配:  900MB | 堆内存: 1001MB | 非堆: 134MB | 总RSS: 1454624MB
[进度] 已分配: 1000MB | 堆内存: 1091MB | 非堆: 134MB | 总RSS: 1659492MB
[进度] 已分配: 1100MB | 堆内存: 1198MB | 非堆: 134MB | 总RSS: 1659492MB
=== 测试成功完成 ===

2.1测试内存的分配

[root@master huangtest]# cat detailMemory2.scala
import org.apache.spark.sql.SparkSession
import java.lang.management.{ManagementFactory, MemoryPoolMXBean, MemoryType, MemoryMXBean}
import scala.collection.JavaConverters._

object DetailedMemoryTracker {
  /**
   * Allocates 500MB of driver heap in 50MB batches, dumping a detailed
   * breakdown (per-pool usage, GC counters, process RSS) after every batch so
   * heap growth and GC behaviour can be observed interactively
   * (run via spark-shell `:load`).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("DetailedMemoryTracker")
      .config("spark.driver.memory", "2g")
      // NOTE(review): when loaded into an already-running spark-shell JVM these
      // options cannot take effect (the builder logs a WARN that an existing
      // SparkSession is reused) — confirm if GC logging is actually needed.
      .config("spark.driver.extraJavaOptions",
               "-XX:+PrintGCDetails -XX:+PrintGCTimeStamps")
      .getOrCreate()

    // Memory subsystem handles.
    val mxBean = ManagementFactory.getMemoryMXBean
    val memoryPools = ManagementFactory.getMemoryPoolMXBeans.asScala
    val runtime = Runtime.getRuntime

    // Snapshot the baseline before any allocation.
    printInitialMemoryStats(mxBean, memoryPools, runtime)

    // Allocation plan: 10 batches of 50MB = 500MB total.
    val targetMB = 500
    val chunkSize = 50 * 1024 * 1024
    val memoryHog = scala.collection.mutable.ArrayBuffer[Array[Byte]]()

    println("\n=== 开始内存分配跟踪 ===")
    try {
      (1 to (targetMB / 50)).foreach { i =>
        // Allocate a batch and touch every byte so pages are committed to RAM.
        val chunk = new Array[Byte](chunkSize)
        java.util.Arrays.fill(chunk, 1.toByte)
        memoryHog += chunk

        println(s"\n[分配批次 $i] 已分配 ${i * 50}MB")
        printCurrentMemoryStats(mxBean, memoryPools, runtime)

        Thread.sleep(1000)  // pause between batches for easier observation
      }
      println("=== 内存分配测试完成 ===")
    } catch {
      case e: OutOfMemoryError =>
        // Deliberately caught: OOM is an expected outcome of this stress test.
        println(s"\n!!! 内存溢出 !!! 错误: ${e.getMessage}")
        println("当前内存状态:")
        printCurrentMemoryStats(mxBean, memoryPools, runtime)
    } finally {
      spark.stop()
    }
  }

  /** Prints the pre-allocation baseline: JVM limits, heap/non-heap usage, per-pool detail. */
  def printInitialMemoryStats(
    mxBean: MemoryMXBean,
    pools: scala.collection.Seq[MemoryPoolMXBean],
    runtime: Runtime
  ): Unit = {
    println("\n=== 初始内存状态 ===")
    println(s"JVM最大内存(Xmx): ${runtime.maxMemory() / 1024 / 1024}MB")
    println(s"总物理内存: ${runtime.totalMemory() / 1024 / 1024}MB")
    println(s"空闲内存: ${runtime.freeMemory() / 1024 / 1024}MB")

    val heapUsage = mxBean.getHeapMemoryUsage
    println("\n[堆内存初始状态]")
    println(s"初始提交: ${heapUsage.getCommitted / 1024 / 1024}MB")
    println(s"初始使用: ${heapUsage.getUsed / 1024 / 1024}MB")
    println(s"最大容量: ${heapUsage.getMax / 1024 / 1024}MB")

    val nonHeapUsage = mxBean.getNonHeapMemoryUsage
    println("\n[非堆内存初始状态]")
    println(s"初始提交: ${nonHeapUsage.getCommitted / 1024 / 1024}MB")
    println(s"初始使用: ${nonHeapUsage.getUsed / 1024 / 1024}MB")

    println("\n[各内存池详情]")
    pools.foreach { pool =>
      val usage = pool.getUsage
      val poolType = if (pool.getType == MemoryType.HEAP) "堆" else "非堆"
      println(s"${pool.getName} ($poolType):")
      println(s"  提交: ${usage.getCommitted / 1024 / 1024}MB")
      println(s"  使用: ${usage.getUsed / 1024 / 1024}MB")
      if (pool.getType == MemoryType.HEAP) {
        // getMax can be -1 (undefined) for some non-heap pools, so only heap pools print it.
        println(s"  最大: ${usage.getMax / 1024 / 1024}MB")
      }
    }
  }

  /** Prints the current state: overall heap usage, non-empty pools, GC counters, process RSS. */
  def printCurrentMemoryStats(
    mxBean: MemoryMXBean,
    pools: scala.collection.Seq[MemoryPoolMXBean],
    runtime: Runtime
  ): Unit = {
    println("\n[当前内存概览]")
    println(s"总分配: ${(runtime.totalMemory() - runtime.freeMemory()) / 1024 / 1024}MB")
    println(s"空闲内存: ${runtime.freeMemory() / 1024 / 1024}MB")

    val heapUsage = mxBean.getHeapMemoryUsage
    println("\n[堆内存变化]")
    println(s"当前使用: ${heapUsage.getUsed / 1024 / 1024}MB")
    println(s"提交大小: ${heapUsage.getCommitted / 1024 / 1024}MB")

    println("\n[各内存池变化]")
    pools.foreach { pool =>
      val usage = pool.getUsage
      val usedMB = usage.getUsed / 1024 / 1024
      // Skip empty pools to keep the per-batch output compact.
      if (usedMB > 0) {
        println(s"${pool.getName}: ${usedMB}MB (${pool.getType})")
      }
    }

    // Cumulative GC activity since JVM start.
    ManagementFactory.getGarbageCollectorMXBeans.asScala.foreach { gc =>
      println(s"GC[${gc.getName}]: 次数=${gc.getCollectionCount} 耗时=${gc.getCollectionTime}ms")
    }

    println(s"\n[物理内存] RSS: ${getProcessRSS / 1024 / 1024}MB")
  }

  // Process resident set size (RSS) in BYTES, read from /proc/<pid>/status (Linux only).
  // Returns 0 on any failure (e.g. non-Linux OS) so the metric never crashes a report.
  def getProcessRSS: Long = {
    try {
      val pid = ManagementFactory.getRuntimeMXBean.getName.split("@")(0)
      val source = scala.io.Source.fromFile(s"/proc/$pid/status")
      try {
        source.getLines()
          .find(_.startsWith("VmRSS:"))
          .map(_.split("\\s+").tail.head.toLong * 1024)  // VmRSS is reported in kB
          .getOrElse(0L)
      } finally {
        source.close()  // fix: original leaked the file handle on every call
      }
    } catch {
      case _: Exception => 0L
    }
  }
}

// Run the tracker directly so `:load`-ing this script in spark-shell executes it.
DetailedMemoryTracker.main(Array.empty)
[root@master huangtest]#

执行部分的

[root@master huangtest]# pwd
/home/huangtest

执行效果

scala> :load /home/huangtest/detailMemory2.scala
Loading /home/huangtest/detailMemory2.scala...
import org.apache.spark.sql.SparkSession
import java.lang.management.{ManagementFactory, MemoryPoolMXBean, MemoryType, MemoryMXBean}
import scala.collection.JavaConverters._
defined object DetailedMemoryTracker
2025-08-06 15:50:45,460 WARN sql.SparkSession$Builder: Using an existing SparkSession; some spark core configurations may not take effect.

=== 初始内存状态 ===
JVM最大内存(Xmx): 910MB
总物理内存: 438MB
空闲内存: 157MB

[堆内存初始状态]
初始提交: 438MB
初始使用: 281MB
最大容量: 910MB

[非堆内存初始状态]
初始提交: 154MB
初始使用: 146MB

[各内存池详情]
Code Cache (非堆):
  提交: 48MB
  使用: 48MB
Metaspace (非堆):
  提交: 94MB
  使用: 87MB
Compressed Class Space (非堆):
  提交: 11MB
  使用: 10MB
PS Eden Space (堆):
  提交: 230MB
  使用: 187MB
  最大: 287MB
PS Survivor Space (堆):
  提交: 23MB
  使用: 18MB
  最大: 23MB
PS Old Gen (堆):
  提交: 185MB
  使用: 75MB
  最大: 683MB

=== 开始内存分配跟踪 ===

[分配批次 1] 已分配 50MB

[当前内存概览]
总分配: 163MB
空闲内存: 281MB

[堆内存变化]
当前使用: 163MB
提交大小: 445MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 55MB (Heap memory)
PS Survivor Space: 30MB (Heap memory)
PS Old Gen: 77MB (Heap memory)
GC[PS Scavenge]: 次数=15 耗时=677ms
GC[PS MarkSweep]: 次数=3 耗时=1034ms

[物理内存] RSS: 670MB

[分配批次 2] 已分配 100MB

[当前内存概览]
总分配: 215MB
空闲内存: 230MB

[堆内存变化]
当前使用: 215MB
提交大小: 445MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 107MB (Heap memory)
PS Survivor Space: 30MB (Heap memory)
PS Old Gen: 77MB (Heap memory)
GC[PS Scavenge]: 次数=15 耗时=677ms
GC[PS MarkSweep]: 次数=3 耗时=1034ms

[物理内存] RSS: 670MB

[分配批次 3] 已分配 150MB

[当前内存概览]
总分配: 265MB
空闲内存: 180MB

[堆内存变化]
当前使用: 265MB
提交大小: 445MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 157MB (Heap memory)
PS Survivor Space: 30MB (Heap memory)
PS Old Gen: 77MB (Heap memory)
GC[PS Scavenge]: 次数=15 耗时=677ms
GC[PS MarkSweep]: 次数=3 耗时=1034ms

[物理内存] RSS: 670MB

[分配批次 4] 已分配 200MB

[当前内存概览]
总分配: 318MB
空闲内存: 127MB

[堆内存变化]
当前使用: 318MB
提交大小: 445MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 210MB (Heap memory)
PS Survivor Space: 30MB (Heap memory)
PS Old Gen: 77MB (Heap memory)
GC[PS Scavenge]: 次数=15 耗时=677ms
GC[PS MarkSweep]: 次数=3 耗时=1034ms

[物理内存] RSS: 670MB

[分配批次 5] 已分配 250MB

[当前内存概览]
总分配: 350MB
空闲内存: 429MB

[堆内存变化]
当前使用: 350MB
提交大小: 780MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 56MB (Heap memory)
PS Survivor Space: 8MB (Heap memory)
PS Old Gen: 285MB (Heap memory)
GC[PS Scavenge]: 次数=16 耗时=1452ms
GC[PS MarkSweep]: 次数=4 耗时=2135ms

[物理内存] RSS: 883MB

[分配批次 6] 已分配 300MB

[当前内存概览]
总分配: 400MB
空闲内存: 379MB

[堆内存变化]
当前使用: 400MB
提交大小: 780MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 106MB (Heap memory)
PS Survivor Space: 8MB (Heap memory)
PS Old Gen: 285MB (Heap memory)
GC[PS Scavenge]: 次数=16 耗时=1452ms
GC[PS MarkSweep]: 次数=4 耗时=2135ms

[物理内存] RSS: 883MB

[分配批次 7] 已分配 350MB

[当前内存概览]
总分配: 450MB
空闲内存: 329MB

[堆内存变化]
当前使用: 450MB
提交大小: 780MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 156MB (Heap memory)
PS Survivor Space: 8MB (Heap memory)
PS Old Gen: 285MB (Heap memory)
GC[PS Scavenge]: 次数=16 耗时=1452ms
GC[PS MarkSweep]: 次数=4 耗时=2135ms

[物理内存] RSS: 883MB

[分配批次 8] 已分配 400MB

[当前内存概览]
总分配: 500MB
空闲内存: 279MB

[堆内存变化]
当前使用: 500MB
提交大小: 780MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 206MB (Heap memory)
PS Survivor Space: 8MB (Heap memory)
PS Old Gen: 285MB (Heap memory)
GC[PS Scavenge]: 次数=16 耗时=1452ms
GC[PS MarkSweep]: 次数=4 耗时=2135ms

[物理内存] RSS: 868MB

[分配批次 9] 已分配 450MB

[当前内存概览]
总分配: 550MB
空闲内存: 229MB

[堆内存变化]
当前使用: 550MB
提交大小: 780MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 256MB (Heap memory)
PS Survivor Space: 8MB (Heap memory)
PS Old Gen: 285MB (Heap memory)
GC[PS Scavenge]: 次数=16 耗时=1452ms
GC[PS MarkSweep]: 次数=4 耗时=2135ms

[物理内存] RSS: 873MB

[分配批次 10] 已分配 500MB

[当前内存概览]
总分配: 602MB
空闲内存: 377MB

[堆内存变化]
当前使用: 602MB
提交大小: 979MB

[各内存池变化]
Code Cache: 48MB (Non-heap memory)
Metaspace: 87MB (Non-heap memory)
Compressed Class Space: 10MB (Non-heap memory)
PS Eden Space: 58MB (Heap memory)
PS Old Gen: 544MB (Heap memory)
GC[PS Scavenge]: 次数=17 耗时=2795ms
GC[PS MarkSweep]: 次数=5 耗时=2565ms

[物理内存] RSS: 1146MB
=== 内存分配测试完成 ===

scala>

"C:\Program Files\Java\jdk1.8.0_281\bin\java.exe" "-javaagent:D:\新建文件夹 (2)\IDEA\idea\IntelliJ IDEA 2019.3.3\lib\idea_rt.jar=59342" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.8.0_281\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\cldrdata.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\jfxrt.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\nashorn.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunpkcs11.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jfxswt.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\resources.jar;C:\Program 
Files\Java\jdk1.8.0_281\jre\lib\rt.jar;D:\carspark\out\production\carspark;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-library\jars\scala-library-2.12.10.jar;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-reflect\jars\scala-reflect-2.12.10.jar;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-library\srcs\scala-library-2.12.10-sources.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\accessors-smart-1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\activation-1.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aircompressor-0.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\algebra_2.12-2.0.0-M2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\antlr-runtime-3.5.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\antlr4-runtime-4.8-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aopalliance-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aopalliance-repackaged-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arpack_combined_all-0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-format-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-memory-core-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-memory-netty-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\audience-annotations-0.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\automaton-1.11-8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-ipc-1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-mapred-1.8.2-hadoop2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\bonecp-0.8.0.RELEASE.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\breeze-macros_2.12-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\breeze_2.12-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\cats-kernel_2.12-2.0.0-M4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\chill-java-0.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\chill_2.12-0.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-beanutils-1.9.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-cli-1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\comm
ons-codec-1.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-collections-3.2.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-compiler-3.0.16.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-compress-1.20.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-configuration2-2.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-crypto-1.1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-daemon-1.0.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-dbcp-1.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-httpclient-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-io-2.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-lang-2.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-lang3-3.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-logging-1.1.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-math3-3.4.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-net-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-pool-1.5.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-text-1.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\compress-lzf-1.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\core-1.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-client-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-framework-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-recipes-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-api-jdo-4.2.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-core-4.1.17.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-rdbms-4.1.19.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\derby-10.12.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\dnsjava-2.1.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ehcache-3.3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\flatbuffers-java-1.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\generex-1.0.2.jar;D:\spark\spark-3.1.1-bin-h
adoop3.2\jars\geronimo-jcache_1.0_spec-1.0-alpha-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\gson-2.2.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guava-14.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guice-4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guice-servlet-4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-annotations-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-auth-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-hdfs-client-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-core-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-jobclient-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-api-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-client-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-registry-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-server-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-server-web-proxy-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\HikariCP-2.5.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-beeline-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-cli-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-common-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-exec-2.3.7-core.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-jdbc-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-llap-common-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-metastore-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-serde-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-service-rpc-3.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-shims-0.23-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-shims-common-2.3.7.jar;D:\spark\spark-3.
1.1-bin-hadoop3.2\jars\hive-shims-scheduler-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-storage-api-2.7.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-vector-code-gen-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-api-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-locator-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-utils-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\htrace-core4-4.1.0-incubating.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\httpclient-4.5.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\httpcore-4.4.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\istack-commons-runtime-3.0.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ivy-2.4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-annotations-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-core-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-core-asl-1.9.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-databind-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-dataformat-yaml-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-datatype-jsr310-2.11.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-jaxrs-base-2.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-jaxrs-json-provider-2.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-mapper-asl-1.9.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-jaxb-annotations-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-paranamer-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-scala_2.12-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.activation-api-1.2.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.annotation-api-1.3.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.inject-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.servlet-api-4.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.validation-api-2.0.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.ws.rs-api-2.1.6.jar;D:\spark\s
park-3.1.1-bin-hadoop3.2\jars\jakarta.xml.bind-api-2.3.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\janino-3.0.16.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javassist-3.25.0-GA.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javax.inject-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javax.jdo-3.2.0-m3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javolution-5.5.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jaxb-api-2.2.11.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jaxb-runtime-2.3.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jcip-annotations-1.0-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jcl-over-slf4j-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jdo-api-3.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-client-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-common-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-container-servlet-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-container-servlet-core-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-hk2-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-media-jaxb-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-server-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\JLargeArrays-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jline-2.14.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\joda-time-2.10.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jodd-core-3.5.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jpam-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json-1.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json-smart-2.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-ast_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-core_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-jackson_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-scalap_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jsp-api-2.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jsr305-3.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jta-1.1.ja
r;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\JTransforms-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jul-to-slf4j-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-admin-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-client-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-common-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-core-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-crypto-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-identity-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-server-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-simplekdc-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-util-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-asn1-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-config-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-pkix-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-util-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-xdr-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kryo-shaded-4.0.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-client-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-admissionregistration-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-apiextensions-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-apps-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-autoscaling-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-batch-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-certificates-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-common-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-coordination-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-core-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-discovery-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-events-4.12.0.jar;D:\spark\spark-3.
1.1-bin-hadoop3.2\jars\kubernetes-model-extensions-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-metrics-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-networking-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-policy-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-rbac-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-scheduling-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-settings-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-storageclass-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\leveldbjni-all-1.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\libfb303-0.9.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\libthrift-0.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\log4j-1.2.17.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\logging-interceptor-3.12.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\lz4-java-1.7.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\machinist_2.12-0.6.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\macro-compat_2.12-1.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\mesos-1.4.0-shaded-protobuf.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-core-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-graphite-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-jmx-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-json-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-jvm-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\minlog-1.3.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\netty-all-4.1.51.Final.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\nimbus-jose-jwt-4.41.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\objenesis-2.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okhttp-2.7.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okhttp-3.12.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okio-1.14.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\opencsv-2.3.jar;D:\spark\spark-3.1.1-bin-had
oop3.2\jars\orc-core-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\orc-mapreduce-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\orc-shims-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\oro-2.0.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\osgi-resource-locator-1.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\paranamer-2.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-column-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-common-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-encoding-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-format-2.4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-hadoop-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-jackson-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\protobuf-java-2.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\py4j-0.10.9.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\pyrolite-4.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\re2j-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\RoaringBitmap-0.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-collection-compat_2.12-2.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-compiler-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-library-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-parser-combinators_2.12-1.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-reflect-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-xml_2.12-1.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\shapeless_2.12-2.3.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\shims-0.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\slf4j-api-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\slf4j-log4j12-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\snakeyaml-1.24.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\snappy-java-1.1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-catalyst_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-core_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\j
ars\spark-graphx_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-hive-thriftserver_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-hive_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-kubernetes_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-kvstore_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-launcher_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mesos_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib-local_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-common_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-shuffle_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-repl_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sketch_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sql_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-streaming_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1-tests.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-unsafe_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-yarn_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-macros_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-platform_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-util_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ST4-4.0.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax-api-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax2-api-3.1.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stream-2.9.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\super-csv-2.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\threeten-extra-1.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\token-provider-1.0.1.jar;D:\spark\
spark-3.1.1-bin-hadoop3.2\jars\transaction-api-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\univocity-parsers-2.9.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\velocity-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\woodstox-core-5.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xbean-asm7-shaded-4.15.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xz-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zjsonpatch-0.3.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zookeeper-3.4.14.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zstd-jni-1.4.8-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-vector-2.0.0.jar" car.LoadModelRideHailing Using Spark&#39;s default log4j profile: org/apache/spark/log4j-defaults.properties 25/06/08 17:05:07 INFO SparkContext: Running Spark version 3.1.1 25/06/08 17:05:07 INFO ResourceUtils: ============================================================== 25/06/08 17:05:07 INFO ResourceUtils: No custom resources configured for spark.driver. 25/06/08 17:05:07 INFO ResourceUtils: ============================================================== 25/06/08 17:05:07 INFO SparkContext: Submitted application: LoadModelRideHailing 25/06/08 17:05:07 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/06/08 17:05:07 INFO ResourceProfile: Limiting resource is cpu 25/06/08 17:05:07 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/06/08 17:05:07 INFO SecurityManager: Changing view acls to: wyatt 25/06/08 17:05:07 INFO SecurityManager: Changing modify acls to: wyatt 25/06/08 17:05:07 INFO SecurityManager: Changing view acls groups to: 25/06/08 17:05:07 INFO SecurityManager: Changing modify acls groups to: 25/06/08 17:05:07 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users 
with view permissions: Set(wyatt); groups with view permissions: Set(); users with modify permissions: Set(wyatt); groups with modify permissions: Set() 25/06/08 17:05:07 INFO Utils: Successfully started service &#39;sparkDriver&#39; on port 59361. 25/06/08 17:05:07 INFO SparkEnv: Registering MapOutputTracker 25/06/08 17:05:07 INFO SparkEnv: Registering BlockManagerMaster 25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/06/08 17:05:08 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/06/08 17:05:08 INFO DiskBlockManager: Created local directory at C:\Users\wyatt\AppData\Local\Temp\blockmgr-8fe065e2-024c-4e2f-8662-45d2fe3de444 25/06/08 17:05:08 INFO MemoryStore: MemoryStore started with capacity 1899.0 MiB 25/06/08 17:05:08 INFO SparkEnv: Registering OutputCommitCoordinator 25/06/08 17:05:08 INFO Utils: Successfully started service &#39;SparkUI&#39; on port 4040. 25/06/08 17:05:08 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://windows10.microdone.cn:4040 25/06/08 17:05:08 INFO Executor: Starting executor ID driver on host windows10.microdone.cn 25/06/08 17:05:08 INFO Utils: Successfully started service &#39;org.apache.spark.network.netty.NettyBlockTransferService&#39; on port 59392. 
package car

import org.apache.spark.ml.classification.{LogisticRegressionModel, RandomForestClassificationModel}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.{DataFrame, SparkSession, functions => F}

/**
 * Loads two previously trained ride-hailing classifiers (logistic regression and
 * random forest), scores them on held-out test data, and prints each model's accuracy.
 *
 * Optional CLI arguments (backward compatible — defaults match the original
 * hard-coded paths):
 *   args(0) = test-data CSV path
 *   args(1) = logistic-regression model directory
 *   args(2) = random-forest model directory
 */
object LoadModelRideHailing {

  def main(args: Array[String]): Unit = {
    // Paths are overridable from the command line; defaults preserve the
    // original behavior for zero-argument invocation.
    val testDataPath = args.lift(0).getOrElse("C:\\Users\\wyatt\\Documents\\ride_hailing_test_data.csv")
    val logisticPath = args.lift(1).getOrElse("C:\\Users\\wyatt\\Documents\\ride_hailing_logistic_model")
    val forestPath   = args.lift(2).getOrElse("C:\\Users\\wyatt\\Documents\\ride_hailing_random_forest_model")

    val spark = SparkSession.builder()
      .master("local[3]")
      .appName("LoadModelRideHailing")
      .getOrCreate()
    spark.sparkContext.setLogLevel("Error")

    // try/finally guarantees the SparkSession is stopped even when validation
    // throws — the original leaked the session on the "missing features column"
    // failure path (visible in the accompanying run log).
    try {
      // Header-only CSV read: every column arrives as StringType, hence the cast.
      val testData = spark.read.option("header", "true").csv(testDataPath)
      val labeled  = testData.withColumn("label", F.col("label").cast("double"))

      // Fail fast when the expected feature-vector column is absent.
      // NOTE(review): even when present, a CSV-sourced "features" column is a
      // string, not a VectorUDT — the saved models presumably expect assembled
      // vectors; confirm the test data went through the same feature-engineering
      // stage as the training data.
      if (!labeled.columns.contains("features")) {
        throw new IllegalArgumentException("测试数据中不包含 features 列,请检查数据!")
      }

      val logisticAcc = accuracyOf(LogisticRegressionModel.load(logisticPath).transform(labeled))
      println("逻辑回归模型后期数据准确率:" + logisticAcc)

      val forestAcc = accuracyOf(RandomForestClassificationModel.load(forestPath).transform(labeled))
      println("随机森林模型后期数据准确率:" + forestAcc)
    } finally {
      spark.stop()
    }
  }

  /**
   * Multiclass accuracy of `predictions`, evaluated over the standard
   * "label" / "prediction" columns. Extracted to remove the copy-pasted
   * evaluator blocks in the original.
   */
  private def accuracyOf(predictions: DataFrame): Double =
    new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("accuracy")
      .evaluate(predictions)
}
06-09
STARTUP_MSG: host = 小明同学/192.168.3.104 STARTUP_MSG: args = [] STARTUP_MSG: version = 3.3.4 STARTUP_MSG: classpath = D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\etc\hadoop;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\accessors-smart-2.4.7.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\animal-sniffer-annotations-1.17.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\asm-5.0.4.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\audience-annotations-0.5.0.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\avro-1.7.7.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\checker-qual-2.5.2.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-beanutils-1.9.4.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-cli-1.2.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-codec-1.15.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-collections-3.2.2.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-compress-1.21.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-configuration2-2.1.1.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-daemon-1.0.13.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-io-2.8.0.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-lang3-3.12.0.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-logging-1.1.3.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-math3-3.1.1.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-net-3.6.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\commons-text-1.4.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\curator-client-4.2.0.jar;D:\Hadoop\hadoop3.3.4\hadoop-3.3.4\share\hadoop\common\lib\curator-framework-4.2.0.jar;D:\Hadoop\hadoo
04-02
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值