Hadoop 1.1.2 hdfs-default.xml

This article details the configuration parameters of the Hadoop Distributed File System (HDFS) and their meanings, covering NameNode- and DataNode-related settings such as addresses, ports, logging level, and permission checking.
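
As the header comment in the file notes, these defaults are not meant to be edited in place; site-specific values go into conf/hdfs-site.xml, which takes precedence over hdfs-default.xml. A minimal sketch of such an override file (the values and paths below are illustrative assumptions, not recommendations) might look like this:

<?xml version="1.0"?>
<configuration>

<property>
  <name>dfs.replication</name>
  <value>2</value>
  <description>Keep two replicas of each block instead of the default three.</description>
</property>

<property>
  <name>dfs.block.size</name>
  <value>134217728</value>
  <description>Use 128 MB blocks instead of the default 64 MB (67108864 bytes).</description>
</property>

<property>
  <name>dfs.data.dir</name>
  <value>/data/hadoop/dfs/data</value>
  <description>Illustrative path: store DataNode blocks outside ${hadoop.tmp.dir}.</description>
</property>

</configuration>

The full default file follows.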
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>


<!-- Do not modify this file directly.  Instead, copy entries that you -->
<!-- wish to modify from this file into hdfs-site.xml and change them -->
<!-- there.  If hdfs-site.xml does not already exist, create it.      -->


<configuration>


<property>
  <name>dfs.namenode.logging.level</name>
  <value>info</value>
  <description>The logging level for dfs namenode. Other values are "dir" (trace
  namespace mutations), "block" (trace block under/over replications and block
  creations/deletions), or "all".</description>
</property>


<property>
  <name>dfs.secondary.http.address</name>
  <value>0.0.0.0:50090</value>
  <description>
    The secondary namenode http server address and port.
    If the port is 0 then the server will start on a free port.
  </description>
</property>


<property>
  <name>dfs.datanode.address</name>
  <value>0.0.0.0:50010</value>
  <description>
    The datanode server address and port for data transfer.
    If the port is 0 then the server will start on a free port.
  </description>
</property>


<property>
  <name>dfs.datanode.http.address</name>
  <value>0.0.0.0:50075</value>
  <description>
    The datanode http server address and port.
    If the port is 0 then the server will start on a free port.
  </description>
</property>


<property>
  <name>dfs.datanode.ipc.address</name>
  <value>0.0.0.0:50020</value>
  <description>
    The datanode ipc server address and port.
    If the port is 0 then the server will start on a free port.
  </description>
</property>


<property>
  <name>dfs.datanode.handler.count</name>
  <value>3</value>
  <description>The number of server threads for the datanode.</description>
</property>


<property>
  <name>dfs.http.address</name>
  <value>0.0.0.0:50070</value>
  <description>
    The address and the base port where the dfs namenode web ui will listen on.
    If the port is 0 then the server will start on a free port.
  </description>
</property>


<property>
  <name>dfs.https.enable</name>
  <value>false</value>
  <description>Decide if HTTPS(SSL) is supported on HDFS
  </description>
</property>


<property>
  <name>dfs.https.need.client.auth</name>
  <value>false</value>
  <description>Whether SSL client certificate authentication is required
  </description>
</property>


<property>
  <name>dfs.https.server.keystore.resource</name>
  <value>ssl-server.xml</value>
  <description>Resource file from which ssl server keystore
  information will be extracted
  </description>
</property>


<property>
  <name>dfs.https.client.keystore.resource</name>
  <value>ssl-client.xml</value>
  <description>Resource file from which ssl client keystore
  information will be extracted
  </description>
</property>


<property>
  <name>dfs.datanode.https.address</name>
  <value>0.0.0.0:50475</value>
</property>


<property>
  <name>dfs.https.address</name>
  <value>0.0.0.0:50470</value>
</property>


 <property>
  <name>dfs.datanode.dns.interface</name>
  <value>default</value>
  <description>The name of the Network Interface from which a data node should 
  report its IP address.
  </description>
 </property>
 
<property>
  <name>dfs.datanode.dns.nameserver</name>
  <value>default</value>
  <description>The host name or IP address of the name server (DNS)
  which a DataNode should use to determine the host name used by the
  NameNode for communication and display purposes.
  </description>
 </property>
 
 
 
<property>
  <name>dfs.replication.considerLoad</name>
  <value>true</value>
  <description>Decide if chooseTarget considers the target's load or not
  </description>
</property>
<property>
  <name>dfs.default.chunk.view.size</name>
  <value>32768</value>
  <description>The number of bytes to view for a file on the browser.
  </description>
</property>


<property>
  <name>dfs.datanode.du.reserved</name>
  <value>0</value>
  <description>Reserved space in bytes per volume. Always leave this much space free for non dfs use.
  </description>
</property>


<property>
  <name>dfs.name.dir</name>
  <value>${hadoop.tmp.dir}/dfs/name</value>
  <description>Determines where on the local filesystem the DFS name node
      should store the name table(fsimage).  If this is a comma-delimited list
      of directories then the name table is replicated in all of the
      directories, for redundancy. </description>
</property>


<property>
  <name>dfs.name.edits.dir</name>
  <value>${dfs.name.dir}</value>
  <description>Determines where on the local filesystem the DFS name node
      should store the transaction (edits) file. If this is a comma-delimited list
      of directories then the transaction file is replicated in all of the 
      directories, for redundancy. Default value is same as dfs.name.dir
  </description>
</property>
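<!-- Note: dfs.name.dir and dfs.name.edits.dir accept a comma-delimited list of
     directories, and the default above shows ${...} expansion: edits simply
     follow dfs.name.dir. A hedged hdfs-site.xml sketch with purely illustrative
     mount points, writing the namespace metadata to two locations:

<property>
  <name>dfs.name.dir</name>
  <value>/data/1/dfs/name,/mnt/nfs/dfs/name</value>
</property>

<property>
  <name>dfs.name.edits.dir</name>
  <value>${dfs.name.dir}</value>
</property>
-->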
<property>
  <name>dfs.web.ugi</name>
  <value>webuser,webgroup</value>
  <description>The user account used by the web interface.
    Syntax: USERNAME,GROUP1,GROUP2, ...
  </description>
</property>


<property>
  <name>dfs.permissions</name>
  <value>true</value>
  <description>
    If "true", enable permission checking in HDFS.
    If "false", permission checking is turned off,
    but all other behavior is unchanged.
    Switching from one parameter value to the other does not change the mode,
    owner or group of files or directories.
  </description>
</property>
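<!-- Example override (hdfs-site.xml), an assumption for a single-user
     development setup rather than a recommendation: permission checking can be
     switched off, and per the description above this does not change the stored
     mode, owner or group of any file or directory.

<property>
  <name>dfs.permissions</name>
  <value>false</value>
</property>
-->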


<property>
  <name>dfs.permissions.supergroup</name>
  <value>supergroup</value>
  <description>The name of the group of super-users.</description>
</property>


<property>
  <name>dfs.block.access.token.enable</name>
  <value>false</value>
  <description>
    If "true", access tokens are used as capabilities for accessing datanodes.
    If "false", no access tokens are checked on accessing datanodes.
  </description>
</property>


<property>
  <name>dfs.block.access.key.update.interval</name>
  <value>600</value>
  <description>
    Interval in minutes at which namenode updates its access keys.
  </description>
</property>


<property>
  <name>dfs.block.access.token.lifetime</name>
  <value>600</value>
  <description>The lifetime of access tokens in minutes.</description>
</property>




<property>
  <name>dfs.data.dir</name>
  <value>${hadoop.tmp.dir}/dfs/data</value>
  <description>Determines where on the local filesystem a DFS data node
  should store its blocks.  If this is a comma-delimited
  list of directories, then data will be stored in all named
  directories, typically on different devices.
  Directories that do not exist are ignored.
  </description>
</property>


<property>
  <name>dfs.datanode.data.dir.perm</name>
  <value>755</value>
  <description>Permissions for the directories on the local filesystem where
  the DFS data node stores its blocks. The permissions can either be octal or
  symbolic.</description>
</property>


<property>
  <name>dfs.replication</name>
  <value>3</value>
  <description>Default block replication. 
  The actual number of replications can be specified when the file is created.
  The default is used if replication is not specified at create time.
  </description>
</property>


<property>
  <name>dfs.replication.max</name>
  <value>512</value>
  <description>Maximal block replication. 
  </description>
</property>


<property>
  <name>dfs.replication.min</name>
  <value>1</value>
  <description>Minimal block replication. 
  </description>
</property>


<property>
  <name>dfs.block.size</name>
  <value>67108864</value>
  <description>The default block size for new files.</description>
</property>


<property>
  <name>dfs.df.interval</name>
  <value>60000</value>
  <description>Disk usage statistics refresh interval in msec.</description>
</property>


<property>
  <name>dfs.client.block.write.retries</name>
  <value>3</value>
  <description>The number of retries for writing blocks to the data nodes, 
  before we signal failure to the application.
  </description>
</property>


<property>
  <name>dfs.blockreport.intervalMsec</name>
  <value>3600000</value>
  <description>Determines block reporting interval in milliseconds.</description>
</property>


<property>
  <name>dfs.blockreport.initialDelay</name>
  <value>0</value>
  <description>Delay for first block report in seconds.</description>
</property>


<property>
  <name>dfs.heartbeat.interval</name>
  <value>3</value>
  <description>Determines datanode heartbeat interval in seconds.</description>
</property>


<property>
  <name>dfs.namenode.handler.count</name>
  <value>10</value>
  <description>The number of server threads for the namenode.</description>
</property>


<property>
  <name>dfs.safemode.threshold.pct</name>
  <value>0.999f</value>
  <description>
    Specifies the percentage of blocks that should satisfy 
    the minimal replication requirement defined by dfs.replication.min.
    Values less than or equal to 0 mean not to wait for any particular
    percentage of blocks before exiting safemode.
    Values greater than 1 will make safe mode permanent.
  </description>
 </property>
 
<property>
  <name>dfs.namenode.safemode.min.datanodes</name>
  <value>0</value>
  <description>
    Specifies the number of datanodes that must be considered alive
    before the name node exits safemode.
    Values less than or equal to 0 mean not to take the number of live
    datanodes into account when deciding whether to remain in safe mode
    during startup.
    Values greater than the number of datanodes in the cluster
    will make safe mode permanent.
  </description>
</property>


<property>
  <name>dfs.safemode.extension</name>
  <value>30000</value>
  <description>
    Determines extension of safe mode in milliseconds 
    after the threshold level is reached.
  </description>
</property>


<property>
  <name>dfs.balance.bandwidthPerSec</name>
  <value>1048576</value>
  <description>
        Specifies the maximum amount of bandwidth that each datanode
        can utilize for balancing purposes, in terms of
        the number of bytes per second.
  </description>
</property>


<property>
  <name>dfs.hosts</name>
  <value></value>
  <description>Names a file that contains a list of hosts that are
  permitted to connect to the namenode. The full pathname of the file
  must be specified.  If the value is empty, all hosts are
  permitted.</description>
</property>


<property>
  <name>dfs.hosts.exclude</name>
  <value></value>
  <description>Names a file that contains a list of hosts that are
  not permitted to connect to the namenode.  The full pathname of the
  file must be specified.  If the value is empty, no hosts are
  excluded.</description>
</property> 


<property>
  <name>dfs.max.objects</name>
  <value>0</value>
  <description>The maximum number of files, directories and blocks
  dfs supports. A value of zero indicates no limit to the number
  of objects that dfs supports.
  </description>
</property>


<property>
  <name>dfs.namenode.decommission.interval</name>
  <value>30</value>
  <description>Namenode periodicity in seconds to check if decommission is 
  complete.</description>
</property>


<property>
  <name>dfs.namenode.decommission.nodes.per.interval</name>
  <value>5</value>
  <description>The number of nodes namenode checks if decommission is complete
  in each dfs.namenode.decommission.interval.</description>
</property>


<property>
  <name>dfs.replication.interval</name>
  <value>3</value>
  <description>The periodicity in seconds with which the namenode computes 
  replication work for datanodes.</description>
</property>


<property>
  <name>dfs.access.time.precision</name>
  <value>3600000</value>
  <description>The access time for an HDFS file is precise up to this value.
               The default value is 1 hour. Setting a value of 0 disables
               access times for HDFS.
  </description>
</property>


<property>
  <name>dfs.support.append</name>
  <description>
    This option is no longer supported. HBase no longer requires that
    this option be enabled as sync is now enabled by default. See
    HADOOP-8230 for additional information.
  </description>
</property>


<property>
  <name>dfs.namenode.delegation.key.update-interval</name>
  <value>86400000</value>
  <description>The update interval for master key for delegation tokens 
       in the namenode in milliseconds.
  </description>
</property>


<property>
  <name>dfs.namenode.delegation.token.max-lifetime</name>
  <value>604800000</value>
  <description>The maximum lifetime in milliseconds for which a delegation 
      token is valid.
  </description>
</property>


<property>
  <name>dfs.namenode.delegation.token.renew-interval</name>
  <value>86400000</value>
  <description>The renewal interval for delegation token in milliseconds.
  </description>
</property>


<property>
  <name>dfs.datanode.failed.volumes.tolerated</name>
  <value>0</value>
  <description>The number of volumes that are allowed to
  fail before a datanode stops offering service. By default
  any volume failure will cause a datanode to shutdown.
  </description>
</property>
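<!-- Example override (hdfs-site.xml), illustrative for DataNodes that carry
     several data disks: tolerate one failed volume before the datanode shuts
     itself down.

<property>
  <name>dfs.datanode.failed.volumes.tolerated</name>
  <value>1</value>
</property>
-->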


<property>
  <name>dfs.datanode.max.xcievers</name>
  <value>4096</value>
  <description>Specifies the maximum number of threads to use for transferring data
  in and out of the DN.
  </description>
</property>


<property>
  <name>dfs.client.use.datanode.hostname</name>
  <value>false</value>
  <description>Whether clients should use datanode hostnames when
    connecting to datanodes.
  </description>
</property>


<property>
  <name>dfs.datanode.use.datanode.hostname</name>
  <value>false</value>
  <description>Whether datanodes should use datanode hostnames when
    connecting to other datanodes for data transfer.
  </description>
</property>


<property>
  <name>dfs.client.local.interfaces</name>
  <value></value>
  <description>A comma separated list of network interface names to use
    for data transfer between the client and datanodes. When creating
    a connection to read from or write to a datanode, the client
    chooses one of the specified interfaces at random and binds its
    socket to the IP of that interface. Individual names may be
    specified as either an interface name (eg "eth0"), a subinterface
    name (eg "eth0:0"), or an IP address (which may be specified using
    CIDR notation to match a range of IPs).
  </description>
</property>


<property>
  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
  <value>${dfs.web.authentication.kerberos.principal}</value>
</property>


<property>
  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
  <value>${dfs.web.authentication.kerberos.principal}</value>
</property>


<property>
  <name>dfs.namenode.invalidate.work.pct.per.iteration</name>
  <value>0.32f</value>
  <description>
    *Note*: Advanced property. Change with caution.
    This determines the percentage amount of block
    invalidations (deletes) to do over a single DN heartbeat
    deletion command. The final deletion count is determined by applying this
    percentage to the number of live nodes in the system.
    The resultant number is the number of blocks from the deletion list
    chosen for proper invalidation over a single heartbeat of a single DN.
    Value should be a positive, non-zero percentage in float notation (X.Yf),
    with 1.0f meaning 100%.
  </description>
</property>


<property>
  <name>dfs.namenode.replication.work.multiplier.per.iteration</name>
  <value>2</value>
  <description>
    *Note*: Advanced property. Change with caution.
    This determines the total amount of block transfers to begin in
    parallel at a DN, for replication, when such a command list is being
    sent over a DN heartbeat by the NN. The actual number is obtained by
    multiplying this multiplier with the total number of live nodes in the
    cluster. The result number is the number of blocks to begin transfers
    immediately for, per DN heartbeat. This number can be any positive,
    non-zero integer.
  </description>
</property>


<property>
  <name>dfs.namenode.check.stale.datanode</name>
  <value>false</value>
  <description>
  Indicate whether or not to check "stale" datanodes whose 
  heartbeat messages have not been received by the namenode 
  for more than a specified time interval. If this configuration 
  parameter is set as true, the stale datanodes will be moved to 
  the end of the target node list for reading. The writing will 
  also try to avoid stale nodes.
  </description>
</property>


<property>
  <name>dfs.namenode.stale.datanode.interval</name>
  <value>30000</value>
  <description>
  Default time interval for marking a datanode as "stale", i.e., if 
  the namenode has not received a heartbeat message from a datanode for 
  more than this time interval, the datanode will be marked and treated 
  as "stale" by default.
  </description>
</property>


</configuration>
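
Finally, the stale-datanode handling defined at the end of the file is disabled by default. A hedged hdfs-site.xml sketch that turns it on, keeping the default 30-second interval explicit (both property names are taken from the file above):

<configuration>

<property>
  <name>dfs.namenode.check.stale.datanode</name>
  <value>true</value>
  <description>Push datanodes without a recent heartbeat to the end of the
  read target list and avoid them for writes.</description>
</property>

<property>
  <name>dfs.namenode.stale.datanode.interval</name>
  <value>30000</value>
  <description>A datanode is treated as stale after 30 seconds without a heartbeat.</description>
</property>

</configuration>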


spark-3.1.1-bin-hadoop3.2\jars\spark-launcher_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mesos_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib-local_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-common_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-shuffle_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-repl_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sketch_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sql_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-streaming_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1-tests.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-unsafe_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-yarn_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-macros_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-platform_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-util_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ST4-4.0.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax-api-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax2-api-3.1.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stream-2.9.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\super-csv-2.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\threeten-extra-1.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\token-provider-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\transaction-api-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\univocity-parsers-2.9.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\velocity-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\woodstox-core-5.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xbean-asm7-shaded-4.15.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xz-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zjsonpatch-0.3.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zookeeper-3.4.14.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zstd-jni-1.4.8-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-vector-2.0.0.jar" car.LoadModelRideHailing Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/06/08 17:05:07 INFO SparkContext: Running Spark version 3.1.1 25/06/08 17:05:07 INFO ResourceUtils: ============================================================== 25/06/08 17:05:07 INFO ResourceUtils: No custom resources configured for spark.driver. 
25/06/08 17:05:07 INFO ResourceUtils: ==============================================================
25/06/08 17:05:07 INFO SparkContext: Submitted application: LoadModelRideHailing
25/06/08 17:05:07 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
25/06/08 17:05:07 INFO ResourceProfile: Limiting resource is cpu
25/06/08 17:05:07 INFO ResourceProfileManager: Added ResourceProfile id: 0
25/06/08 17:05:07 INFO SecurityManager: Changing view acls to: wyatt
25/06/08 17:05:07 INFO SecurityManager: Changing modify acls to: wyatt
25/06/08 17:05:07 INFO SecurityManager: Changing view acls groups to:
25/06/08 17:05:07 INFO SecurityManager: Changing modify acls groups to:
25/06/08 17:05:07 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(wyatt); groups with view permissions: Set(); users with modify permissions: Set(wyatt); groups with modify permissions: Set()
25/06/08 17:05:07 INFO Utils: Successfully started service 'sparkDriver' on port 59361.
25/06/08 17:05:07 INFO SparkEnv: Registering MapOutputTracker
25/06/08 17:05:07 INFO SparkEnv: Registering BlockManagerMaster
25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
25/06/08 17:05:08 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
25/06/08 17:05:08 INFO DiskBlockManager: Created local directory at C:\Users\wyatt\AppData\Local\Temp\blockmgr-8fe065e2-024c-4e2f-8662-45d2fe3de444
25/06/08 17:05:08 INFO MemoryStore: MemoryStore started with capacity 1899.0 MiB
25/06/08 17:05:08 INFO SparkEnv: Registering OutputCommitCoordinator
25/06/08 17:05:08 INFO Utils: Successfully started service 'SparkUI' on port 4040.
25/06/08 17:05:08 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://windows10.microdone.cn:4040
25/06/08 17:05:08 INFO Executor: Starting executor ID driver on host windows10.microdone.cn
25/06/08 17:05:08 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 59392.
25/06/08 17:05:08 INFO NettyBlockTransferService: Server created on windows10.microdone.cn:59392
25/06/08 17:05:08 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
25/06/08 17:05:08 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, windows10.microdone.cn, 59392, None)
25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: Registering block manager windows10.microdone.cn:59392 with 1899.0 MiB RAM, BlockManagerId(driver, windows10.microdone.cn, 59392, None)
25/06/08 17:05:08 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, windows10.microdone.cn, 59392, None)
25/06/08 17:05:08 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, windows10.microdone.cn, 59392, None)
Exception in thread "main" java.lang.IllegalArgumentException: Test data does not contain a features column, please check the data!
    at car.LoadModelRideHailing$.main(LoadModelRideHailing.scala:23)
    at car.LoadModelRideHailing.main(LoadModelRideHailing.scala)

Process finished with exit code 1

package car

import org.apache.spark.ml.classification.{LogisticRegressionModel, RandomForestClassificationModel}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.{SparkSession, functions => F}

object LoadModelRideHailing {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[3]")
      .appName("LoadModelRideHailing")
      .getOrCreate()
    spark.sparkContext.setLogLevel("Error")

    // Use the test data that has already been through feature engineering
    val TestData = spark.read.option("header", "true").csv("C:\\Users\\wyatt\\Documents\\ride_hailing_test_data.csv")
    // Cast the label column to a numeric type
    val testDataWithNumericLabel = TestData.withColumn("label", F.col("label").cast("double"))

    // Check whether the features column exists
    if (!testDataWithNumericLabel.columns.contains("features")) {
      throw new IllegalArgumentException("Test data does not contain a features column, please check the data!")
    }

    // Corrected model path (make sure the folder exists and contains the metadata)
    val LogisticModel = LogisticRegressionModel.load("C:\\Users\\wyatt\\Documents\\ride_hailing_logistic_model") // example path
    val LogisticPre = LogisticModel.transform(testDataWithNumericLabel)
    val LogisticAcc = new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("accuracy")
      .evaluate(LogisticPre)
    println("Logistic regression model accuracy on the later data: " + LogisticAcc)

    // Random forest model path corrected in the same way
    val RandomForest = RandomForestClassificationModel.load("C:\\Users\\wyatt\\Documents\\ride_hailing_random_forest_model") // example path
    val RandomForestPre = RandomForest.transform(testDataWithNumericLabel)
    val RandomForestAcc = new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("accuracy")
      .evaluate(RandomForestPre)
    println("Random forest model accuracy on the later data: " + RandomForestAcc)

    spark.stop()
  }
}
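The IllegalArgumentException above is expected with this input: spark.read.csv only yields plain string columns, so a CSV can never carry the vector-typed features column the loaded models were trained on. Below is a minimal sketch of how that column could be rebuilt with VectorAssembler before calling transform; the input column names f1, f2 and f3 are hypothetical placeholders and must be replaced with the actual columns (in the same order) the models were trained on.

import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.sql.{SparkSession, functions => F}

object BuildFeaturesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[3]").appName("BuildFeaturesSketch").getOrCreate()

    // Every column read from CSV comes back as a string
    val raw = spark.read.option("header", "true").csv("C:\\Users\\wyatt\\Documents\\ride_hailing_test_data.csv")

    // Hypothetical raw feature columns; replace with the columns used at training time
    val featureCols = Array("f1", "f2", "f3")

    // Cast the label and the feature columns to double
    val typed = featureCols.foldLeft(raw.withColumn("label", F.col("label").cast("double"))) {
      (df, c) => df.withColumn(c, F.col(c).cast("double"))
    }

    // Assemble the numeric columns into the vector column the models expect
    val withFeatures = new VectorAssembler()
      .setInputCols(featureCols)
      .setOutputCol("features")
      .transform(typed)

    withFeatures.select("label", "features").show(5, truncate = false)
    spark.stop()
  }
}

If the training job wrapped the assembler in a Pipeline, loading and applying the fitted PipelineModel instead of the bare classifiers avoids this mismatch entirely, because the saved pipeline rebuilds the features column itself.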
"Z:\Program Files\Java\jdk1.8.0_181\bin\java.exe" "-javaagent:Z:\Java文件\lntelliJ IDEA\IDEA安装包\IntelliJ IDEA Community Edition 2022.2.1\lib\idea_rt.jar=52236:Z:\Java文件\lntelliJ IDEA\IDEA安装包\IntelliJ IDEA Community Edition 2022.2.1\bin" -Dfile.encoding=UTF-8 -classpath "Z:\Program Files\Java\jdk1.8.0_181\jre\lib\charsets.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\deploy.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\access-bridge-64.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\cldrdata.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\dnsns.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\jaccess.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\jfxrt.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\localedata.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\nashorn.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunec.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunjce_provider.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunmscapi.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunpkcs11.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\zipfs.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\javaws.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jce.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jfr.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jfxswt.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jsse.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\management-agent.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\plugin.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\resources.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\rt.jar;Z:\sparkdemo\sparkdemo2402\sparkdemo2402\target\classes;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-library.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-parser-combinators_2.12-1.0.7.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-reflect.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-swing_2.12-2.0.3.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-xml_2.12-1.0.6.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-client\3.1.3\hadoop-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-common\3.1.3\hadoop-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-math3\3.1.1\commons-math3-3.1.1.jar;C:\Users\zxr17\.m2\repository\commons-io\commons-io\2.5\commons-io-2.5.jar;C:\Users\zxr17\.m2\repository\commons-net\commons-net\3.6\commons-net-3.6.jar;C:\Users\zxr17\.m2\repository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-servlet\9.3.24.v20180605\jetty-servlet-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-security\9.3.24.v20180605\jetty-security-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-webapp\9.3.24.v20180605\jetty-webapp-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-xml\9.3.24.v20180605\jetty-xml-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\javax\servlet\jsp\jsp-api\2.1\jsp-api-2.1.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-servlet\1.19\jersey-servlet-1.19.jar;C:\Users\zxr17\.m2\repository\commons-logging\commons-logging\1.1.3\commons-logging-1.1.3.jar;C:\Users\zxr17\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\zxr17\.m2\repository\commons-beanutils\commons-beanutils\1.9.3\commons-beanutils-1.9.3.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-configuration2\2.1.1\commons-configuration2-2.1.1.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\c
ommons-lang3\3.4\commons-lang3-3.4.jar;C:\Users\zxr17\.m2\repository\com\google\re2j\re2j\1.1\re2j-1.1.jar;C:\Users\zxr17\.m2\repository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\Users\zxr17\.m2\repository\com\google\code\gson\gson\2.2.4\gson-2.2.4.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-client\2.13.0\curator-client-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-recipes\2.13.0\curator-recipes-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\htrace\htrace-core4\4.1.0-incubating\htrace-core4-4.1.0-incubating.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-compress\1.18\commons-compress-1.18.jar;C:\Users\zxr17\.m2\repository\org\codehaus\woodstox\stax2-api\3.1.4\stax2-api-3.1.4.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\woodstox\woodstox-core\5.0.3\woodstox-core-5.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-hdfs-client\3.1.3\hadoop-hdfs-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\com\squareup\okhttp\okhttp\2.7.5\okhttp-2.7.5.jar;C:\Users\zxr17\.m2\repository\com\squareup\okio\okio\1.6.0\okio-1.6.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.7.8\jackson-annotations-2.7.8.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-api\3.1.3\hadoop-yarn-api-3.1.3.jar;C:\Users\zxr17\.m2\repository\javax\xml\bind\jaxb-api\2.2.11\jaxb-api-2.2.11.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-client\3.1.3\hadoop-yarn-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-core\3.1.3\hadoop-mapreduce-client-core-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-common\3.1.3\hadoop-yarn-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-util\9.3.24.v20180605\jetty-util-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-core\1.19\jersey-core-1.19.jar;C:\Users\zxr17\.m2\repository\javax\ws\rs\jsr311-api\1.1.1\jsr311-api-1.1.1.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-client\1.19\jersey-client-1.19.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jackson-module-jaxb-annotations\2.7.8\jackson-module-jaxb-annotations-2.7.8.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\jaxrs\jackson-jaxrs-json-provider\2.7.8\jackson-jaxrs-json-provider-2.7.8.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\jaxrs\jackson-jaxrs-base\2.7.8\jackson-jaxrs-base-2.7.8.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-jobclient\3.1.3\hadoop-mapreduce-client-jobclient-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-common\3.1.3\hadoop-mapreduce-client-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-annotations\3.1.3\hadoop-annotations-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-auth\3.1.3\hadoop-auth-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\slf4j\slf4j-api\1.7.25\slf4j-api-1.7.25.jar;C:\Users\zxr17\.m2\repository\commons-codec\commons-codec\1.11\commons-codec-1.11.jar;C:\Users\zxr17\.m2\repository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\Users\zxr17\.m2\repository\org\slf4j\slf4j-log4j12\1.7.25\slf4j-log4j12-1.7.25.jar;C:\Users\zxr17\.m2\repository\org\apache\httpcomponents\httpclient\4.5.2\httpclient-4.5.2.jar;C:\Users\zxr17\.m2\repository\org\apache\httpcomponents\httpcore\4.4.4\httpcore-4.4.4.jar;C:\Users\zxr17\.m2\repository\com\nimbusds\nimbus-jose-jwt\4.41.1\nimbus-jose-jwt-4.41.1.jar;C:\Users\zxr17\.m2\repository\com\g
ithub\stephenc\jcip\jcip-annotations\1.0-1\jcip-annotations-1.0-1.jar;C:\Users\zxr17\.m2\repository\net\minidev\json-smart\2.3\json-smart-2.3.jar;C:\Users\zxr17\.m2\repository\net\minidev\accessors-smart\1.2\accessors-smart-1.2.jar;C:\Users\zxr17\.m2\repository\org\ow2\asm\asm\5.0.4\asm-5.0.4.jar;C:\Users\zxr17\.m2\repository\org\apache\zookeeper\zookeeper\3.4.13\zookeeper-3.4.13.jar;C:\Users\zxr17\.m2\repository\org\apache\yetus\audience-annotations\0.5.0\audience-annotations-0.5.0.jar;C:\Users\zxr17\.m2\repository\io\netty\netty\3.10.6.Final\netty-3.10.6.Final.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-framework\2.13.0\curator-framework-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-simplekdc\1.0.1\kerb-simplekdc-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-client\1.0.1\kerb-client-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-config\1.0.1\kerby-config-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-core\1.0.1\kerb-core-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-pkix\1.0.1\kerby-pkix-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-asn1\1.0.1\kerby-asn1-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-util\1.0.1\kerby-util-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-common\1.0.1\kerb-common-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-crypto\1.0.1\kerb-crypto-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-util\1.0.1\kerb-util-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\token-provider\1.0.1\token-provider-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-admin\1.0.1\kerb-admin-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-server\1.0.1\kerb-server-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-identity\1.0.1\kerb-identity-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-xdr\1.0.1\kerby-xdr-1.0.1.jar;C:\Users\zxr17\.m2\repository\com\google\guava\guava\27.0-jre\guava-27.0-jre.jar;C:\Users\zxr17\.m2\repository\com\google\guava\failureaccess\1.0\failureaccess-1.0.jar;C:\Users\zxr17\.m2\repository\com\google\guava\listenablefuture\9999.0-empty-to-avoid-conflict-with-guava\listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar;C:\Users\zxr17\.m2\repository\org\checkerframework\checker-qual\2.5.2\checker-qual-2.5.2.jar;C:\Users\zxr17\.m2\repository\com\google\errorprone\error_prone_annotations\2.2.0\error_prone_annotations-2.2.0.jar;C:\Users\zxr17\.m2\repository\com\google\j2objc\j2objc-annotations\1.1\j2objc-annotations-1.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\mojo\animal-sniffer-annotations\1.17\animal-sniffer-annotations-1.17.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-sql_2.12\3.0.3\spark-sql_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\com\univocity\univocity-parsers\2.9.0\univocity-parsers-2.9.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-sketch_2.12\3.0.3\spark-sketch_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-core_2.12\3.0.3\spark-core_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\com\thoughtworks\paranamer\paranamer\2.8\paranamer-2.8.jar;C:\Users\zxr17\.m2\repository\com\twitter\chill_2.12\0.9.5\chill_2.12-0.9.5.jar;C:\Users\zxr17\.m2\repository\com\esotericsoftware\kryo-shaded\4.0.2\kryo-shaded-4.0.2.jar;C:\Users\zxr17\.m2\repository\com\esotericsoftware\minlog\1.3.0\minlog-1.3.0.jar;C:\Users\zxr17\.m2\repository\org\objenesis\objenesis\2.5.1\objenesis-2.5.1.jar;C:\Users\zxr
17\.m2\repository\com\twitter\chill-java\0.9.5\chill-java-0.9.5.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-launcher_2.12\3.0.3\spark-launcher_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-kvstore_2.12\3.0.3\spark-kvstore_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-network-common_2.12\3.0.3\spark-network-common_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-network-shuffle_2.12\3.0.3\spark-network-shuffle_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-unsafe_2.12\3.0.3\spark-unsafe_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\javax\activation\activation\1.1.1\activation-1.1.1.jar;C:\Users\zxr17\.m2\repository\javax\servlet\javax.servlet-api\3.1.0\javax.servlet-api-3.1.0.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-text\1.6\commons-text-1.6.jar;C:\Users\zxr17\.m2\repository\org\slf4j\jul-to-slf4j\1.7.30\jul-to-slf4j-1.7.30.jar;C:\Users\zxr17\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.30\jcl-over-slf4j-1.7.30.jar;C:\Users\zxr17\.m2\repository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\Users\zxr17\.m2\repository\org\xerial\snappy\snappy-java\1.1.8.2\snappy-java-1.1.8.2.jar;C:\Users\zxr17\.m2\repository\org\lz4\lz4-java\1.7.1\lz4-java-1.7.1.jar;C:\Users\zxr17\.m2\repository\com\github\luben\zstd-jni\1.4.4-3\zstd-jni-1.4.4-3.jar;C:\Users\zxr17\.m2\repository\org\roaringbitmap\RoaringBitmap\0.7.45\RoaringBitmap-0.7.45.jar;C:\Users\zxr17\.m2\repository\org\roaringbitmap\shims\0.7.45\shims-0.7.45.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\modules\scala-xml_2.12\1.2.0\scala-xml_2.12-1.2.0.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\scala-library\2.12.10\scala-library-2.12.10.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\scala-reflect\2.12.10\scala-reflect-2.12.10.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-jackson_2.12\3.6.6\json4s-jackson_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-core_2.12\3.6.6\json4s-core_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-ast_2.12\3.6.6\json4s-ast_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-scalap_2.12\3.6.6\json4s-scalap_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-client\2.30\jersey-client-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\ws\rs\jakarta.ws.rs-api\2.1.6\jakarta.ws.rs-api-2.1.6.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\external\jakarta.inject\2.6.1\jakarta.inject-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-common\2.30\jersey-common-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\annotation\jakarta.annotation-api\1.3.5\jakarta.annotation-api-1.3.5.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\osgi-resource-locator\1.0.3\osgi-resource-locator-1.0.3.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-server\2.30\jersey-server-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\media\jersey-media-jaxb\2.30\jersey-media-jaxb-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\validation\jakarta.validation-api\2.0.2\jakarta.validation-api-2.0.2.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet\2.30\jersey-container-servlet-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet-core\2.30\jersey-container-servlet-core-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\inject\jerse
y-hk2\2.30\jersey-hk2-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-locator\2.6.1\hk2-locator-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\external\aopalliance-repackaged\2.6.1\aopalliance-repackaged-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-api\2.6.1\hk2-api-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-utils\2.6.1\hk2-utils-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\javassist\javassist\3.25.0-GA\javassist-3.25.0-GA.jar;C:\Users\zxr17\.m2\repository\io\netty\netty-all\4.1.47.Final\netty-all-4.1.47.Final.jar;C:\Users\zxr17\.m2\repository\com\clearspring\analytics\stream\2.9.6\stream-2.9.6.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-core\4.1.1\metrics-core-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-jvm\4.1.1\metrics-jvm-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-json\4.1.1\metrics-json-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-graphite\4.1.1\metrics-graphite-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-jmx\4.1.1\metrics-jmx-4.1.1.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jackson-module-scala_2.12\2.10.0\jackson-module-scala_2.12-2.10.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jackson-module-paranamer\2.10.0\jackson-module-paranamer-2.10.0.jar;C:\Users\zxr17\.m2\repository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;C:\Users\zxr17\.m2\repository\oro\oro\2.0.8\oro-2.0.8.jar;C:\Users\zxr17\.m2\repository\net\razorvine\pyrolite\4.30\pyrolite-4.30.jar;C:\Users\zxr17\.m2\repository\net\sf\py4j\py4j\0.10.9\py4j-0.10.9.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-crypto\1.1.0\commons-crypto-1.1.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-catalyst_2.12\3.0.3\spark-catalyst_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\modules\scala-parser-combinators_2.12\1.1.2\scala-parser-combinators_2.12-1.1.2.jar;C:\Users\zxr17\.m2\repository\org\codehaus\janino\janino\3.0.16\janino-3.0.16.jar;C:\Users\zxr17\.m2\repository\org\codehaus\janino\commons-compiler\3.0.16\commons-compiler-3.0.16.jar;C:\Users\zxr17\.m2\repository\org\antlr\antlr4-runtime\4.7.1\antlr4-runtime-4.7.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-vector\0.15.1\arrow-vector-0.15.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-format\0.15.1\arrow-format-0.15.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-memory\0.15.1\arrow-memory-0.15.1.jar;C:\Users\zxr17\.m2\repository\com\google\flatbuffers\flatbuffers-java\1.9.0\flatbuffers-java-1.9.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-tags_2.12\3.0.3\spark-tags_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-core\1.5.10\orc-core-1.5.10.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-shims\1.5.10\orc-shims-1.5.10.jar;C:\Users\zxr17\.m2\repository\io\airlift\aircompressor\0.10\aircompressor-0.10.jar;C:\Users\zxr17\.m2\repository\org\threeten\threeten-extra\1.5.0\threeten-extra-1.5.0.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-mapreduce\1.5.10\orc-mapreduce-1.5.10.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-storage-api\2.7.1\hive-storage-api-2.7.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-column\1.10.1\parquet-column-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-common\1.10.1\parquet-common-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-encoding\1.10.1\parquet-encoding-1.10.1.jar;C
:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-hadoop\1.10.1\parquet-hadoop-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-format\2.4.0\parquet-format-2.4.0.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-jackson\1.10.1\parquet-jackson-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.10.0\jackson-databind-2.10.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.10.0\jackson-core-2.10.0.jar;C:\Users\zxr17\.m2\repository\org\apache\xbean\xbean-asm7-shaded\4.15\xbean-asm7-shaded-4.15.jar;C:\Users\zxr17\.m2\repository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-hive_2.12\3.0.3\spark-hive_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-common\2.3.7\hive-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\jline\jline\2.12\jline-2.12.jar;C:\Users\zxr17\.m2\repository\com\tdunning\json\1.8\json-1.8.jar;C:\Users\zxr17\.m2\repository\com\github\joshelser\dropwizard-metrics-hadoop-metrics2-reporter\0.1.2\dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-exec\2.3.7\hive-exec-2.3.7-core.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-vector-code-gen\2.3.7\hive-vector-code-gen-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\velocity\velocity\1.5\velocity-1.5.jar;C:\Users\zxr17\.m2\repository\org\antlr\antlr-runtime\3.5.2\antlr-runtime-3.5.2.jar;C:\Users\zxr17\.m2\repository\org\antlr\ST4\4.0.4\ST4-4.0.4.jar;C:\Users\zxr17\.m2\repository\stax\stax-api\1.0.1\stax-api-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-metastore\2.3.7\hive-metastore-2.3.7.jar;C:\Users\zxr17\.m2\repository\javolution\javolution\5.5.1\javolution-5.5.1.jar;C:\Users\zxr17\.m2\repository\com\jolbox\bonecp\0.8.0.RELEASE\bonecp-0.8.0.RELEASE.jar;C:\Users\zxr17\.m2\repository\com\zaxxer\HikariCP\2.5.1\HikariCP-2.5.1.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-api-jdo\4.2.4\datanucleus-api-jdo-4.2.4.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-rdbms\4.1.19\datanucleus-rdbms-4.1.19.jar;C:\Users\zxr17\.m2\repository\commons-pool\commons-pool\1.5.4\commons-pool-1.5.4.jar;C:\Users\zxr17\.m2\repository\commons-dbcp\commons-dbcp\1.4\commons-dbcp-1.4.jar;C:\Users\zxr17\.m2\repository\javax\jdo\jdo-api\3.0.1\jdo-api-3.0.1.jar;C:\Users\zxr17\.m2\repository\javax\transaction\jta\1.1\jta-1.1.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\javax.jdo\3.2.0-m3\javax.jdo-3.2.0-m3.jar;C:\Users\zxr17\.m2\repository\javax\transaction\transaction-api\1.1\transaction-api-1.1.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-serde\2.3.7\hive-serde-2.3.7.jar;C:\Users\zxr17\.m2\repository\net\sf\opencsv\opencsv\2.3\opencsv-2.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-shims\2.3.7\hive-shims-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-common\2.3.7\hive-shims-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-0.23\2.3.7\hive-shims-0.23-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-scheduler\2.3.7\hive-shims-scheduler-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-llap-common\2.3.7\hive-llap-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-llap-client\2.3.7\hive-llap-client-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\avro\1
.8.2\avro-1.8.2.jar;C:\Users\zxr17\.m2\repository\org\tukaani\xz\1.5\xz-1.5.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\avro-mapred\1.8.2\avro-mapred-1.8.2-hadoop2.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\avro-ipc\1.8.2\avro-ipc-1.8.2.jar;C:\Users\zxr17\.m2\repository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;C:\Users\zxr17\.m2\repository\joda-time\joda-time\2.10.5\joda-time-2.10.5.jar;C:\Users\zxr17\.m2\repository\org\jodd\jodd-core\3.5.2\jodd-core-3.5.2.jar;C:\Users\zxr17\.m2\repository\com\google\code\findbugs\jsr305\3.0.0\jsr305-3.0.0.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-core\4.1.17\datanucleus-core-4.1.17.jar;C:\Users\zxr17\.m2\repository\org\apache\thrift\libthrift\0.12.0\libthrift-0.12.0.jar;C:\Users\zxr17\.m2\repository\org\apache\thrift\libfb303\0.9.3\libfb303-0.9.3.jar;C:\Users\zxr17\.m2\repository\org\apache\derby\derby\10.12.1.1\derby-10.12.1.1.jar;C:\Users\zxr17\.m2\repository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar" bigdata2402.SparkDemo Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/14 15:36:52 INFO SparkContext: Running Spark version 3.0.3 25/10/14 15:36:52 WARN Shell: Did not find winutils.exe: {} java.io.FileNotFoundException: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. -see https://wiki.apache.org/hadoop/WindowsProblems at org.apache.hadoop.util.Shell.fileNotFoundException(Shell.java:549) at org.apache.hadoop.util.Shell.getHadoopHomeDir(Shell.java:570) at org.apache.hadoop.util.Shell.getQualifiedBin(Shell.java:593) at org.apache.hadoop.util.Shell.<clinit>(Shell.java:690) at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:78) at org.apache.hadoop.conf.Configuration.getBoolean(Configuration.java:1665) at org.apache.hadoop.security.SecurityUtil.setConfigurationInternal(SecurityUtil.java:102) at org.apache.hadoop.security.SecurityUtil.<clinit>(SecurityUtil.java:86) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:315) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:303) at org.apache.hadoop.security.UserGroupInformation.doSubjectLogin(UserGroupInformation.java:1827) at org.apache.hadoop.security.UserGroupInformation.createLoginUser(UserGroupInformation.java:709) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:659) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:570) at org.apache.spark.util.Utils$.$anonfun$getCurrentUserName$1(Utils.scala:2414) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.util.Utils$.getCurrentUserName(Utils.scala:2414) at org.apache.spark.SparkContext.<init>(SparkContext.scala:308) at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2589) at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:937) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:931) at bigdata2402.SparkDemo$.main(SparkDemo.scala:27) at bigdata2402.SparkDemo.main(SparkDemo.scala) Caused by: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. 
    at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:469)
    at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:440)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:517)
    ... 20 more
25/10/14 15:36:52 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
25/10/14 15:36:52 INFO ResourceUtils: ==============================================================
25/10/14 15:36:52 INFO ResourceUtils: Resources for spark.driver:
25/10/14 15:36:52 INFO ResourceUtils: ==============================================================
25/10/14 15:36:52 INFO SparkContext: Submitted application: ducsv
25/10/14 15:36:52 INFO SecurityManager: Changing view acls to: zxr17
25/10/14 15:36:52 INFO SecurityManager: Changing modify acls to: zxr17
25/10/14 15:36:52 INFO SecurityManager: Changing view acls groups to:
25/10/14 15:36:52 INFO SecurityManager: Changing modify acls groups to:
25/10/14 15:36:52 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(zxr17); groups with view permissions: Set(); users with modify permissions: Set(zxr17); groups with modify permissions: Set()
25/10/14 15:36:53 INFO Utils: Successfully started service 'sparkDriver' on port 52248.
25/10/14 15:36:53 INFO SparkEnv: Registering MapOutputTracker
25/10/14 15:36:53 INFO SparkEnv: Registering BlockManagerMaster
25/10/14 15:36:53 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
25/10/14 15:36:53 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
Exception in thread "main" java.lang.NoSuchFieldError: JAVA_9
    at org.apache.spark.storage.StorageUtils$.<init>(StorageUtils.scala:207)
    at org.apache.spark.storage.StorageUtils$.<clinit>(StorageUtils.scala)
    at org.apache.spark.storage.BlockManagerMasterEndpoint.<init>(BlockManagerMasterEndpoint.scala:93)
    at org.apache.spark.SparkEnv$.$anonfun$create$9(SparkEnv.scala:370)
    at org.apache.spark.SparkEnv$.registerOrLookupEndpoint$1(SparkEnv.scala:311)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:359)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:272)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:448)
    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2589)
    at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:937)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:931)
    at bigdata2402.SparkDemo$.main(SparkDemo.scala:27)
    at bigdata2402.SparkDemo.main(SparkDemo.scala)

Process finished with exit code 1
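Two separate problems show up in this run. The WARN about winutils.exe is the usual Windows symptom of HADOOP_HOME / hadoop.home.dir not being set and is not what kills the job. The fatal error is the NoSuchFieldError: JAVA_9, which typically means an older commons-lang3 is shadowing the newer one Spark 3.0 builds against (the classpath above pulls in commons-lang3-3.4.jar via hadoop-common 3.1.3, while JavaVersion.JAVA_9 only exists in later commons-lang3 releases), so the usual fix is to pin a newer commons-lang3 in the pom rather than to change code. For the winutils warning, a minimal sketch of the common workaround, assuming winutils.exe has been unpacked under a local folder (the path D:\hadoop-3.1.3 below is a hypothetical placeholder):

import org.apache.spark.sql.SparkSession

object SparkDemoWindowsSketch {
  def main(args: Array[String]): Unit = {
    // Must be set before the first Hadoop class is loaded; the folder is expected
    // to contain bin\winutils.exe (hypothetical path, adjust to the local install)
    System.setProperty("hadoop.home.dir", "D:\\hadoop-3.1.3")

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("ducsv")
      .getOrCreate()

    spark.range(10).show()
    spark.stop()
  }
}

Setting the HADOOP_HOME environment variable to the same folder works equally well and keeps the code untouched.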
/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=35029:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-ja
va8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/f
link/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/slf4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-base_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx
_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scala-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/org/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache
/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scalap_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3
.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/r
epo/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/usr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com
/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/src/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local
/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-re
flect.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2
log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
25/10/09 15:19:59 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33)
25/10/09 15:19:59 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
25/10/09 15:19:59 INFO SparkContext: Running Spark version 3.1.1
25/10/09 15:20:00 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
25/10/09 15:20:00 INFO ResourceUtils: ==============================================================
25/10/09 15:20:00 INFO ResourceUtils: No custom resources configured for spark.driver.
25/10/09 15:20:00 INFO ResourceUtils: ==============================================================
25/10/09 15:20:00 INFO SparkContext: Submitted application: RandomForestModel
25/10/09 15:20:00 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
25/10/09 15:20:00 INFO ResourceProfile: Limiting resource is cpu
25/10/09 15:20:00 INFO ResourceProfileManager: Added ResourceProfile id: 0
25/10/09 15:20:00 INFO SecurityManager: Changing view acls to: root
25/10/09 15:20:00 INFO SecurityManager: Changing modify acls to: root
25/10/09 15:20:00 INFO SecurityManager: Changing view acls groups to:
25/10/09 15:20:00 INFO SecurityManager: Changing modify acls groups to:
25/10/09 15:20:00 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set()
25/10/09 15:20:00 INFO Utils: Successfully started service 'sparkDriver' on port 39167.
25/10/09 15:20:00 INFO SparkEnv: Registering MapOutputTracker
25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMaster
25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
25/10/09 15:20:00 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-7824d7b3-541f-4ec6-88e6-298ad263b86a
25/10/09 15:20:00 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB
25/10/09 15:20:00 INFO SparkEnv: Registering OutputCommitCoordinator
25/10/09 15:20:01 INFO Utils: Successfully started service 'SparkUI' on port 4040.
25/10/09 15:20:01 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040
25/10/09 15:20:01 INFO Executor: Starting executor ID driver on host 192.168.75.3
25/10/09 15:20:01 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 33353.
25/10/09 15:20:01 INFO NettyBlockTransferService: Server created on 192.168.75.3:33353
25/10/09 15:20:01 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
25/10/09 15:20:01 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None)
25/10/09 15:20:01 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:33353 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 33353, None)
25/10/09 15:20:01 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None)
25/10/09 15:20:01 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 33353, None)
25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
25/10/09 15:20:03 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored
25/10/09 15:20:03 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
25/10/09 15:20:03 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored
25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
Random forest model training complete!
Prediction complete; first 5 rows:
+-----------------+--------------------+
|machine_record_id|machine_record_state|
+-----------------+--------------------+
+-----------------+--------------------+
Prediction results have been written to the MySQL table ml_result!
Prediction on dwd.fact_machine_learning_data_test in Hive is finished.
Run the following query in MySQL to view the results:
SELECT * FROM ml_result WHERE machine_record_id IN (1,8,20,28,36);
+-----------------+--------------------+
|machine_record_id|machine_record_state|
+-----------------+--------------------+
+-----------------+--------------------+
The code shows empty values: both the printed prediction table and the ml_result query return no rows.
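An empty prediction table in the first run usually means the DataFrame handed to the model contained no rows, so nothing was scored and nothing was written to ml_result. The repeated "FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist" errors suggest a Hive jar path configured for the session (for example hive.aux.jars.path in hive-site.xml) points at directories that do not exist on this machine; that normally affects jar loading, not query results, so the more likely cause of the empty output is the input table or a filter on it. Below is a minimal Scala sketch for checking the input side, assuming the test set is read from the Hive table dwd.fact_machine_learning_data_test named in the program output; the session setup is illustrative and not the original gs8.shujuwaqu2 code.

import org.apache.spark.sql.SparkSession

object CheckInput {
  def main(args: Array[String]): Unit = {
    // Illustrative session setup; the real job already builds one.
    val spark = SparkSession.builder()
      .appName("RandomForestModel-input-check")
      .enableHiveSupport()
      .getOrCreate()

    // Table name taken from the program output above.
    val test = spark.table("dwd.fact_machine_learning_data_test")

    // If this prints 0, an empty prediction table (and an empty ml_result)
    // is the expected outcome: the model has nothing to score.
    println(s"rows in dwd.fact_machine_learning_data_test: ${test.count()}")
    test.printSchema()
    test.show(5, truncate = false)

    spark.stop()
  }
}

If the count is 0, the problem is upstream of the model: the job that loads dwd.fact_machine_learning_data_test, or a filter applied before transform(), is dropping every row.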
Follow-up (10-10): the job gs8.shujuwaqu2 was run again from IDEA with the same java command and classpath as the first run above; the output from the second run was:
log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
25/10/09 15:31:36 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33)
25/10/09 15:31:36 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
25/10/09 15:31:36 INFO SparkContext: Running Spark version 3.1.1
25/10/09 15:31:36 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
25/10/09 15:31:36 INFO ResourceUtils: ==============================================================
25/10/09 15:31:36 INFO ResourceUtils: No custom resources configured for spark.driver.
25/10/09 15:31:36 INFO ResourceUtils: ==============================================================
25/10/09 15:31:36 INFO SparkContext: Submitted application: RandomForestModel
25/10/09 15:31:36 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
25/10/09 15:31:36 INFO ResourceProfile: Limiting resource is cpu
25/10/09 15:31:36 INFO ResourceProfileManager: Added ResourceProfile id: 0
25/10/09 15:31:36 INFO SecurityManager: Changing view acls to: root
25/10/09 15:31:36 INFO SecurityManager: Changing modify acls to: root
25/10/09 15:31:36 INFO SecurityManager: Changing view acls groups to:
25/10/09 15:31:36 INFO SecurityManager: Changing modify acls groups to:
25/10/09 15:31:36 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set()
25/10/09 15:31:37 INFO Utils: Successfully started service 'sparkDriver' on port 33909.
25/10/09 15:31:37 INFO SparkEnv: Registering MapOutputTracker
25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMaster
25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
25/10/09 15:31:37 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-fc5a25f6-f8d7-41e2-8c52-61cc65b7fc90
25/10/09 15:31:37 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB
25/10/09 15:31:37 INFO SparkEnv: Registering OutputCommitCoordinator
25/10/09 15:31:37 INFO Utils: Successfully started service 'SparkUI' on port 4040.
25/10/09 15:31:37 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040
25/10/09 15:31:37 INFO Executor: Starting executor ID driver on host 192.168.75.3
25/10/09 15:31:37 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41227.
25/10/09 15:31:37 INFO NettyBlockTransferService: Server created on 192.168.75.3:41227
25/10/09 15:31:37 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
25/10/09 15:31:37 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None)
25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:41227 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 41227, None)
25/10/09 15:31:37 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None)
25/10/09 15:31:37 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 41227, None)
25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
25/10/09 15:31:39 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored
25/10/09 15:31:39 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
25/10/09 15:31:39 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored
25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist
25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist
Random forest model training complete!
Prediction complete; first 5 rows:
+-----------------+--------------------+
|machine_record_id|machine_record_state|
+-----------------+--------------------+
|1.4747628E7      |0.0                 |
|1.4747629E7      |0.0                 |
|1.474763E7       |0.0                 |
|1.4747631E7      |0.0                 |
|1.4747632E7      |0.0                 |
+-----------------+--------------------+
only showing top 5 rows
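In the second run the model does produce rows, but machine_record_id is printed in scientific notation (1.4747628E7 rather than 14747628) and machine_record_state as 0.0, which is how Spark displays columns that were carried through the pipeline as doubles. If the MySQL table expects integer ids, one way to fix both the display and the write is to cast the columns back before calling show() and the JDBC write. The sketch below assumes that; `predictions` is a placeholder for the DataFrame returned by the trained model's transform(), and the JDBC URL and credentials are placeholders as well. Only the column names and the ml_result table come from the output above.

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.col

object PredictionOutput {
  // Cast the id back to a whole number and the predicted label to an int
  // so the values print and store as 14747628 / 0 instead of 1.4747628E7 / 0.0.
  def castForOutput(predictions: DataFrame): DataFrame =
    predictions.select(
      col("machine_record_id").cast("long").as("machine_record_id"),
      col("machine_record_state").cast("int").as("machine_record_state")
    )
}

// Usage sketch (URL, user and password are placeholders, not from the log):
// val out = PredictionOutput.castForOutput(predictions)
// out.show(5, truncate = false)
// out.write
//   .format("jdbc")
//   .option("url", "jdbc:mysql://<mysql-host>:3306/<database>")
//   .option("dbtable", "ml_result")
//   .option("user", "<user>")
//   .option("password", "<password>")
//   .mode("append")
//   .save()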