hadoop1.1.2 core-default.xml

本文详细介绍了Hadoop核心配置文件中的各项参数设置,包括全局属性、日志记录、I/O操作、文件系统、IPC通信及Web界面等配置项,并解释了它们的作用与应用场景。
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>


<!-- Do not modify this file directly.  Instead, copy entries that you -->
<!-- wish to modify from this file into core-site.xml and change them -->
<!-- there.  If core-site.xml does not already exist, create it.      -->


<configuration>


<!--- global properties -->


<property>
  <name>hadoop.tmp.dir</name>
  <value>/tmp/hadoop-${user.name}</value>
  <description>A base for other temporary directories.</description>
</property>


<property>
  <name>hadoop.native.lib</name>
  <value>true</value>
  <description>Should native hadoop libraries, if present, be used.</description>
</property>


<property>
  <name>hadoop.http.filter.initializers</name>
  <value></value>
  <description>A comma separated list of class names. Each class in the list 
  must extend org.apache.hadoop.http.FilterInitializer. The corresponding 
  Filter will be initialized. Then, the Filter will be applied to all user 
  facing jsp and servlet web pages.  The ordering of the list defines the 
  ordering of the filters.</description>
</property>


 <property>
  <name>hadoop.security.group.mapping</name>
  <value>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</value>
  <description>Class for user to group mapping (get groups for a given user)
  </description>
</property>


<property>
  <name>hadoop.security.authorization</name>
  <value>false</value>
  <description>Is service-level authorization enabled?</description>
</property>


<property>
  <name>hadoop.security.authentication</name>
  <value>simple</value>
  <description>Possible values are simple (no authentication), and kerberos
  </description>
</property>


<property>
  <name>hadoop.security.token.service.use_ip</name>
  <value>true</value>
  <description>Controls whether tokens always use IP addresses.  DNS changes
  will not be detected if this option is enabled.  Existing client connections
  that break will always reconnect to the IP of the original host.  New clients
  will connect to the host's new IP but fail to locate a token.  Disabling
  this option will allow existing and new clients to detect an IP change and
  continue to locate the new host's token.
  </description>
</property>


<property>
  <name>hadoop.security.use-weak-http-crypto</name>
  <value>false</value>
  <description>If enabled, use KSSL to authenticate HTTP connections to the
  NameNode. Due to a bug in JDK6, using KSSL requires one to configure
  Kerberos tickets to use encryption types that are known to be
  cryptographically weak. If disabled, SPNEGO will be used for HTTP
  authentication, which supports stronger encryption types.
  </description>
</property>


<!--
<property>
  <name>hadoop.security.service.user.name.key</name>
  <value></value>
  <description>Name of the kerberos principal of the user that owns
  a given service daemon
  </description>
</property>
-->


<!--- logging properties -->


<property>
  <name>hadoop.logfile.size</name>
  <value>10000000</value>
  <description>The max size of each log file</description>
</property>


<property>
  <name>hadoop.logfile.count</name>
  <value>10</value>
  <description>The max number of log files</description>
</property>


<!-- i/o properties -->
<property>
  <name>io.file.buffer.size</name>
  <value>4096</value>
  <description>The size of buffer for use in sequence files.
  The size of this buffer should probably be a multiple of hardware
  page size (4096 on Intel x86), and it determines how much data is
  buffered during read and write operations.</description>
</property>
  
<property>
  <name>io.bytes.per.checksum</name>
  <value>512</value>
  <description>The number of bytes per checksum.  Must not be larger than
  io.file.buffer.size.</description>
</property>


<property>
  <name>io.skip.checksum.errors</name>
  <value>false</value>
  <description>If true, when a checksum error is encountered while
  reading a sequence file, entries are skipped, instead of throwing an
  exception.</description>
</property>


<property>
  <name>io.compression.codecs</name>
  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec</value>
  <description>A list of the compression codec classes that can be used 
               for compression/decompression.</description>
</property>


<property>
  <name>io.serializations</name>
  <value>org.apache.hadoop.io.serializer.WritableSerialization</value>
  <description>A list of serialization classes that can be used for
  obtaining serializers and deserializers.</description>
</property>


<!-- file system properties -->


<property>
  <name>fs.default.name</name>
  <value>file:///</value>
  <description>The name of the default file system.  A URI whose
  scheme and authority determine the FileSystem implementation.  The
  uri's scheme determines the config property (fs.SCHEME.impl) naming
  the FileSystem implementation class.  The uri's authority is used to
  determine the host, port, etc. for a filesystem.</description>
</property>


<property>
  <name>fs.trash.interval</name>
  <value>0</value>
  <description>Number of minutes between trash checkpoints.
  If zero, the trash feature is disabled.
  </description>
</property>


<property>
  <name>fs.file.impl</name>
  <value>org.apache.hadoop.fs.LocalFileSystem</value>
  <description>The FileSystem for file: uris.</description>
</property>


<property>
  <name>fs.hdfs.impl</name>
  <value>org.apache.hadoop.hdfs.DistributedFileSystem</value>
  <description>The FileSystem for hdfs: uris.</description>
</property>


<property>
  <name>fs.s3.impl</name>
  <value>org.apache.hadoop.fs.s3.S3FileSystem</value>
  <description>The FileSystem for s3: uris.</description>
</property>


<property>
  <name>fs.s3n.impl</name>
  <value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value>
  <description>The FileSystem for s3n: (Native S3) uris.</description>
</property>


<property>
  <name>fs.kfs.impl</name>
  <value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value>
  <description>The FileSystem for kfs: uris.</description>
</property>


<property>
  <name>fs.hftp.impl</name>
  <value>org.apache.hadoop.hdfs.HftpFileSystem</value>
</property>


<property>
  <name>fs.hsftp.impl</name>
  <value>org.apache.hadoop.hdfs.HsftpFileSystem</value>
</property>


<property>
  <name>fs.webhdfs.impl</name>
  <value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
</property>


<property>
  <name>fs.ftp.impl</name>
  <value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>
  <description>The FileSystem for ftp: uris.</description>
</property>


<property>
  <name>fs.ramfs.impl</name>
  <value>org.apache.hadoop.fs.InMemoryFileSystem</value>
  <description>The FileSystem for ramfs: uris.</description>
</property>


<property>
  <name>fs.har.impl</name>
  <value>org.apache.hadoop.fs.HarFileSystem</value>
  <description>The filesystem for Hadoop archives. </description>
</property>


<property>
  <name>fs.har.impl.disable.cache</name>
  <value>true</value>
  <description>Don't cache 'har' filesystem instances.</description>
</property>


<property>
  <name>fs.checkpoint.dir</name>
  <value>${hadoop.tmp.dir}/dfs/namesecondary</value>
  <description>Determines where on the local filesystem the DFS secondary
      name node should store the temporary images to merge.
      If this is a comma-delimited list of directories then the image is
      replicated in all of the directories for redundancy.
  </description>
</property>


<property>
  <name>fs.checkpoint.edits.dir</name>
  <value>${fs.checkpoint.dir}</value>
  <description>Determines where on the local filesystem the DFS secondary
      name node should store the temporary edits to merge.
      If this is a comma-delimited list of directories then the edits are
      replicated in all of the directories for redundancy.
      Default value is same as fs.checkpoint.dir
  </description>
</property>


<property>
  <name>fs.checkpoint.period</name>
  <value>3600</value>
  <description>The number of seconds between two periodic checkpoints.
  </description>
</property>


<property>
  <name>fs.checkpoint.size</name>
  <value>67108864</value>
  <description>The size of the current edit log (in bytes) that triggers
       a periodic checkpoint even if the fs.checkpoint.period hasn't expired.
  </description>
</property>






<property>
  <name>fs.s3.block.size</name>
  <value>67108864</value>
  <description>Block size to use when writing files to S3.</description>
</property>


<property>
  <name>fs.s3.buffer.dir</name>
  <value>${hadoop.tmp.dir}/s3</value>
  <description>Determines where on the local filesystem the S3 filesystem
  should store files before sending them to S3
  (or after retrieving them from S3).
  </description>
</property>


<property>
  <name>fs.s3.maxRetries</name>
  <value>4</value>
  <description>The maximum number of retries for reading or writing files to S3, 
  before we signal failure to the application.
  </description>
</property>


<property>
  <name>fs.s3.sleepTimeSeconds</name>
  <value>10</value>
  <description>The number of seconds to sleep between each S3 retry.
  </description>
</property>




<property>
  <name>local.cache.size</name>
  <value>10737418240</value>
  <description>The limit on the size of cache you want to keep, set by default
  to 10GB. This will act as a soft limit on the cache directory for out of band data.
  </description>
</property>
            
<property>
  <name>io.seqfile.compress.blocksize</name>
  <value>1000000</value>
  <description>The minimum block size for compression in block compressed 
          SequenceFiles.
  </description>
</property>


<property>
  <name>io.seqfile.lazydecompress</name>
  <value>true</value>
  <description>Should values of block-compressed SequenceFiles be decompressed
          only when necessary.
  </description>
</property>


<property>
  <name>io.seqfile.sorter.recordlimit</name>
  <value>1000000</value>
  <description>The limit on number of records to be kept in memory in a spill 
          in SequenceFiles.Sorter
  </description>
</property>


 <property>
  <name>io.mapfile.bloom.size</name>
  <value>1048576</value>
  <description>The size of BloomFilter-s used in BloomMapFile. Each time this many
  keys is appended the next BloomFilter will be created (inside a DynamicBloomFilter).
  Larger values minimize the number of filters, which slightly increases the performance,
  but may waste too much space if the total number of keys is usually much smaller
  than this number.
  </description>
</property>


<property>
  <name>io.mapfile.bloom.error.rate</name>
  <value>0.005</value>
  <description>The rate of false positives in BloomFilter-s used in BloomMapFile.
  As this value decreases, the size of BloomFilter-s increases exponentially. This
  value is the probability of encountering false positives (default is 0.5%).
  </description>
</property>


<property>
  <name>hadoop.util.hash.type</name>
  <value>murmur</value>
  <description>The default implementation of Hash. Currently this can take one of the
  two values: 'murmur' to select MurmurHash and 'jenkins' to select JenkinsHash.
  </description>
</property>




<!-- ipc properties -->


<property>
  <name>ipc.client.idlethreshold</name>
  <value>4000</value>
  <description>Defines the threshold number of connections after which
               connections will be inspected for idleness.
  </description>
</property>


<property>
  <name>ipc.client.kill.max</name>
  <value>10</value>
  <description>Defines the maximum number of clients to disconnect in one go.
  </description>
</property>


<property>
  <name>ipc.client.connection.maxidletime</name>
  <value>10000</value>
  <description>The maximum time in msec after which a client will bring down the
               connection to the server.
  </description>
</property>


<property>
  <name>ipc.client.connect.max.retries</name>
  <value>10</value>
  <description>Indicates the number of retries a client will make to establish
               a server connection.
  </description>
</property>


<property>
  <name>ipc.server.listen.queue.size</name>
  <value>128</value>
  <description>Indicates the length of the listen queue for servers accepting
               client connections.
  </description>
</property>


<property>
  <name>ipc.server.tcpnodelay</name>
  <value>false</value>
  <description>Turn on/off Nagle's algorithm for the TCP socket connection on 
  the server. Setting to true disables the algorithm and may decrease latency
  with a cost of more/smaller packets. 
  </description>
</property>


<property>
  <name>ipc.client.tcpnodelay</name>
  <value>false</value>
  <description>Turn on/off Nagle's algorithm for the TCP socket connection on 
  the client. Setting to true disables the algorithm and may decrease latency
  with a cost of more/smaller packets. 
  </description>
</property>




<!-- Web Interface Configuration -->


<property>
  <name>webinterface.private.actions</name>
  <value>false</value>
  <description> If set to true, the web interfaces of JT and NN may contain 
                actions, such as kill job, delete file, etc., that should 
                not be exposed to public. Enable this option if the interfaces 
                are only reachable by those who have the right authorization.
  </description>
</property>


<!-- Proxy Configuration -->


<property>
  <name>hadoop.rpc.socket.factory.class.default</name>
  <value>org.apache.hadoop.net.StandardSocketFactory</value>
  <description> Default SocketFactory to use. This parameter is expected to be
    formatted as "package.FactoryClassName".
  </description>
</property>


<property>
  <name>hadoop.rpc.socket.factory.class.ClientProtocol</name>
  <value></value>
  <description> SocketFactory to use to connect to a DFS. If null or empty, use
    hadoop.rpc.socket.class.default. This socket factory is also used by
    DFSClient to create sockets to DataNodes.
  </description>
</property>






<property>
  <name>hadoop.socks.server</name>
  <value></value>
  <description> Address (host:port) of the SOCKS server to be used by the
    SocksSocketFactory.
  </description>
</property>


<!-- Rack Configuration -->


<property>
  <name>topology.node.switch.mapping.impl</name>
  <value>org.apache.hadoop.net.ScriptBasedMapping</value>
  <description> The default implementation of the DNSToSwitchMapping. It
    invokes a script specified in topology.script.file.name to resolve
    node names. If the value for topology.script.file.name is not set, the
    default value of DEFAULT_RACK is returned for all node names.
  </description>
</property>


<property>
  <name>topology.script.file.name</name>
  <value></value>
  <description> The script name that should be invoked to resolve DNS names to
    NetworkTopology names. Example: the script would take host.foo.bar as an
    argument, and return /rack1 as the output.
  </description>
</property>


<property>
  <name>topology.script.number.args</name>
  <value>100</value>
  <description> The max number of args that the script configured with 
    topology.script.file.name should be run with. Each arg is an
    IP address.
  </description>
</property>


<property>
  <name>hadoop.security.uid.cache.secs</name>
  <value>14400</value>
  <description> NativeIO maintains a cache from UID to UserName. This is
  the timeout for an entry in that cache. </description>
</property>


<!-- HTTP web-consoles Authentication -->


<property>
  <name>hadoop.http.authentication.type</name>
  <value>simple</value>
  <description>
    Defines authentication used for Oozie HTTP endpoint.
    Supported values are: simple | kerberos | #AUTHENTICATION_HANDLER_CLASSNAME#
  </description>
</property>


<property>
  <name>hadoop.http.authentication.token.validity</name>
  <value>36000</value>
  <description>
    Indicates how long (in seconds) an authentication token is valid before it has
    to be renewed.
  </description>
</property>


<property>
  <name>hadoop.http.authentication.signature.secret.file</name>
  <value>${user.home}/hadoop-http-auth-signature-secret</value>
  <description>
    The signature secret for signing the authentication tokens.
    If not set a random secret is generated at startup time.
    The same secret should be used for JT/NN/DN/TT configurations.
  </description>
</property>


<property>
  <name>hadoop.http.authentication.cookie.domain</name>
  <value></value>
  <description>
    The domain to use for the HTTP cookie that stores the authentication token.
    In order for authentication to work correctly across the web-consoles of all
    Hadoop nodes, the domain must be correctly set.
    IMPORTANT: when using IP addresses, browsers ignore cookies with domain settings.
    For this setting to work properly all nodes in the cluster must be configured
    to generate URLs with hostname.domain names on it.
  </description>
</property>


<property>
  <name>hadoop.http.authentication.simple.anonymous.allowed</name>
  <value>true</value>
  <description>
    Indicates if anonymous requests are allowed when using 'simple' authentication.
  </description>
</property>


<property>
  <name>hadoop.http.authentication.kerberos.principal</name>
  <value>HTTP/localhost@LOCALHOST</value>
  <description>
    Indicates the Kerberos principal to be used for HTTP endpoint.
    The principal MUST start with 'HTTP/' as per Kerberos HTTP SPNEGO specification.
  </description>
</property>


<property>
  <name>hadoop.http.authentication.kerberos.keytab</name>
  <value>${user.home}/hadoop.keytab</value>
  <description>
    Location of the keytab file with the credentials for the principal.
    Referring to the same keytab file Oozie uses for its Kerberos credentials for Hadoop.
  </description>
</property>


<property>
  <name>hadoop.relaxed.worker.version.check</name>
  <value>false</value>
  <description>
    By default datanodes refuse to connect to namenodes if their build
    revision (svn revision) do not match, and tasktrackers refuse to
    connect to jobtrackers if their build version (version, revision,
    user, and source checksum) do not match. This option changes the
    behavior of hadoop workers to only check for a version match (eg
    "1.0.2") but ignore the other build fields (revision, user, and
    source checksum).
  </description>
</property>


</configuration>


/home/hadoopmaster/jdk1.8.0_161/bin/java -javaagent:/home/hadoopmaster/idea-IC-221.6008.13/lib/idea_rt.jar=34515:/home/hadoopmaster/idea-IC-221.6008.13/bin -Dfile.encoding=UTF-8 -classpath /home/hadoopmaster/jdk1.8.0_161/jre/lib/charsets.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/deploy.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/cldrdata.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/dnsns.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/jaccess.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/jfxrt.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/localedata.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/nashorn.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunec.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunjce_provider.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/sunpkcs11.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/ext/zipfs.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/javaws.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jce.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jfr.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jfxswt.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/jsse.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/management-agent.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/plugin.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/resources.jar:/home/hadoopmaster/jdk1.8.0_161/jre/lib/rt.jar:/root/IdeaProjects/kkk/out/production/kkk:/home/hadoopmaster/scala-2.12.15/lib/scala-reflect.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-xml_2.12-1.0.6.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-parser-combinators_2.12-1.0.7.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-swing_2.12-2.0.3.jar:/home/hadoopmaster/scala-2.12.15/lib/scala-library.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xz-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jta-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jpam-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/ST4-4.0.4.jar:/home/hadoopm
aster/spark-3.2.1-bin-hadoop2.7/jars/guice-3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/ivy-2.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/oro-2.0.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/blas-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/core-1.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/gson-2.2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/tink-1.6.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jsp-api-2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/okio-1.14.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/opencsv-2.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/shims-0.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xmlenc-0.52.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arpack-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/guava-14.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jline-2.14.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jsr305-3.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/lapack-2.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/log4j-1.2.17.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/minlog-1.3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/stream-2.9.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/velocity-1.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/generex-1.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-api-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/janino-3.0.16.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jdo-api-3.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/objenesis-2.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/paranamer-2.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/py4j-0.10.9.3.jar:/home/hadoopmaster/
spark-3.2.1-bin-hadoop2.7/jars/pyrolite-4.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/HikariCP-2.5.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-io-2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-cli-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javax.inject-1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/libfb303-0.9.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/lz4-java-1.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/okhttp-3.12.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/snakeyaml-1.27.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/stax-api-1.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/JTransforms-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aopalliance-1.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-ipc-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/breeze_2.12-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-cli-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-net-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/derby-10.14.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-jdbc-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-utils-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/httpcore-4.4.14.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jaxb-api-2.2.11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-hk2-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jodd-core-3.5.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-core-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/super-csv-2.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xml-apis-1.4.01.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zookeeper-3.6.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/JLargeArrays-1.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/activation-1.1.1.ja
r:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/automaton-1.11-8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-dbcp-1.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-lang-2.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-text-1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-serde-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javolution-5.5.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/libthrift-0.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-shims-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/slf4j-api-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zjsonpatch-0.3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zstd-jni-1.5.0-4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/chill-java-0.10.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/chill_2.12-0.10.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/guice-servlet-3.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-auth-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-hdfs-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hk2-locator-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/httpclient-4.5.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-xc-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-util-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/joda-time-2.10.10.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kryo-shaded-4.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-jmx-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-jvm-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/rocksdbjni-6.20.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire_2.12-0.17.
0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xercesImpl-2.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aircompressor-0.21.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/algebra_2.12-2.0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/annotations-17.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/antlr4-runtime-4.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/api-util-1.0.0-M20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-format-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-vector-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/avro-mapred-1.10.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-codec-1.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-pool-1.5.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/compress-lzf-1.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-beeline-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/javax.jdo-3.2.0-m3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jaxb-runtime-2.3.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-client-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-common-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-server-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/leveldbjni-all-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-core-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-json-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/RoaringBitmap-0.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/antlr-runtime-3.5.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-math3-3.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-client-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-core-2.12.3.jar:/home/hadoopmaster
/spark-3.2.1-bin-hadoop2.7/jars/javassist-3.25.0-GA.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jul-to-slf4j-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/protobuf-java-2.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/snappy-java-1.1.8.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/transaction-api-1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/bonecp-0.8.0.RELEASE.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-crypto-1.1.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-digester-1.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-lang3-3.12.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-client-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-exec-2.3.9-core.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-metastore-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-jaxrs-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.inject-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/orc-mapreduce-1.6.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-xml_2.12-1.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/shapeless_2.12-2.3.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/slf4j-log4j12-1.7.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-sql_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/threeten-extra-1.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/zookeeper-jute-3.6.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-compress-1.21.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-logging-1.1.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-recipes-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-api-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-0.23-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jcl-over-slf4j-1.7
.30.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-column-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-common-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-hadoop-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-library-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-reflect-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-core_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-hive_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-repl_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-tags_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-yarn_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/api-asn1-api-1.0.0-M20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/breeze-macros_2.12-1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/cats-kernel_2.12-2.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-httpclient-3.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/flatbuffers-java-1.9.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-llap-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-service-rpc-3.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-storage-api-2.7.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jetty-sslengine-6.1.26.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/metrics-graphite-4.2.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/netty-all-4.1.68.Final.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-jackson-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-compiler-2.12.15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mesos_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mllib_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire-util_2.12-0.17.0.jar:
/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/xbean-asm9-shaded-4.20.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/apacheds-i18n-2.0.0-M15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arpack_combined_all-0.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-memory-core-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-beanutils-1.9.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-compiler-3.0.16.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/curator-framework-2.7.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-core-4.1.17.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-common-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-core-asl-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-databind-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.ws.rs-api-2.1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-client-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/macro-compat_2.12-1.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-encoding-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-graphx_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-sketch_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-unsafe_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/univocity-parsers-2.9.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/arrow-memory-netty-2.0.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-rdbms-4.1.19.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-annotations-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-client-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-kvstore_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/
jars/spire-macros_2.12-0.17.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-collections-3.2.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/commons-configuration-1.6.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/datanucleus-api-jdo-4.2.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-mapper-asl-1.9.13.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.servlet-api-4.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-ast_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-catalyst_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-launcher_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/audience-annotations-0.5.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-shims-scheduler-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hive-vector-code-gen-2.3.9.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-annotations-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.xml.bind-api-2.3.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-core_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-streaming_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spire-platform_2.12-0.17.0.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-apps-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-core-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-node-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-rbac-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/logging-interceptor-3.12.12.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/mesos-1.4.0-shaded-protobuf.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/osgi-resource-locator-1.0.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-kubernetes_2.12-3.2.1.jar:/home/hadoopmaster/spa
rk-3.2.1-bin-hadoop2.7/jars/spark-tags_2.12-3.2.1-tests.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/aopalliance-repackaged-2.6.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/htrace-core-3.1.0-incubating.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/istack-commons-runtime-3.0.8.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.annotation-api-1.3.5.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jakarta.validation-api-2.0.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-scalap_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-batch-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-mllib-local_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-container-servlet-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/json4s-jackson_2.12-3.7.0-M11.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-common-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-events-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-policy-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-dataformat-yaml-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-datatype-jsr310-2.11.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-metrics-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-server-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-network-common_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jackson-module-scala_2.12-2.12.3.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-discovery-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/parquet-format-structures-1.12.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-network-shuffle_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/apacheds-kerberos-codec-2.0.0-M
15.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-app-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-extensions-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-networking-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-scheduling-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-core-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-yarn-server-web-proxy-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/jersey-container-servlet-core-2.34.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-autoscaling-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-flowcontrol-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-collection-compat_2.12-2.1.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/spark-hive-thriftserver_2.12-3.2.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-certificates-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-coordination-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-storageclass-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/scala-parser-combinators_2.12-1.1.2.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-common-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-apiextensions-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-shuffle-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/hadoop-mapreduce-client-jobclient-2.7.4.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/kubernetes-model-admissionregistration-5.4.1.jar:/home/hadoopmaster/spark-3.2.1-bin-hadoop2.7/jars/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar kkk.WordCount Using Spark's default log4j profile: 
org/apache/spark/log4j-defaults.properties 25/06/02 20:17:03 INFO SparkContext: Running Spark version 3.2.1 25/06/02 20:17:04 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/06/02 20:17:04 INFO ResourceUtils: ============================================================== 25/06/02 20:17:04 INFO ResourceUtils: No custom resources configured for spark.driver. 25/06/02 20:17:04 INFO ResourceUtils: ============================================================== 25/06/02 20:17:04 INFO SparkContext: Submitted application: WordCount 25/06/02 20:17:04 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/06/02 20:17:04 INFO ResourceProfile: Limiting resource is cpu 25/06/02 20:17:04 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/06/02 20:17:04 INFO SecurityManager: Changing view acls to: root 25/06/02 20:17:04 INFO SecurityManager: Changing modify acls to: root 25/06/02 20:17:04 INFO SecurityManager: Changing view acls groups to: 25/06/02 20:17:04 INFO SecurityManager: Changing modify acls groups to: 25/06/02 20:17:04 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/06/02 20:17:05 INFO Utils: Successfully started service 'sparkDriver' on port 41615. 
25/06/02 20:17:05 INFO SparkEnv: Registering MapOutputTracker 25/06/02 20:17:05 INFO SparkEnv: Registering BlockManagerMaster 25/06/02 20:17:05 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/06/02 20:17:05 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/06/02 20:17:05 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/06/02 20:17:05 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-68486311-3f69-48e8-8f69-e7afffcf5979 25/06/02 20:17:05 INFO MemoryStore: MemoryStore started with capacity 258.5 MiB 25/06/02 20:17:05 INFO SparkEnv: Registering OutputCommitCoordinator 25/06/02 20:17:06 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/06/02 20:17:06 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://hadoopmaster:4040 25/06/02 20:17:06 INFO Executor: Starting executor ID driver on host hadoopmaster 25/06/02 20:17:06 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41907. 
25/06/02 20:17:06 INFO NettyBlockTransferService: Server created on hadoopmaster:41907 25/06/02 20:17:06 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/06/02 20:17:06 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManagerMasterEndpoint: Registering block manager hadoopmaster:41907 with 258.5 MiB RAM, BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:06 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, hadoopmaster, 41907, None) 25/06/02 20:17:08 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 244.0 KiB, free 258.2 MiB) 25/06/02 20:17:09 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 23.4 KiB, free 258.2 MiB) 25/06/02 20:17:09 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on hadoopmaster:41907 (size: 23.4 KiB, free: 258.5 MiB) 25/06/02 20:17:09 INFO SparkContext: Created broadcast 0 from textFile at WordCount.scala:9 Exception in thread "main" org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/hadoopmaster/words.txt at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:287) at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:229) at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:315) at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at 
scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49) at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.rdd.RDD.partitions(RDD.scala:296) at org.apache.spark.Partitioner$.$anonfun$defaultPartitioner$4(Partitioner.scala:78) at org.apache.spark.Partitioner$.$anonfun$defaultPartitioner$4$adapted(Partitioner.scala:78) at scala.collection.immutable.List.map(List.scala:293) at org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:78) at org.apache.spark.rdd.PairRDDFunctions.$anonfun$reduceByKey$4(PairRDDFunctions.scala:322) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) at org.apache.spark.rdd.RDD.withScope(RDD.scala:414) at org.apache.spark.rdd.PairRDDFunctions.reduceByKey(PairRDDFunctions.scala:322) at kkk.WordCount$.main(WordCount.scala:10) at kkk.WordCount.main(WordCount.scala) 25/06/02 20:17:09 INFO SparkContext: Invoking stop() from shutdown hook 25/06/02 20:17:09 INFO SparkUI: Stopped Spark web UI at http://hadoopmaster:4040 25/06/02 20:17:09 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped! 25/06/02 20:17:09 INFO MemoryStore: MemoryStore cleared 25/06/02 20:17:09 INFO BlockManager: BlockManager stopped 25/06/02 20:17:09 INFO BlockManagerMaster: BlockManagerMaster stopped 25/06/02 20:17:09 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped! 
25/06/02 20:17:10 INFO SparkContext: Successfully stopped SparkContext 25/06/02 20:17:10 INFO ShutdownHookManager: Shutdown hook called 25/06/02 20:17:10 INFO ShutdownHookManager: Deleting directory /tmp/spark-213e3a91-227c-4e0a-8254-ab5c0f00786d Process finished with exit code 1
06-04
"C:\Program Files\Java\jdk1.8.0_281\bin\java.exe" "-javaagent:D:\新建文件夹 (2)\IDEA\idea\IntelliJ IDEA 2019.3.3\lib\idea_rt.jar=59342" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.8.0_281\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\cldrdata.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\jfxrt.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\nashorn.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\sunpkcs11.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jfxswt.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.8.0_281\jre\lib\resources.jar;C:\Program 
Files\Java\jdk1.8.0_281\jre\lib\rt.jar;D:\carspark\out\production\carspark;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-library\jars\scala-library-2.12.10.jar;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-reflect\jars\scala-reflect-2.12.10.jar;C:\Users\wyatt\.ivy2\cache\org.scala-lang\scala-library\srcs\scala-library-2.12.10-sources.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\accessors-smart-1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\activation-1.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aircompressor-0.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\algebra_2.12-2.0.0-M2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\antlr-runtime-3.5.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\antlr4-runtime-4.8-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aopalliance-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\aopalliance-repackaged-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arpack_combined_all-0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-format-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-memory-core-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-memory-netty-2.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\audience-annotations-0.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\automaton-1.11-8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-ipc-1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\avro-mapred-1.8.2-hadoop2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\bonecp-0.8.0.RELEASE.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\breeze-macros_2.12-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\breeze_2.12-1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\cats-kernel_2.12-2.0.0-M4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\chill-java-0.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\chill_2.12-0.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-beanutils-1.9.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-cli-1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\comm
ons-codec-1.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-collections-3.2.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-compiler-3.0.16.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-compress-1.20.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-configuration2-2.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-crypto-1.1.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-daemon-1.0.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-dbcp-1.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-httpclient-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-io-2.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-lang-2.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-lang3-3.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-logging-1.1.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-math3-3.4.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-net-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-pool-1.5.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\commons-text-1.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\compress-lzf-1.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\core-1.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-client-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-framework-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\curator-recipes-2.13.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-api-jdo-4.2.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-core-4.1.17.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\datanucleus-rdbms-4.1.19.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\derby-10.12.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\dnsjava-2.1.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ehcache-3.3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\flatbuffers-java-1.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\generex-1.0.2.jar;D:\spark\spark-3.1.1-bin-h
adoop3.2\jars\geronimo-jcache_1.0_spec-1.0-alpha-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\gson-2.2.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guava-14.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guice-4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\guice-servlet-4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-annotations-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-auth-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-hdfs-client-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-core-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-mapreduce-client-jobclient-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-api-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-client-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-registry-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-server-common-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hadoop-yarn-server-web-proxy-3.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\HikariCP-2.5.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-beeline-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-cli-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-common-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-exec-2.3.7-core.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-jdbc-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-llap-common-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-metastore-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-serde-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-service-rpc-3.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-shims-0.23-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-shims-common-2.3.7.jar;D:\spark\spark-3.
1.1-bin-hadoop3.2\jars\hive-shims-scheduler-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-storage-api-2.7.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hive-vector-code-gen-2.3.7.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-api-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-locator-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\hk2-utils-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\htrace-core4-4.1.0-incubating.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\httpclient-4.5.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\httpcore-4.4.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\istack-commons-runtime-3.0.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ivy-2.4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-annotations-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-core-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-core-asl-1.9.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-databind-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-dataformat-yaml-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-datatype-jsr310-2.11.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-jaxrs-base-2.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-jaxrs-json-provider-2.9.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-mapper-asl-1.9.13.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-jaxb-annotations-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-paranamer-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jackson-module-scala_2.12-2.10.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.activation-api-1.2.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.annotation-api-1.3.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.inject-2.6.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.servlet-api-4.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.validation-api-2.0.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jakarta.ws.rs-api-2.1.6.jar;D:\spark\s
park-3.1.1-bin-hadoop3.2\jars\jakarta.xml.bind-api-2.3.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\janino-3.0.16.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javassist-3.25.0-GA.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javax.inject-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javax.jdo-3.2.0-m3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\javolution-5.5.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jaxb-api-2.2.11.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jaxb-runtime-2.3.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jcip-annotations-1.0-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jcl-over-slf4j-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jdo-api-3.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-client-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-common-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-container-servlet-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-container-servlet-core-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-hk2-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-media-jaxb-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jersey-server-2.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\JLargeArrays-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jline-2.14.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\joda-time-2.10.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jodd-core-3.5.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jpam-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json-1.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json-smart-2.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-ast_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-core_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-jackson_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\json4s-scalap_2.12-3.7.0-M5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jsp-api-2.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jsr305-3.0.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jta-1.1.ja
r;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\JTransforms-3.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\jul-to-slf4j-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-admin-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-client-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-common-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-core-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-crypto-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-identity-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-server-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-simplekdc-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerb-util-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-asn1-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-config-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-pkix-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-util-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kerby-xdr-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kryo-shaded-4.0.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-client-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-admissionregistration-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-apiextensions-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-apps-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-autoscaling-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-batch-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-certificates-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-common-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-coordination-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-core-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-discovery-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-events-4.12.0.jar;D:\spark\spark-3.
1.1-bin-hadoop3.2\jars\kubernetes-model-extensions-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-metrics-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-networking-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-policy-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-rbac-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-scheduling-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-settings-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\kubernetes-model-storageclass-4.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\leveldbjni-all-1.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\libfb303-0.9.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\libthrift-0.12.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\log4j-1.2.17.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\logging-interceptor-3.12.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\lz4-java-1.7.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\machinist_2.12-0.6.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\macro-compat_2.12-1.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\mesos-1.4.0-shaded-protobuf.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-core-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-graphite-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-jmx-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-json-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\metrics-jvm-4.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\minlog-1.3.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\netty-all-4.1.51.Final.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\nimbus-jose-jwt-4.41.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\objenesis-2.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okhttp-2.7.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okhttp-3.12.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\okio-1.14.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\opencsv-2.3.jar;D:\spark\spark-3.1.1-bin-had
oop3.2\jars\orc-core-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\orc-mapreduce-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\orc-shims-1.5.12.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\oro-2.0.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\osgi-resource-locator-1.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\paranamer-2.8.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-column-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-common-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-encoding-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-format-2.4.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-hadoop-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\parquet-jackson-1.10.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\protobuf-java-2.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\py4j-0.10.9.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\pyrolite-4.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\re2j-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\RoaringBitmap-0.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-collection-compat_2.12-2.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-compiler-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-library-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-parser-combinators_2.12-1.1.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-reflect-2.12.10.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\scala-xml_2.12-1.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\shapeless_2.12-2.3.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\shims-0.9.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\slf4j-api-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\slf4j-log4j12-1.7.30.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\snakeyaml-1.24.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\snappy-java-1.1.8.2.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-catalyst_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-core_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\j
ars\spark-graphx_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-hive-thriftserver_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-hive_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-kubernetes_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-kvstore_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-launcher_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mesos_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib-local_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-mllib_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-common_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-network-shuffle_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-repl_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sketch_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-sql_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-streaming_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1-tests.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-tags_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-unsafe_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spark-yarn_2.12-3.1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-macros_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-platform_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire-util_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\spire_2.12-0.17.0-M1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\ST4-4.0.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax-api-1.0.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stax2-api-3.1.4.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\stream-2.9.6.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\super-csv-2.2.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\threeten-extra-1.5.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\token-provider-1.0.1.jar;D:\spark\
spark-3.1.1-bin-hadoop3.2\jars\transaction-api-1.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\univocity-parsers-2.9.1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\velocity-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\woodstox-core-5.0.3.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xbean-asm7-shaded-4.15.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\xz-1.5.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zjsonpatch-0.3.0.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zookeeper-3.4.14.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\zstd-jni-1.4.8-1.jar;D:\spark\spark-3.1.1-bin-hadoop3.2\jars\arrow-vector-2.0.0.jar" car.LoadModelRideHailing Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/06/08 17:05:07 INFO SparkContext: Running Spark version 3.1.1 25/06/08 17:05:07 INFO ResourceUtils: ============================================================== 25/06/08 17:05:07 INFO ResourceUtils: No custom resources configured for spark.driver. 25/06/08 17:05:07 INFO ResourceUtils: ============================================================== 25/06/08 17:05:07 INFO SparkContext: Submitted application: LoadModelRideHailing 25/06/08 17:05:07 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/06/08 17:05:07 INFO ResourceProfile: Limiting resource is cpu 25/06/08 17:05:07 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/06/08 17:05:07 INFO SecurityManager: Changing view acls to: wyatt 25/06/08 17:05:07 INFO SecurityManager: Changing modify acls to: wyatt 25/06/08 17:05:07 INFO SecurityManager: Changing view acls groups to: 25/06/08 17:05:07 INFO SecurityManager: Changing modify acls groups to: 25/06/08 17:05:07 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users 
with view permissions: Set(wyatt); groups with view permissions: Set(); users with modify permissions: Set(wyatt); groups with modify permissions: Set() 25/06/08 17:05:07 INFO Utils: Successfully started service 'sparkDriver' on port 59361. 25/06/08 17:05:07 INFO SparkEnv: Registering MapOutputTracker 25/06/08 17:05:07 INFO SparkEnv: Registering BlockManagerMaster 25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/06/08 17:05:08 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/06/08 17:05:08 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/06/08 17:05:08 INFO DiskBlockManager: Created local directory at C:\Users\wyatt\AppData\Local\Temp\blockmgr-8fe065e2-024c-4e2f-8662-45d2fe3de444 25/06/08 17:05:08 INFO MemoryStore: MemoryStore started with capacity 1899.0 MiB 25/06/08 17:05:08 INFO SparkEnv: Registering OutputCommitCoordinator 25/06/08 17:05:08 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/06/08 17:05:08 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://windows10.microdone.cn:4040 25/06/08 17:05:08 INFO Executor: Starting executor ID driver on host windows10.microdone.cn 25/06/08 17:05:08 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 59392. 
package car

import org.apache.spark.ml.classification.{LogisticRegressionModel, RandomForestClassificationModel}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.{SparkSession, functions => F}

/**
 * Loads two previously trained classifiers (logistic regression and random
 * forest), applies them to a feature-engineered test CSV, and prints the
 * multiclass "accuracy" metric for each model.
 *
 * NOTE(review): the guard below only checks that a column literally named
 * "features" exists in the CSV; whether its type matches what the loaded
 * models expect is not verified here — confirm against the training pipeline.
 */
object LoadModelRideHailing {

  def main(args: Array[String]): Unit = {
    // Local 3-thread session; console noise below ERROR is suppressed.
    val session = SparkSession.builder()
      .master("local[3]")
      .appName("LoadModelRideHailing")
      .getOrCreate()
    session.sparkContext.setLogLevel("Error")

    // Feature-engineered test data; first row is a header.
    val rawTest = session.read
      .option("header", "true")
      .csv("C:\\Users\\wyatt\\Documents\\ride_hailing_test_data.csv")

    // The evaluator requires a numeric label, so cast the label column to double.
    val labeledTest = rawTest.withColumn("label", F.col("label").cast("double"))

    // Fail fast when the expected "features" column is absent from the input.
    if (!labeledTest.columns.contains("features")) {
      throw new IllegalArgumentException("测试数据中不包含 features 列,请检查数据!")
    }

    // One evaluator configuration is reused for both models; evaluate() does
    // not mutate the evaluator, so sharing it is equivalent to building two.
    val accuracyEvaluator = new MulticlassClassificationEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("accuracy")

    // Logistic regression: load the saved model, predict, score. // example path
    val lrModel = LogisticRegressionModel.load("C:\\Users\\wyatt\\Documents\\ride_hailing_logistic_model")
    val lrAccuracy = accuracyEvaluator.evaluate(lrModel.transform(labeledTest))
    println("逻辑回归模型后期数据准确率:" + lrAccuracy)

    // Random forest: same flow with the corrected model directory. // example path
    val rfModel = RandomForestClassificationModel.load("C:\\Users\\wyatt\\Documents\\ride_hailing_random_forest_model")
    val rfAccuracy = accuracyEvaluator.evaluate(rfModel.transform(labeledTest))
    println("随机森林模型后期数据准确率:" + rfAccuracy)

    session.stop()
  }
}
06-09
"Z:\Program Files\Java\jdk1.8.0_181\bin\java.exe" "-javaagent:Z:\Java文件\lntelliJ IDEA\IDEA安装包\IntelliJ IDEA Community Edition 2022.2.1\lib\idea_rt.jar=52236:Z:\Java文件\lntelliJ IDEA\IDEA安装包\IntelliJ IDEA Community Edition 2022.2.1\bin" -Dfile.encoding=UTF-8 -classpath "Z:\Program Files\Java\jdk1.8.0_181\jre\lib\charsets.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\deploy.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\access-bridge-64.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\cldrdata.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\dnsns.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\jaccess.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\jfxrt.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\localedata.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\nashorn.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunec.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunjce_provider.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunmscapi.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\sunpkcs11.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\ext\zipfs.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\javaws.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jce.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jfr.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jfxswt.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\jsse.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\management-agent.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\plugin.jar;Z:\Program Files\Java\jdk1.8.0_181\jre\lib\resources.jar;Z:\Program 
Files\Java\jdk1.8.0_181\jre\lib\rt.jar;Z:\sparkdemo\sparkdemo2402\sparkdemo2402\target\classes;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-library.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-parser-combinators_2.12-1.0.7.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-reflect.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-swing_2.12-2.0.3.jar;D:\apach-maven-3.5.4\scala-2.12.15\lib\scala-xml_2.12-1.0.6.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-client\3.1.3\hadoop-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-common\3.1.3\hadoop-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-math3\3.1.1\commons-math3-3.1.1.jar;C:\Users\zxr17\.m2\repository\commons-io\commons-io\2.5\commons-io-2.5.jar;C:\Users\zxr17\.m2\repository\commons-net\commons-net\3.6\commons-net-3.6.jar;C:\Users\zxr17\.m2\repository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-servlet\9.3.24.v20180605\jetty-servlet-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-security\9.3.24.v20180605\jetty-security-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-webapp\9.3.24.v20180605\jetty-webapp-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-xml\9.3.24.v20180605\jetty-xml-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\javax\servlet\jsp\jsp-api\2.1\jsp-api-2.1.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-servlet\1.19\jersey-servlet-1.19.jar;C:\Users\zxr17\.m2\repository\commons-logging\commons-logging\1.1.3\commons-logging-1.1.3.jar;C:\Users\zxr17\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\zxr17\.m2\repository\commons-beanutils\commons-beanutils\1.9.3\commons-beanutils-1.9.3.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-configuration2\2.1.1\commons-configuration2-2.1.1.jar;C:\Users\zxr17\.m2\repository\org\apache\common
s\commons-lang3\3.4\commons-lang3-3.4.jar;C:\Users\zxr17\.m2\repository\com\google\re2j\re2j\1.1\re2j-1.1.jar;C:\Users\zxr17\.m2\repository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\Users\zxr17\.m2\repository\com\google\code\gson\gson\2.2.4\gson-2.2.4.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-client\2.13.0\curator-client-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-recipes\2.13.0\curator-recipes-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\htrace\htrace-core4\4.1.0-incubating\htrace-core4-4.1.0-incubating.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-compress\1.18\commons-compress-1.18.jar;C:\Users\zxr17\.m2\repository\org\codehaus\woodstox\stax2-api\3.1.4\stax2-api-3.1.4.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\woodstox\woodstox-core\5.0.3\woodstox-core-5.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-hdfs-client\3.1.3\hadoop-hdfs-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\com\squareup\okhttp\okhttp\2.7.5\okhttp-2.7.5.jar;C:\Users\zxr17\.m2\repository\com\squareup\okio\okio\1.6.0\okio-1.6.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.7.8\jackson-annotations-2.7.8.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-api\3.1.3\hadoop-yarn-api-3.1.3.jar;C:\Users\zxr17\.m2\repository\javax\xml\bind\jaxb-api\2.2.11\jaxb-api-2.2.11.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-client\3.1.3\hadoop-yarn-client-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-core\3.1.3\hadoop-mapreduce-client-core-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-yarn-common\3.1.3\hadoop-yarn-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\eclipse\jetty\jetty-util\9.3.24.v20180605\jetty-util-9.3.24.v20180605.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-core\1.19\jersey-core-1.19.jar;C:\Users\zxr17\.m2\repository\javax\ws\rs\jsr311-api\1.1.1\jsr311-
api-1.1.1.jar;C:\Users\zxr17\.m2\repository\com\sun\jersey\jersey-client\1.19\jersey-client-1.19.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jackson-module-jaxb-annotations\2.7.8\jackson-module-jaxb-annotations-2.7.8.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\jaxrs\jackson-jaxrs-json-provider\2.7.8\jackson-jaxrs-json-provider-2.7.8.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\jaxrs\jackson-jaxrs-base\2.7.8\jackson-jaxrs-base-2.7.8.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-jobclient\3.1.3\hadoop-mapreduce-client-jobclient-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-common\3.1.3\hadoop-mapreduce-client-common-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-annotations\3.1.3\hadoop-annotations-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hadoop\hadoop-auth\3.1.3\hadoop-auth-3.1.3.jar;C:\Users\zxr17\.m2\repository\org\slf4j\slf4j-api\1.7.25\slf4j-api-1.7.25.jar;C:\Users\zxr17\.m2\repository\commons-codec\commons-codec\1.11\commons-codec-1.11.jar;C:\Users\zxr17\.m2\repository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\Users\zxr17\.m2\repository\org\slf4j\slf4j-log4j12\1.7.25\slf4j-log4j12-1.7.25.jar;C:\Users\zxr17\.m2\repository\org\apache\httpcomponents\httpclient\4.5.2\httpclient-4.5.2.jar;C:\Users\zxr17\.m2\repository\org\apache\httpcomponents\httpcore\4.4.4\httpcore-4.4.4.jar;C:\Users\zxr17\.m2\repository\com\nimbusds\nimbus-jose-jwt\4.41.1\nimbus-jose-jwt-4.41.1.jar;C:\Users\zxr17\.m2\repository\com\github\stephenc\jcip\jcip-annotations\1.0-1\jcip-annotations-1.0-1.jar;C:\Users\zxr17\.m2\repository\net\minidev\json-smart\2.3\json-smart-2.3.jar;C:\Users\zxr17\.m2\repository\net\minidev\accessors-smart\1.2\accessors-smart-1.2.jar;C:\Users\zxr17\.m2\repository\org\ow2\asm\asm\5.0.4\asm-5.0.4.jar;C:\Users\zxr17\.m2\repository\org\apache\zookeeper\zookeeper\3.4.13\zookeeper-3.4.13.jar;C:\Users\zxr17\.m2\repository\org\apache\yetus\audience
-annotations\0.5.0\audience-annotations-0.5.0.jar;C:\Users\zxr17\.m2\repository\io\netty\netty\3.10.6.Final\netty-3.10.6.Final.jar;C:\Users\zxr17\.m2\repository\org\apache\curator\curator-framework\2.13.0\curator-framework-2.13.0.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-simplekdc\1.0.1\kerb-simplekdc-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-client\1.0.1\kerb-client-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-config\1.0.1\kerby-config-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-core\1.0.1\kerb-core-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-pkix\1.0.1\kerby-pkix-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-asn1\1.0.1\kerby-asn1-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-util\1.0.1\kerby-util-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-common\1.0.1\kerb-common-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-crypto\1.0.1\kerb-crypto-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-util\1.0.1\kerb-util-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\token-provider\1.0.1\token-provider-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-admin\1.0.1\kerb-admin-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-server\1.0.1\kerb-server-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerb-identity\1.0.1\kerb-identity-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\kerby\kerby-xdr\1.0.1\kerby-xdr-1.0.1.jar;C:\Users\zxr17\.m2\repository\com\google\guava\guava\27.0-jre\guava-27.0-jre.jar;C:\Users\zxr17\.m2\repository\com\google\guava\failureaccess\1.0\failureaccess-1.0.jar;C:\Users\zxr17\.m2\repository\com\google\guava\listenablefuture\9999.0-empty-to-avoid-conflict-with-guava\listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar;C:\Users\zxr17\.m2\repository\org\checkerframework\checker-qual\2.5.2\checker-qual-2.5.2.jar;C:\Users\zxr17\.m2\repository\com
\google\errorprone\error_prone_annotations\2.2.0\error_prone_annotations-2.2.0.jar;C:\Users\zxr17\.m2\repository\com\google\j2objc\j2objc-annotations\1.1\j2objc-annotations-1.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\mojo\animal-sniffer-annotations\1.17\animal-sniffer-annotations-1.17.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-sql_2.12\3.0.3\spark-sql_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\com\univocity\univocity-parsers\2.9.0\univocity-parsers-2.9.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-sketch_2.12\3.0.3\spark-sketch_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-core_2.12\3.0.3\spark-core_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\com\thoughtworks\paranamer\paranamer\2.8\paranamer-2.8.jar;C:\Users\zxr17\.m2\repository\com\twitter\chill_2.12\0.9.5\chill_2.12-0.9.5.jar;C:\Users\zxr17\.m2\repository\com\esotericsoftware\kryo-shaded\4.0.2\kryo-shaded-4.0.2.jar;C:\Users\zxr17\.m2\repository\com\esotericsoftware\minlog\1.3.0\minlog-1.3.0.jar;C:\Users\zxr17\.m2\repository\org\objenesis\objenesis\2.5.1\objenesis-2.5.1.jar;C:\Users\zxr17\.m2\repository\com\twitter\chill-java\0.9.5\chill-java-0.9.5.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-launcher_2.12\3.0.3\spark-launcher_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-kvstore_2.12\3.0.3\spark-kvstore_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-network-common_2.12\3.0.3\spark-network-common_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-network-shuffle_2.12\3.0.3\spark-network-shuffle_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-unsafe_2.12\3.0.3\spark-unsafe_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\javax\activation\activation\1.1.1\activation-1.1.1.jar;C:\Users\zxr17\.m2\repository\javax\servlet\javax.servlet-api\3.1.0\javax.servlet-api-3.1.0.jar;C:\Us
ers\zxr17\.m2\repository\org\apache\commons\commons-text\1.6\commons-text-1.6.jar;C:\Users\zxr17\.m2\repository\org\slf4j\jul-to-slf4j\1.7.30\jul-to-slf4j-1.7.30.jar;C:\Users\zxr17\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.30\jcl-over-slf4j-1.7.30.jar;C:\Users\zxr17\.m2\repository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\Users\zxr17\.m2\repository\org\xerial\snappy\snappy-java\1.1.8.2\snappy-java-1.1.8.2.jar;C:\Users\zxr17\.m2\repository\org\lz4\lz4-java\1.7.1\lz4-java-1.7.1.jar;C:\Users\zxr17\.m2\repository\com\github\luben\zstd-jni\1.4.4-3\zstd-jni-1.4.4-3.jar;C:\Users\zxr17\.m2\repository\org\roaringbitmap\RoaringBitmap\0.7.45\RoaringBitmap-0.7.45.jar;C:\Users\zxr17\.m2\repository\org\roaringbitmap\shims\0.7.45\shims-0.7.45.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\modules\scala-xml_2.12\1.2.0\scala-xml_2.12-1.2.0.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\scala-library\2.12.10\scala-library-2.12.10.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\scala-reflect\2.12.10\scala-reflect-2.12.10.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-jackson_2.12\3.6.6\json4s-jackson_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-core_2.12\3.6.6\json4s-core_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-ast_2.12\3.6.6\json4s-ast_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\json4s\json4s-scalap_2.12\3.6.6\json4s-scalap_2.12-3.6.6.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-client\2.30\jersey-client-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\ws\rs\jakarta.ws.rs-api\2.1.6\jakarta.ws.rs-api-2.1.6.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\external\jakarta.inject\2.6.1\jakarta.inject-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-common\2.30\jersey-common-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\annotation\jakarta.annotation-api\1.3.5\jakarta.annotation-api-1.3.5.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\osgi-resource-locator\1.0.3\osgi-
resource-locator-1.0.3.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\core\jersey-server\2.30\jersey-server-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\media\jersey-media-jaxb\2.30\jersey-media-jaxb-2.30.jar;C:\Users\zxr17\.m2\repository\jakarta\validation\jakarta.validation-api\2.0.2\jakarta.validation-api-2.0.2.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet\2.30\jersey-container-servlet-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet-core\2.30\jersey-container-servlet-core-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\jersey\inject\jersey-hk2\2.30\jersey-hk2-2.30.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-locator\2.6.1\hk2-locator-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\external\aopalliance-repackaged\2.6.1\aopalliance-repackaged-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-api\2.6.1\hk2-api-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\glassfish\hk2\hk2-utils\2.6.1\hk2-utils-2.6.1.jar;C:\Users\zxr17\.m2\repository\org\javassist\javassist\3.25.0-GA\javassist-3.25.0-GA.jar;C:\Users\zxr17\.m2\repository\io\netty\netty-all\4.1.47.Final\netty-all-4.1.47.Final.jar;C:\Users\zxr17\.m2\repository\com\clearspring\analytics\stream\2.9.6\stream-2.9.6.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-core\4.1.1\metrics-core-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-jvm\4.1.1\metrics-jvm-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-json\4.1.1\metrics-json-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-graphite\4.1.1\metrics-graphite-4.1.1.jar;C:\Users\zxr17\.m2\repository\io\dropwizard\metrics\metrics-jmx\4.1.1\metrics-jmx-4.1.1.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jackson-module-scala_2.12\2.10.0\jackson-module-scala_2.12-2.10.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\module\jacks
on-module-paranamer\2.10.0\jackson-module-paranamer-2.10.0.jar;C:\Users\zxr17\.m2\repository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;C:\Users\zxr17\.m2\repository\oro\oro\2.0.8\oro-2.0.8.jar;C:\Users\zxr17\.m2\repository\net\razorvine\pyrolite\4.30\pyrolite-4.30.jar;C:\Users\zxr17\.m2\repository\net\sf\py4j\py4j\0.10.9\py4j-0.10.9.jar;C:\Users\zxr17\.m2\repository\org\apache\commons\commons-crypto\1.1.0\commons-crypto-1.1.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-catalyst_2.12\3.0.3\spark-catalyst_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\scala-lang\modules\scala-parser-combinators_2.12\1.1.2\scala-parser-combinators_2.12-1.1.2.jar;C:\Users\zxr17\.m2\repository\org\codehaus\janino\janino\3.0.16\janino-3.0.16.jar;C:\Users\zxr17\.m2\repository\org\codehaus\janino\commons-compiler\3.0.16\commons-compiler-3.0.16.jar;C:\Users\zxr17\.m2\repository\org\antlr\antlr4-runtime\4.7.1\antlr4-runtime-4.7.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-vector\0.15.1\arrow-vector-0.15.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-format\0.15.1\arrow-format-0.15.1.jar;C:\Users\zxr17\.m2\repository\org\apache\arrow\arrow-memory\0.15.1\arrow-memory-0.15.1.jar;C:\Users\zxr17\.m2\repository\com\google\flatbuffers\flatbuffers-java\1.9.0\flatbuffers-java-1.9.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-tags_2.12\3.0.3\spark-tags_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-core\1.5.10\orc-core-1.5.10.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-shims\1.5.10\orc-shims-1.5.10.jar;C:\Users\zxr17\.m2\repository\io\airlift\aircompressor\0.10\aircompressor-0.10.jar;C:\Users\zxr17\.m2\repository\org\threeten\threeten-extra\1.5.0\threeten-extra-1.5.0.jar;C:\Users\zxr17\.m2\repository\org\apache\orc\orc-mapreduce\1.5.10\orc-mapreduce-1.5.10.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-storage-api\2.7.1\hive-storage-api-2.7.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-column\1.10.1\pa
rquet-column-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-common\1.10.1\parquet-common-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-encoding\1.10.1\parquet-encoding-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-hadoop\1.10.1\parquet-hadoop-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-format\2.4.0\parquet-format-2.4.0.jar;C:\Users\zxr17\.m2\repository\org\apache\parquet\parquet-jackson\1.10.1\parquet-jackson-1.10.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.10.0\jackson-databind-2.10.0.jar;C:\Users\zxr17\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.10.0\jackson-core-2.10.0.jar;C:\Users\zxr17\.m2\repository\org\apache\xbean\xbean-asm7-shaded\4.15\xbean-asm7-shaded-4.15.jar;C:\Users\zxr17\.m2\repository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;C:\Users\zxr17\.m2\repository\org\apache\spark\spark-hive_2.12\3.0.3\spark-hive_2.12-3.0.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-common\2.3.7\hive-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\jline\jline\2.12\jline-2.12.jar;C:\Users\zxr17\.m2\repository\com\tdunning\json\1.8\json-1.8.jar;C:\Users\zxr17\.m2\repository\com\github\joshelser\dropwizard-metrics-hadoop-metrics2-reporter\0.1.2\dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-exec\2.3.7\hive-exec-2.3.7-core.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-vector-code-gen\2.3.7\hive-vector-code-gen-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\velocity\velocity\1.5\velocity-1.5.jar;C:\Users\zxr17\.m2\repository\org\antlr\antlr-runtime\3.5.2\antlr-runtime-3.5.2.jar;C:\Users\zxr17\.m2\repository\org\antlr\ST4\4.0.4\ST4-4.0.4.jar;C:\Users\zxr17\.m2\repository\stax\stax-api\1.0.1\stax-api-1.0.1.jar;C:\Users\zxr17\.m2\repository\org\apache\h
ive\hive-metastore\2.3.7\hive-metastore-2.3.7.jar;C:\Users\zxr17\.m2\repository\javolution\javolution\5.5.1\javolution-5.5.1.jar;C:\Users\zxr17\.m2\repository\com\jolbox\bonecp\0.8.0.RELEASE\bonecp-0.8.0.RELEASE.jar;C:\Users\zxr17\.m2\repository\com\zaxxer\HikariCP\2.5.1\HikariCP-2.5.1.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-api-jdo\4.2.4\datanucleus-api-jdo-4.2.4.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-rdbms\4.1.19\datanucleus-rdbms-4.1.19.jar;C:\Users\zxr17\.m2\repository\commons-pool\commons-pool\1.5.4\commons-pool-1.5.4.jar;C:\Users\zxr17\.m2\repository\commons-dbcp\commons-dbcp\1.4\commons-dbcp-1.4.jar;C:\Users\zxr17\.m2\repository\javax\jdo\jdo-api\3.0.1\jdo-api-3.0.1.jar;C:\Users\zxr17\.m2\repository\javax\transaction\jta\1.1\jta-1.1.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\javax.jdo\3.2.0-m3\javax.jdo-3.2.0-m3.jar;C:\Users\zxr17\.m2\repository\javax\transaction\transaction-api\1.1\transaction-api-1.1.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-serde\2.3.7\hive-serde-2.3.7.jar;C:\Users\zxr17\.m2\repository\net\sf\opencsv\opencsv\2.3\opencsv-2.3.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-shims\2.3.7\hive-shims-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-common\2.3.7\hive-shims-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-0.23\2.3.7\hive-shims-0.23-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\shims\hive-shims-scheduler\2.3.7\hive-shims-scheduler-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-llap-common\2.3.7\hive-llap-common-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\hive\hive-llap-client\2.3.7\hive-llap-client-2.3.7.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\avro\1.8.2\avro-1.8.2.jar;C:\Users\zxr17\.m2\repository\org\tukaani\xz\1.5\xz-1.5.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\avro-mapred\1.8.2\avro-mapred-1.8.2-hadoop2.jar;C:\Users\zxr17\.m2\repository\org\apache\avro\av
ro-ipc\1.8.2\avro-ipc-1.8.2.jar;C:\Users\zxr17\.m2\repository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;C:\Users\zxr17\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;C:\Users\zxr17\.m2\repository\joda-time\joda-time\2.10.5\joda-time-2.10.5.jar;C:\Users\zxr17\.m2\repository\org\jodd\jodd-core\3.5.2\jodd-core-3.5.2.jar;C:\Users\zxr17\.m2\repository\com\google\code\findbugs\jsr305\3.0.0\jsr305-3.0.0.jar;C:\Users\zxr17\.m2\repository\org\datanucleus\datanucleus-core\4.1.17\datanucleus-core-4.1.17.jar;C:\Users\zxr17\.m2\repository\org\apache\thrift\libthrift\0.12.0\libthrift-0.12.0.jar;C:\Users\zxr17\.m2\repository\org\apache\thrift\libfb303\0.9.3\libfb303-0.9.3.jar;C:\Users\zxr17\.m2\repository\org\apache\derby\derby\10.12.1.1\derby-10.12.1.1.jar;C:\Users\zxr17\.m2\repository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar" bigdata2402.SparkDemo Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/14 15:36:52 INFO SparkContext: Running Spark version 3.0.3 25/10/14 15:36:52 WARN Shell: Did not find winutils.exe: {} java.io.FileNotFoundException: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. 
-see https://wiki.apache.org/hadoop/WindowsProblems at org.apache.hadoop.util.Shell.fileNotFoundException(Shell.java:549) at org.apache.hadoop.util.Shell.getHadoopHomeDir(Shell.java:570) at org.apache.hadoop.util.Shell.getQualifiedBin(Shell.java:593) at org.apache.hadoop.util.Shell.<clinit>(Shell.java:690) at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:78) at org.apache.hadoop.conf.Configuration.getBoolean(Configuration.java:1665) at org.apache.hadoop.security.SecurityUtil.setConfigurationInternal(SecurityUtil.java:102) at org.apache.hadoop.security.SecurityUtil.<clinit>(SecurityUtil.java:86) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:315) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:303) at org.apache.hadoop.security.UserGroupInformation.doSubjectLogin(UserGroupInformation.java:1827) at org.apache.hadoop.security.UserGroupInformation.createLoginUser(UserGroupInformation.java:709) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:659) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:570) at org.apache.spark.util.Utils$.$anonfun$getCurrentUserName$1(Utils.scala:2414) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.util.Utils$.getCurrentUserName(Utils.scala:2414) at org.apache.spark.SparkContext.<init>(SparkContext.scala:308) at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2589) at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:937) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:931) at bigdata2402.SparkDemo$.main(SparkDemo.scala:27) at bigdata2402.SparkDemo.main(SparkDemo.scala) Caused by: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. 
at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:469) at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:440) at org.apache.hadoop.util.Shell.<clinit>(Shell.java:517) ... 20 more 25/10/14 15:36:52 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/14 15:36:52 INFO ResourceUtils: ============================================================== 25/10/14 15:36:52 INFO ResourceUtils: Resources for spark.driver: 25/10/14 15:36:52 INFO ResourceUtils: ============================================================== 25/10/14 15:36:52 INFO SparkContext: Submitted application: ducsv 25/10/14 15:36:52 INFO SecurityManager: Changing view acls to: zxr17 25/10/14 15:36:52 INFO SecurityManager: Changing modify acls to: zxr17 25/10/14 15:36:52 INFO SecurityManager: Changing view acls groups to: 25/10/14 15:36:52 INFO SecurityManager: Changing modify acls groups to: 25/10/14 15:36:52 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(zxr17); groups with view permissions: Set(); users with modify permissions: Set(zxr17); groups with modify permissions: Set() 25/10/14 15:36:53 INFO Utils: Successfully started service 'sparkDriver' on port 52248. 
25/10/14 15:36:53 INFO SparkEnv: Registering MapOutputTracker 25/10/14 15:36:53 INFO SparkEnv: Registering BlockManagerMaster 25/10/14 15:36:53 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/14 15:36:53 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up Exception in thread "main" java.lang.NoSuchFieldError: JAVA_9 at org.apache.spark.storage.StorageUtils$.<init>(StorageUtils.scala:207) at org.apache.spark.storage.StorageUtils$.<clinit>(StorageUtils.scala) at org.apache.spark.storage.BlockManagerMasterEndpoint.<init>(BlockManagerMasterEndpoint.scala:93) at org.apache.spark.SparkEnv$.$anonfun$create$9(SparkEnv.scala:370) at org.apache.spark.SparkEnv$.registerOrLookupEndpoint$1(SparkEnv.scala:311) at org.apache.spark.SparkEnv$.create(SparkEnv.scala:359) at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189) at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:272) at org.apache.spark.SparkContext.<init>(SparkContext.scala:448) at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2589) at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:937) at scala.Option.getOrElse(Option.scala:189) at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:931) at bigdata2402.SparkDemo$.main(SparkDemo.scala:27) at bigdata2402.SparkDemo.main(SparkDemo.scala) 进程已结束,退出代码1
最新发布
10-15
/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=35029:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activ
ation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-java8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/
netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-
jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/flink/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/o
rg/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/slf4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-b
ase_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scala-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat
_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/or
g/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scalap_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jerse
y/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/
org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:
/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/repo/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/loc
al/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/u
sr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/src/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/
apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase
/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/java
x.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-reflect
.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2 log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/09 15:19:59 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33) 25/10/09 15:19:59 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address 25/10/09 15:19:59 INFO SparkContext: Running Spark version 3.1.1 25/10/09 15:20:00 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/09 15:20:00 INFO ResourceUtils: ============================================================== 25/10/09 15:20:00 INFO ResourceUtils: No custom resources configured for spark.driver. 25/10/09 15:20:00 INFO ResourceUtils: ============================================================== 25/10/09 15:20:00 INFO SparkContext: Submitted application: RandomForestModel 25/10/09 15:20:00 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/10/09 15:20:00 INFO ResourceProfile: Limiting resource is cpu 25/10/09 15:20:00 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/10/09 15:20:00 INFO SecurityManager: Changing view acls to: root 25/10/09 15:20:00 INFO SecurityManager: Changing modify acls to: root 25/10/09 15:20:00 INFO SecurityManager: Changing view acls groups to: 25/10/09 15:20:00 INFO SecurityManager: Changing modify acls groups to: 25/10/09 15:20:00 INFO SecurityManager: SecurityManager: authentication disabled; ui 
acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/10/09 15:20:00 INFO Utils: Successfully started service 'sparkDriver' on port 39167. 25/10/09 15:20:00 INFO SparkEnv: Registering MapOutputTracker 25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMaster 25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/09 15:20:00 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/10/09 15:20:00 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/10/09 15:20:00 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-7824d7b3-541f-4ec6-88e6-298ad263b86a 25/10/09 15:20:00 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB 25/10/09 15:20:00 INFO SparkEnv: Registering OutputCommitCoordinator 25/10/09 15:20:01 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/10/09 15:20:01 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040 25/10/09 15:20:01 INFO Executor: Starting executor ID driver on host 192.168.75.3 25/10/09 15:20:01 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 33353. 
25/10/09 15:20:01 INFO NettyBlockTransferService: Server created on 192.168.75.3:33353 25/10/09 15:20:01 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/10/09 15:20:01 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:33353 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:01 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 33353, None) 25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:02 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:03 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:20:03 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored 25/10/09 15:20:03 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored 25/10/09 15:20:03 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored 25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:20:04 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 随机森林模型训练完成! 预测完成,前5条结果: +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ +-----------------+--------------------+ 预测结果已成功写入 MySQL 表 ml_result! 
Hive 中 dwd.fact_machine_learning_data_test 预测完毕。 请在 MySQL 中执行以下查询语句查看结果: SELECT * FROM ml_result WHERE machine_record_id IN (1,8,20,28,36); +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ +-----------------+--------------------+ 代码显示空值
10-10
/usr/local/jdk1.8.0_341/bin/java -javaagent:/opt/idea-IC-223.8836.41/lib/idea_rt.jar=46819:/opt/idea-IC-223.8836.41/bin -Dfile.encoding=UTF-8 -classpath /usr/local/jdk1.8.0_341/jre/lib/charsets.jar:/usr/local/jdk1.8.0_341/jre/lib/deploy.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/cldrdata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/dnsns.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jaccess.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/jfxrt.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/localedata.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/nashorn.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunec.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunjce_provider.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/sunpkcs11.jar:/usr/local/jdk1.8.0_341/jre/lib/ext/zipfs.jar:/usr/local/jdk1.8.0_341/jre/lib/javaws.jar:/usr/local/jdk1.8.0_341/jre/lib/jce.jar:/usr/local/jdk1.8.0_341/jre/lib/jfr.jar:/usr/local/jdk1.8.0_341/jre/lib/jfxswt.jar:/usr/local/jdk1.8.0_341/jre/lib/jsse.jar:/usr/local/jdk1.8.0_341/jre/lib/management-agent.jar:/usr/local/jdk1.8.0_341/jre/lib/plugin.jar:/usr/local/jdk1.8.0_341/jre/lib/resources.jar:/usr/local/jdk1.8.0_341/jre/lib/rt.jar:/root/IdeaProjects/demo20250411/target/classes:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-databind/2.10.4/jackson-databind-2.10.4.jar:/usr/local/src/repo/org/dom4j/dom4j/2.1.4/dom4j-2.1.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-core/2.10.4/jackson-core-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/core/jackson-annotations/2.10.4/jackson-annotations-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.10.4/jackson-jaxrs-json-provider-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.10.4/jackson-jaxrs-base-2.10.4.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.10.4/jackson-module-jaxb-annotations-2.10.4.jar:/usr/local/src/repo/jakarta/xml/bind/jakarta.xml.bind-api/2.3.2/jakarta.xml.bind-api-2.3.2.jar:/usr/local/src/repo/jakarta/activ
ation/jakarta.activation-api/1.2.1/jakarta.activation-api-1.2.1.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-xml/2.10.4/jackson-dataformat-xml-2.10.4.jar:/usr/local/src/repo/org/codehaus/woodstox/stax2-api/4.2/stax2-api-4.2.jar:/usr/local/src/repo/com/fasterxml/woodstox/woodstox-core/6.2.0/woodstox-core-6.2.0.jar:/usr/local/src/repo/org/scala-lang/scala-reflect/2.12.10/scala-reflect-2.12.10.jar:/usr/local/src/repo/org/scala-lang/scala-compiler/2.12.10/scala-compiler-2.12.10.jar:/usr/local/src/repo/org/scala-lang/modules/scala-xml_2.12/1.0.6/scala-xml_2.12-1.0.6.jar:/usr/local/src/repo/org/scala-lang/scala-library/2.12.10/scala-library-2.12.10.jar:/usr/local/src/repo/org/apache/kafka/kafka_2.12/2.4.1/kafka_2.12-2.4.1.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-scala_2.12/2.10.0/jackson-module-scala_2.12-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/module/jackson-module-paranamer/2.10.0/jackson-module-paranamer-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/2.10.0/jackson-dataformat-csv-2.10.0.jar:/usr/local/src/repo/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.10.0/jackson-datatype-jdk8-2.10.0.jar:/usr/local/src/repo/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:/usr/local/src/repo/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/usr/local/src/repo/org/scala-lang/modules/scala-collection-compat_2.12/2.1.2/scala-collection-compat_2.12-2.1.2.jar:/usr/local/src/repo/org/scala-lang/modules/scala-java8-compat_2.12/0.9.0/scala-java8-compat_2.12-0.9.0.jar:/usr/local/src/repo/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar:/usr/local/src/repo/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper/3.5.7/zookeeper-3.5.7.jar:/usr/local/src/repo/org/apache/zookeeper/zookeeper-jute/3.5.7/zookeeper-jute-3.5.7.jar:/usr/local/src/repo/io/netty/netty-handler/4.1.45.Final/
netty-handler-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-common/4.1.45.Final/netty-common-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-buffer/4.1.45.Final/netty-buffer-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport/4.1.45.Final/netty-transport-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-resolver/4.1.45.Final/netty-resolver-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-codec/4.1.45.Final/netty-codec-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-epoll/4.1.45.Final/netty-transport-native-epoll-4.1.45.Final.jar:/usr/local/src/repo/io/netty/netty-transport-native-unix-common/4.1.45.Final/netty-transport-native-unix-common-4.1.45.Final.jar:/usr/local/src/repo/commons-cli/commons-cli/1.4/commons-cli-1.4.jar:/usr/local/src/repo/org/apache/flink/flink-connector-jdbc_2.12/1.14.0/flink-connector-jdbc_2.12-1.14.0.jar:/usr/local/src/repo/com/h2database/h2/1.4.200/h2-1.4.200.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-force-shading/14.0/flink-shaded-force-shading-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime-web_2.12/1.14.0/flink-runtime-web_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-runtime/1.14.0/flink-runtime-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-core/1.14.0/flink-rpc-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-rpc-akka-loader/1.14.0/flink-rpc-akka-loader-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-queryable-state-client-java/1.14.0/flink-queryable-state-client-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-hadoop-fs/1.14.0/flink-hadoop-fs-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-14.0/flink-shaded-zookeeper-3-3.4.14-14.0.jar:/usr/local/src/repo/org/javassist/javassist/3.24.0-GA/javassist-3.24.0-GA.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-netty/4.1.65.Final-14.0/flink-shaded-netty-4.1.65.Final-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-guava/30.1.1-
jre-14.0/flink-shaded-guava-30.1.1-jre-14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-jackson/2.12.4-14.0/flink-shaded-jackson-2.12.4-14.0.jar:/usr/local/src/repo/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/usr/local/src/repo/org/apache/flink/flink-clients_2.12/1.14.0/flink-clients_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-core/1.14.0/flink-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-annotations/1.14.0/flink-annotations-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-metrics-core/1.14.0/flink-metrics-core-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-shaded-asm-7/7.1-14.0/flink-shaded-asm-7-7.1-14.0.jar:/usr/local/src/repo/com/esotericsoftware/kryo/kryo/2.24.0/kryo-2.24.0.jar:/usr/local/src/repo/com/esotericsoftware/minlog/minlog/1.2/minlog-1.2.jar:/usr/local/src/repo/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/usr/local/src/repo/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/usr/local/src/repo/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:/usr/local/src/repo/org/apache/flink/flink-optimizer/1.14.0/flink-optimizer-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-java/1.14.0/flink-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-java_2.12/1.14.0/flink-streaming-java_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-file-sink-common/1.14.0/flink-file-sink-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-streaming-scala_2.12/1.14.0/flink-streaming-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-scala_2.12/1.14.0/flink-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-kafka_2.12/1.14.0/flink-connector-kafka_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-base/1.14.0/flink-connector-base-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-sql-connector-hbase-2.2_2.12/1.14.0/flink-sql-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/o
rg/apache/flink/flink-table-planner_2.12/1.14.0/flink-table-planner_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-common/1.14.0/flink-table-common-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java/1.14.0/flink-table-api-java-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala_2.12/1.14.0/flink-table-api-scala_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-java-bridge_2.12/1.14.0/flink-table-api-java-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-runtime_2.12/1.14.0/flink-table-runtime_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-code-splitter/1.14.0/flink-table-code-splitter-1.14.0.jar:/usr/local/src/repo/org/codehaus/janino/janino/3.0.11/janino-3.0.11.jar:/usr/local/src/repo/org/apache/calcite/avatica/avatica-core/1.17.0/avatica-core-1.17.0.jar:/usr/local/src/repo/org/apache/flink/flink-json/1.14.0/flink-json-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-table-api-scala-bridge_2.12/1.14.0/flink-table-api-scala-bridge_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-redis_2.11/1.1.5/flink-connector-redis_2.11-1.1.5.jar:/usr/local/src/repo/redis/clients/jedis/2.8.0/jedis-2.8.0.jar:/usr/local/src/repo/org/apache/commons/commons-pool2/2.3/commons-pool2-2.3.jar:/usr/local/src/repo/org/slf4j/slf4j-log4j12/1.7.7/slf4j-log4j12-1.7.7.jar:/usr/local/src/repo/log4j/log4j/1.2.17/log4j-1.2.17.jar:/usr/local/src/repo/org/apache/flink/force-shading/1.1.5/force-shading-1.1.5.jar:/usr/local/src/repo/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hive_2.12/1.14.0/flink-connector-hive_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-files/1.14.0/flink-connector-files-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-2.2_2.12/1.14.0/flink-connector-hbase-2.2_2.12-1.14.0.jar:/usr/local/src/repo/org/apache/flink/flink-connector-hbase-b
ase_2.12/1.14.0/flink-connector-hbase-base_2.12-1.14.0.jar:/usr/local/src/repo/io/netty/netty-all/4.1.46.Final/netty-all-4.1.46.Final.jar:/usr/local/src/repo/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar:/usr/local/src/repo/org/apache/kafka/kafka-clients/2.6.0/kafka-clients-2.6.0.jar:/usr/local/src/repo/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar:/usr/local/src/repo/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar:/usr/local/src/repo/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar:/usr/local/src/repo/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar:/usr/local/src/repo/org/apache/spark/spark-graphx_2.12/3.1.1/spark-graphx_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-mllib-local_2.12/3.1.1/spark-mllib-local_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/xbean/xbean-asm7-shaded/4.15/xbean-asm7-shaded-4.15.jar:/usr/local/src/repo/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar:/usr/local/src/repo/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar:/usr/local/src/repo/org/apache/spark/spark-tags_2.12/3.1.1/spark-tags_2.12-3.1.1.jar:/usr/local/src/repo/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/usr/local/src/repo/org/apache/spark/spark-mllib_2.12/3.1.1/spark-mllib_2.12-3.1.1.jar:/usr/local/src/repo/org/scala-lang/modules/scala-parser-combinators_2.12/1.1.2/scala-parser-combinators_2.12-1.1.2.jar:/usr/local/src/repo/org/apache/spark/spark-streaming_2.12/3.1.1/spark-streaming_2.12-3.1.1.jar:/usr/local/src/repo/org/scalanlp/breeze_2.12/1.0/breeze_2.12-1.0.jar:/usr/local/src/repo/org/scalanlp/breeze-macros_2.12/1.0/breeze-macros_2.12-1.0.jar:/usr/local/src/repo/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/usr/local/src/repo/com/github/wendykierp/JTransforms/3.1/JTransforms-3.1.jar:/usr/local/src/repo/pl/edu/icm/JLargeArrays/1.5/JLargeArrays-1.5.jar:/usr/local/src/repo/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar:/usr/local/src/repo/org/typelevel/macro-compat_2.12/1.1.1/macro-compat
_2.12-1.1.1.jar:/usr/local/src/repo/org/typelevel/spire_2.12/0.17.0-M1/spire_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-macros_2.12/0.17.0-M1/spire-macros_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-platform_2.12/0.17.0-M1/spire-platform_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/spire-util_2.12/0.17.0-M1/spire-util_2.12-0.17.0-M1.jar:/usr/local/src/repo/org/typelevel/machinist_2.12/0.6.8/machinist_2.12-0.6.8.jar:/usr/local/src/repo/org/typelevel/algebra_2.12/2.0.0-M2/algebra_2.12-2.0.0-M2.jar:/usr/local/src/repo/org/typelevel/cats-kernel_2.12/2.0.0-M4/cats-kernel_2.12-2.0.0-M4.jar:/usr/local/src/repo/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/usr/local/src/repo/org/glassfish/jaxb/jaxb-runtime/2.3.2/jaxb-runtime-2.3.2.jar:/usr/local/src/repo/com/sun/istack/istack-commons-runtime/3.0.8/istack-commons-runtime-3.0.8.jar:/usr/local/src/repo/org/apache/spark/spark-core_2.12/3.1.1/spark-core_2.12-3.1.1.jar:/usr/local/src/repo/com/thoughtworks/paranamer/paranamer/2.8/paranamer-2.8.jar:/usr/local/src/repo/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/usr/local/src/repo/org/tukaani/xz/1.5/xz-1.5.jar:/usr/local/src/repo/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar:/usr/local/src/repo/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar:/usr/local/src/repo/com/twitter/chill_2.12/0.9.5/chill_2.12-0.9.5.jar:/usr/local/src/repo/com/esotericsoftware/kryo-shaded/4.0.2/kryo-shaded-4.0.2.jar:/usr/local/src/repo/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/usr/local/src/repo/com/twitter/chill-java/0.9.5/chill-java-0.9.5.jar:/usr/local/src/repo/org/apache/spark/spark-launcher_2.12/3.1.1/spark-launcher_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-kvstore_2.12/3.1.1/spark-kvstore_2.12-3.1.1.jar:/usr/local/src/repo/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/usr/local/src/repo/or
g/apache/spark/spark-network-common_2.12/3.1.1/spark-network-common_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-network-shuffle_2.12/3.1.1/spark-network-shuffle_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/spark/spark-unsafe_2.12/3.1.1/spark-unsafe_2.12-3.1.1.jar:/usr/local/src/repo/javax/activation/activation/1.1.1/activation-1.1.1.jar:/usr/local/src/repo/org/apache/curator/curator-recipes/2.13.0/curator-recipes-2.13.0.jar:/usr/local/src/repo/jakarta/servlet/jakarta.servlet-api/4.0.3/jakarta.servlet-api-4.0.3.jar:/usr/local/src/repo/org/apache/commons/commons-text/1.6/commons-text-1.6.jar:/usr/local/src/repo/org/slf4j/jul-to-slf4j/1.7.30/jul-to-slf4j-1.7.30.jar:/usr/local/src/repo/org/slf4j/jcl-over-slf4j/1.7.30/jcl-over-slf4j-1.7.30.jar:/usr/local/src/repo/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/usr/local/src/repo/org/roaringbitmap/RoaringBitmap/0.9.0/RoaringBitmap-0.9.0.jar:/usr/local/src/repo/org/roaringbitmap/shims/0.9.0/shims-0.9.0.jar:/usr/local/src/repo/commons-net/commons-net/3.1/commons-net-3.1.jar:/usr/local/src/repo/org/json4s/json4s-jackson_2.12/3.7.0-M5/json4s-jackson_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-core_2.12/3.7.0-M5/json4s-core_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-ast_2.12/3.7.0-M5/json4s-ast_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/json4s/json4s-scalap_2.12/3.7.0-M5/json4s-scalap_2.12-3.7.0-M5.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-client/2.30/jersey-client-2.30.jar:/usr/local/src/repo/jakarta/ws/rs/jakarta.ws.rs-api/2.1.6/jakarta.ws.rs-api-2.1.6.jar:/usr/local/src/repo/org/glassfish/hk2/external/jakarta.inject/2.6.1/jakarta.inject-2.6.1.jar:/usr/local/src/repo/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar:/usr/local/src/repo/jakarta/annotation/jakarta.annotation-api/1.3.5/jakarta.annotation-api-1.3.5.jar:/usr/local/src/repo/org/glassfish/hk2/osgi-resource-locator/1.0.3/osgi-resource-locator-1.0.3.jar:/usr/local/src/repo/org/glassfish/jerse
y/core/jersey-server/2.30/jersey-server-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/media/jersey-media-jaxb/2.30/jersey-media-jaxb-2.30.jar:/usr/local/src/repo/jakarta/validation/jakarta.validation-api/2.0.2/jakarta.validation-api-2.0.2.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet/2.30/jersey-container-servlet-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/containers/jersey-container-servlet-core/2.30/jersey-container-servlet-core-2.30.jar:/usr/local/src/repo/org/glassfish/jersey/inject/jersey-hk2/2.30/jersey-hk2-2.30.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-locator/2.6.1/hk2-locator-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/external/aopalliance-repackaged/2.6.1/aopalliance-repackaged-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-api/2.6.1/hk2-api-2.6.1.jar:/usr/local/src/repo/org/glassfish/hk2/hk2-utils/2.6.1/hk2-utils-2.6.1.jar:/usr/local/src/repo/com/clearspring/analytics/stream/2.9.6/stream-2.9.6.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-core/4.1.1/metrics-core-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jvm/4.1.1/metrics-jvm-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-json/4.1.1/metrics-json-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-graphite/4.1.1/metrics-graphite-4.1.1.jar:/usr/local/src/repo/io/dropwizard/metrics/metrics-jmx/4.1.1/metrics-jmx-4.1.1.jar:/usr/local/src/repo/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/usr/local/src/repo/oro/oro/2.0.8/oro-2.0.8.jar:/usr/local/src/repo/net/razorvine/pyrolite/4.30/pyrolite-4.30.jar:/usr/local/src/repo/net/sf/py4j/py4j/0.10.9/py4j-0.10.9.jar:/usr/local/src/repo/org/apache/commons/commons-crypto/1.1.0/commons-crypto-1.1.0.jar:/usr/local/src/repo/org/apache/spark/spark-sql_2.12/3.1.1/spark-sql_2.12-3.1.1.jar:/usr/local/src/repo/com/univocity/univocity-parsers/2.9.1/univocity-parsers-2.9.1.jar:/usr/local/src/repo/org/apache/spark/spark-sketch_2.12/3.1.1/spark-sketch_2.12-3.1.1.jar:/usr/local/src/repo/
org/apache/spark/spark-catalyst_2.12/3.1.1/spark-catalyst_2.12-3.1.1.jar:/usr/local/src/repo/org/codehaus/janino/commons-compiler/3.0.16/commons-compiler-3.0.16.jar:/usr/local/src/repo/org/antlr/antlr4-runtime/4.8-1/antlr4-runtime-4.8-1.jar:/usr/local/src/repo/org/apache/arrow/arrow-vector/2.0.0/arrow-vector-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-format/2.0.0/arrow-format-2.0.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-core/2.0.0/arrow-memory-core-2.0.0.jar:/usr/local/src/repo/com/google/flatbuffers/flatbuffers-java/1.9.0/flatbuffers-java-1.9.0.jar:/usr/local/src/repo/org/apache/arrow/arrow-memory-netty/2.0.0/arrow-memory-netty-2.0.0.jar:/usr/local/src/repo/org/apache/orc/orc-core/1.5.12/orc-core-1.5.12.jar:/usr/local/src/repo/org/apache/orc/orc-shims/1.5.12/orc-shims-1.5.12.jar:/usr/local/src/repo/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/usr/local/src/repo/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/usr/local/src/repo/org/threeten/threeten-extra/1.5.0/threeten-extra-1.5.0.jar:/usr/local/src/repo/org/apache/orc/orc-mapreduce/1.5.12/orc-mapreduce-1.5.12.jar:/usr/local/src/repo/org/apache/hive/hive-storage-api/2.7.2/hive-storage-api-2.7.2.jar:/usr/local/src/repo/org/apache/parquet/parquet-column/1.10.1/parquet-column-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-common/1.10.1/parquet-common-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-encoding/1.10.1/parquet-encoding-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-hadoop/1.10.1/parquet-hadoop-1.10.1.jar:/usr/local/src/repo/org/apache/parquet/parquet-format/2.4.0/parquet-format-2.4.0.jar:/usr/local/src/repo/org/apache/parquet/parquet-jackson/1.10.1/parquet-jackson-1.10.1.jar:/usr/local/src/repo/org/apache/spark/spark-hive_2.12/3.1.1/spark-hive_2.12-3.1.1.jar:/usr/local/src/repo/org/apache/hive/hive-common/2.3.7/hive-common-2.3.7.jar:/usr/local/src/repo/jline/jline/2.12/jline-2.12.jar:/usr/local/src/repo/com/tdunning/json/1.8/json-1.8.jar:
/usr/local/src/repo/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/usr/local/src/repo/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7-core.jar:/usr/local/src/repo/org/apache/hive/hive-vector-code-gen/2.3.7/hive-vector-code-gen-2.3.7.jar:/usr/local/src/repo/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/usr/local/src/repo/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/usr/local/src/repo/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/usr/local/src/repo/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/usr/local/src/repo/org/apache/hive/hive-metastore/2.3.7/hive-metastore-2.3.7.jar:/usr/local/src/repo/javolution/javolution/5.5.1/javolution-5.5.1.jar:/usr/local/src/repo/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/usr/local/src/repo/com/zaxxer/HikariCP/2.5.1/HikariCP-2.5.1.jar:/usr/local/src/repo/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/usr/local/src/repo/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/usr/local/src/repo/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/usr/local/src/repo/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/usr/local/src/repo/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/usr/local/src/repo/javax/transaction/jta/1.1/jta-1.1.jar:/usr/local/src/repo/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/usr/local/src/repo/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/usr/local/src/repo/org/apache/hive/hive-serde/2.3.7/hive-serde-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-shims/2.3.7/hive-shims-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-common/2.3.7/hive-shims-common-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-0.23/2.3.7/hive-shims-0.23-2.3.7.jar:/usr/local/src/repo/org/apache/hive/shims/hive-shims-scheduler/2.3.7/hive-shims-scheduler-2.3.7.jar:/usr/local/src/repo/org/apache/hive/hive-llap-common/2.3.7/hive-llap-common-2.3.7.jar:/usr/loc
al/src/repo/org/apache/hive/hive-llap-client/2.3.7/hive-llap-client-2.3.7.jar:/usr/local/src/repo/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/usr/local/src/repo/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar:/usr/local/src/repo/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar:/usr/local/src/repo/org/apache/httpcomponents/httpcore/4.4.10/httpcore-4.4.10.jar:/usr/local/src/repo/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/usr/local/src/repo/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/usr/local/src/repo/joda-time/joda-time/2.10.5/joda-time-2.10.5.jar:/usr/local/src/repo/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar:/usr/local/src/repo/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/usr/local/src/repo/org/apache/thrift/libthrift/0.12.0/libthrift-0.12.0.jar:/usr/local/src/repo/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/usr/local/src/repo/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-client/3.1.3/hadoop-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-common/3.1.3/hadoop-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-servlet/9.3.24.v20180605/jetty-servlet-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-security/9.3.24.v20180605/jetty-security-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-webapp/9.3.24.v20180605/jetty-webapp-9.3.24.v20180605.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-xml/9.3.24.v20180605/jetty-xml-9.3.24.v20180605.jar:/usr/local/src/repo/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/usr/local/src/repo/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/usr/local/src/repo/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/usr/local/src/repo/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/usr/local/src/repo/com/google/re2j/re2j/1.1/re2j-1.1.jar:/u
sr/local/src/repo/org/apache/curator/curator-client/2.13.0/curator-client-2.13.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs-client/3.1.3/hadoop-hdfs-client-3.1.3.jar:/usr/local/src/repo/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/usr/local/src/repo/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-api/3.1.3/hadoop-yarn-api-3.1.3.jar:/usr/local/src/repo/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-client/3.1.3/hadoop-yarn-client-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.3/hadoop-mapreduce-client-core-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-yarn-common/3.1.3/hadoop-yarn-common-3.1.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util/9.3.24.v20180605/jetty-util-9.3.24.v20180605.jar:/usr/local/src/repo/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.3/hadoop-mapreduce-client-jobclient-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.3/hadoop-mapreduce-client-common-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-annotations/3.1.3/hadoop-annotations-3.1.3.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-auth/3.1.3/hadoop-auth-3.1.3.jar:/usr/local/src/repo/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/usr/local/src/repo/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/usr/local/src/repo/net/minidev/json-smart/2.3/json-smart-2.3.jar:/usr/local/src/repo/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/usr/local/src/repo/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar:/usr/local/src/repo/org/apache/curator/curator-framework/2.13.0/curator-framework-2.13.0.jar:/usr/local/src/repo/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/usr/local/src/repo/org/
apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/usr/local/src/repo/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/usr/local/src/repo/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar:/usr/local/src/repo/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar:/usr/local/src/repo/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/usr/local/src/repo/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar:/usr/local/src/repo/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar:/usr/local/src/repo/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar:/usr/local/src/repo/org/codehaus/mojo/animal-sniffer-annotations/1.17/animal-sniffer-annotations-1.17.jar:/usr/local/src/repo/org/apache/hbase/hbase-mapreduce/2.2.3/hbase-mapreduce-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/2.2.1/hbase-shaded-miscellaneous-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/thirdparty/hbase-shaded-netty/2.2.1/hbase-shaded-netty-2.2.1.jar:/usr/local/src/repo/org/apache/hbase
/thirdparty/hbase-shaded-protobuf/2.2.1/hbase-shaded-protobuf-2.2.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-common/2.2.3/hbase-common-2.2.3.jar:/usr/local/src/repo/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/usr/local/src/repo/org/apache/hbase/hbase-zookeeper/2.2.3/hbase-zookeeper-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol/2.2.3/hbase-protocol-2.2.3.jar:/usr/local/src/repo/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-protocol-shaded/2.2.3/hbase-protocol-shaded-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics/2.2.3/hbase-metrics-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-metrics-api/2.2.3/hbase-metrics-api-2.2.3.jar:/usr/local/src/repo/org/apache/htrace/htrace-core4/4.2.0-incubating/htrace-core4-4.2.0-incubating.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop-compat/2.2.3/hbase-hadoop-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-hadoop2-compat/2.2.3/hbase-hadoop2-compat-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-server/2.2.3/hbase-server-2.2.3.jar:/usr/local/src/repo/org/apache/hbase/hbase-http/2.2.3/hbase-http-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-util-ajax/9.3.27.v20190418/jetty-util-ajax-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-http/9.3.27.v20190418/jetty-http-9.3.27.v20190418.jar:/usr/local/src/repo/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/usr/local/src/repo/org/apache/hbase/hbase-procedure/2.2.3/hbase-procedure-2.2.3.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-server/9.3.27.v20190418/jetty-server-9.3.27.v20190418.jar:/usr/local/src/repo/org/eclipse/jetty/jetty-io/9.3.27.v20190418/jetty-io-9.3.27.v20190418.jar:/usr/local/src/repo/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/usr/local/src/repo/org/glassfish/javax.el/3.0.1-b12/javax.el-3.0.1-b12.jar:/usr/local/src/repo/javax/servlet/jsp/java
x.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/usr/local/src/repo/org/jamon/jamon-runtime/2.4.1/jamon-runtime-2.4.1.jar:/usr/local/src/repo/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/usr/local/src/repo/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-distcp/2.8.5/hadoop-distcp-2.8.5.jar:/usr/local/src/repo/org/apache/hbase/hbase-replication/2.2.3/hbase-replication-2.2.3.jar:/usr/local/src/repo/commons-io/commons-io/2.5/commons-io-2.5.jar:/usr/local/src/repo/org/apache/hadoop/hadoop-hdfs/2.8.5/hadoop-hdfs-2.8.5.jar:/usr/local/src/repo/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/usr/local/src/repo/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/usr/local/src/repo/com/sun/jersey/jersey-core/1.9/jersey-core-1.9.jar:/usr/local/src/repo/com/sun/jersey/jersey-server/1.9/jersey-server-1.9.jar:/usr/local/src/repo/asm/asm/3.1/asm-3.1.jar:/usr/local/src/repo/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/usr/local/src/repo/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/usr/local/src/repo/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/usr/local/src/repo/org/apache/hbase/hbase-client/2.2.3/hbase-client-2.2.3.jar:/usr/local/src/repo/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/usr/local/src/repo/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/usr/local/src/repo/ru/yandex/clickhouse/clickhouse-jdbc/0.3.2/clickhouse-jdbc-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-http-client/0.3.2/clickhouse-http-client-0.3.2.jar:/usr/local/src/repo/com/clickhouse/clickhouse-client/0.3.2/clickhouse-client-0.3.2.jar:/usr/local/src/repo/com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:/usr/local/src/repo/org/apache/httpcomponents/httpmime/4.5.13/httpmime-4.5.13.jar:/opt/scala-2.12.10/lib/scala-parser-combinators_2.12-1.0.7.jar:/opt/scala-2.12.10/lib/scala-xml_2.12-1.0.6.jar:/opt/scala-2.12.10/lib/scala-swing_2.12-2.0.3.jar:/opt/scala-2.12.10/lib/scala-reflect
.jar:/opt/scala-2.12.10/lib/scala-library.jar gs8.shujuwaqu2 log4j:WARN No appenders could be found for logger (org.apache.hadoop.hive.conf.HiveConf). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 25/10/09 15:31:36 WARN Utils: Your hostname, pbcp resolves to a loopback address: 127.0.1.1; using 192.168.75.3 instead (on interface ens33) 25/10/09 15:31:36 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address 25/10/09 15:31:36 INFO SparkContext: Running Spark version 3.1.1 25/10/09 15:31:36 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 25/10/09 15:31:36 INFO ResourceUtils: ============================================================== 25/10/09 15:31:36 INFO ResourceUtils: No custom resources configured for spark.driver. 25/10/09 15:31:36 INFO ResourceUtils: ============================================================== 25/10/09 15:31:36 INFO SparkContext: Submitted application: RandomForestModel 25/10/09 15:31:36 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0) 25/10/09 15:31:36 INFO ResourceProfile: Limiting resource is cpu 25/10/09 15:31:36 INFO ResourceProfileManager: Added ResourceProfile id: 0 25/10/09 15:31:36 INFO SecurityManager: Changing view acls to: root 25/10/09 15:31:36 INFO SecurityManager: Changing modify acls to: root 25/10/09 15:31:36 INFO SecurityManager: Changing view acls groups to: 25/10/09 15:31:36 INFO SecurityManager: Changing modify acls groups to: 25/10/09 15:31:36 INFO SecurityManager: SecurityManager: authentication disabled; ui 
acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set() 25/10/09 15:31:37 INFO Utils: Successfully started service 'sparkDriver' on port 33909. 25/10/09 15:31:37 INFO SparkEnv: Registering MapOutputTracker 25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMaster 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up 25/10/09 15:31:37 INFO SparkEnv: Registering BlockManagerMasterHeartbeat 25/10/09 15:31:37 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-fc5a25f6-f8d7-41e2-8c52-61cc65b7fc90 25/10/09 15:31:37 INFO MemoryStore: MemoryStore started with capacity 1948.2 MiB 25/10/09 15:31:37 INFO SparkEnv: Registering OutputCommitCoordinator 25/10/09 15:31:37 INFO Utils: Successfully started service 'SparkUI' on port 4040. 25/10/09 15:31:37 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.75.3:4040 25/10/09 15:31:37 INFO Executor: Starting executor ID driver on host 192.168.75.3 25/10/09 15:31:37 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41227. 
25/10/09 15:31:37 INFO NettyBlockTransferService: Server created on 192.168.75.3:41227 25/10/09 15:31:37 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 25/10/09 15:31:37 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.75.3:41227 with 1948.2 MiB RAM, BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:37 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.75.3, 41227, None) 25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:38 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 25/10/09 15:31:39 INFO Persistence: Property datanucleus.metadata.validate unknown - will be ignored 25/10/09 15:31:39 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored 25/10/09 15:31:39 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/lib doesn't exist 25/10/09 15:31:39 ERROR FileUtils: The jar file path /opt/module/hive-3.1.2/jdbc doesn't exist 随机森林模型训练完成! 预测完成,前5条结果: +-----------------+--------------------+ |machine_record_id|machine_record_state| +-----------------+--------------------+ |1.4747628E7 |0.0 | |1.4747629E7 |0.0 | |1.474763E7 |0.0 | |1.4747631E7 |0.0 | |1.4747632E7 |0.0 | +-----------------+--------------------+ only showing top 5 rows c
10-10
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值