java利用nfs-client连接nfs,读取/下载/上传文件

本文介绍了如何在Java项目中通过ECS NFS Client库实现NFS文件系统操作,包括文件获取、上传、下载和目录遍历。重点展示了NfsUtil工具类的创建及关键方法的使用,适合NFS技术开发者参考。
该文章已生成可运行项目。

引入pom

        <dependency>
            <groupId>com.emc.ecs</groupId>
            <artifactId>nfs-client</artifactId>
            <version>1.0.3</version>
        </dependency>
NfsUtil
package xxx;

import com.emc.ecs.nfsclient.nfs.io.Nfs3File;
import com.emc.ecs.nfsclient.nfs.io.NfsFileInputStream;
import com.emc.ecs.nfsclient.nfs.io.NfsFileOutputStream;
import com.emc.ecs.nfsclient.nfs.nfs3.Nfs3;
import com.emc.ecs.nfsclient.rpc.CredentialUnix;
import lombok.var;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

/**
 * NFS utility class built on the EMC ECS nfs-client library.
 * Provides file lookup, read/download, upload and root-directory listing
 * against a single statically-configured NFS export.
 *
 * Reference docs:
 *   https://www.cnblogs.com/yshyee/p/9520181.html
 *   https://blog.youkuaiyun.com/ZYQ_1004/article/details/104947117
 *   https://blog.youkuaiyun.com/fromfire2/article/details/123695955
 */
public class NfsUtil {
    private static final String NFS_IP = "xxx.xxx.xxx.xxx";
    // Exported directory; a trailing "/" is optional and a sub-path may be used.
    private static final String NFS_DIR = "/xxx/xxx";

    // Shared NFSv3 client; null when the static initializer failed.
    static Nfs3 client;

    static {
        try {
            // Last argument (3) is the retry count for the underlying RPC calls.
            client = new Nfs3(NFS_IP, NFS_DIR, new CredentialUnix(), 3);
        } catch (Exception e) {
            // Initialization failure leaves client == null; later calls then
            // fail inside getFile() and surface as null / IOException.
            LogUtil.log(e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Resolves a path (relative to NFS_DIR) to an Nfs3File handle.
     *
     * @return the file handle, or null if the lookup failed (error is logged)
     */
    public static Nfs3File getFile(String path) {
        try {
            // The path must start with "/" regardless of whether NFS_DIR ends
            // with one; otherwise the client recurses until stack overflow.
            if (!path.startsWith("/"))
                path = "/" + path;
            return client.newFile(path);
        } catch (Exception e) {
            LogUtil.log(e.getMessage());
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Like getFile, but throws instead of returning null so callers do not
     * trip over a NullPointerException further down the call chain.
     */
    private static Nfs3File requireFile(String path) throws IOException {
        var file = getFile(path);
        if (file == null)
            throw new IOException("Unable to resolve NFS path: " + path);
        return file;
    }

    /** Lists the files directly under the NFS export's root directory. */
    public static List<Nfs3File> getRootFiles() throws IOException {
        return requireFile("/").listFiles();
    }

    /** Opens a read stream over an already-resolved NFS file. */
    public static InputStream read(Nfs3File file) throws IOException {
        return new NfsFileInputStream(file);
    }

    /** Opens a read stream over the file at the given path. */
    public static InputStream read(String path) throws IOException {
        return read(requireFile(path));
    }

    /** Reads the file at path and decodes it as UTF-8 text. */
    public static String readText(String path) throws IOException {
        return BinaryUtil.readText(read(path));
    }

    /** Uploads raw bytes to the given path (stream is closed by BinaryUtil). */
    public static void upload(String path, byte[] data) throws IOException {
        var file = requireFile(path);
        BinaryUtil.write(new NfsFileOutputStream(file), data);
    }

    /** Uploads the full contents of a stream to the given path. */
    public static void upload(String path, InputStream data) throws IOException {
        upload(path, BinaryUtil.getBytes(data));
    }

    /** Uploads text (UTF-8 encoded) to the given path. */
    public static void upload(String path, String text) throws IOException {
        upload(path, BinaryUtil.getBytes(text));
    }

    /** Round-trip smoke test: upload a random file, read it back, list root. */
    public static void test() throws IOException {
        var path = StringUtil.guid(".txt");
        upload(path, "中国智造,惠及全球");
        var text = readText(path);
        LogUtil.log(text);
        LogUtil.log(getRootFiles());
    }
}

下面是上文代码用到的工具类。一般情况下你用不到它们——多数项目都有自己的封装,仅在需要时参考。

BinaryUtil
package xxx;

import cn.hutool.core.util.HashUtil;
import lombok.var;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * Byte/stream helper utilities.
 * Convention: every method that consumes an InputStream fully also closes it,
 * and write(...) closes the OutputStream it is given.
 */
public class BinaryUtil {

    /** Creates an empty in-memory output stream. */
    public static ByteArrayOutputStream getMemoryOutputStream() {
        return new ByteArrayOutputStream();
    }

    /**
     * Drains inputStream completely into an in-memory output stream.
     * NOTE: closes inputStream when finished.
     */
    public static ByteArrayOutputStream getMemoryOutputStream(InputStream inputStream) throws IOException {
        var byteArrayOutputStream = new ByteArrayOutputStream();
        // Buffered copy; the original single-byte read() loop issued one call
        // per byte, which is very slow on non-memory (e.g. network/NFS) streams.
        var buffer = new byte[8192];
        int read;
        while ((read = inputStream.read(buffer)) != -1)
            byteArrayOutputStream.write(buffer, 0, read);
        inputStream.close();
        return byteArrayOutputStream;
    }

    /** Wraps the bytes collected in output into a readable stream. */
    public static ByteArrayInputStream getMemoryInputStream(ByteArrayOutputStream output) {
        return new ByteArrayInputStream(output.toByteArray());
    }

    /** Copies inputStream fully into memory (closing it) and returns a replayable stream. */
    public static ByteArrayInputStream getMemoryInputStream(InputStream inputStream) throws IOException {
        return getMemoryInputStream(getMemoryOutputStream(inputStream));
    }

    /** Reads all bytes from inputStream (closing it) and returns them. */
    public static byte[] readBytes(InputStream inputStream) throws IOException {
        var stream = getMemoryOutputStream(inputStream);
        var data = stream.toByteArray();
        stream.close();
        return data;
    }

    /** Alias of readBytes. */
    public static byte[] getBytes(InputStream inputStream) throws IOException {
        return readBytes(inputStream);
    }

    /** UTF-8 bytes of the given text. */
    public static byte[] getBytes(String text) {
        return text.getBytes(StandardCharsets.UTF_8);
    }

    /** Wraps a byte array into a stream. */
    public static InputStream getStream(byte[] data) {
        return new ByteArrayInputStream(data);
    }

    /** Wraps text (UTF-8 encoded) into a stream. */
    public static InputStream getStream(String text) {
        return getStream(getBytes(text));
    }

    /** Reads inputStream fully (closing it) and decodes with the given charset. */
    public static String readText(InputStream inputStream, String charset) throws IOException {
        var stream = getMemoryOutputStream(inputStream);
        var text = stream.toString(charset);
        stream.close();
        return text;
    }

    /** Reads inputStream fully (closing it) and decodes as UTF-8. */
    public static String readText(InputStream inputStream) throws IOException {
        return readText(inputStream, StandardCharsets.UTF_8.name());
    }


    /** Writes data to outputStream and closes it. */
    public static void write(OutputStream outputStream, byte[] data) throws IOException {
        outputStream.write(data);
        outputStream.close();
    }

    /** Copies data (closing it) into outputStream and closes the output. */
    public static void write(OutputStream outputStream, InputStream data) throws IOException {
        write(outputStream, getBytes(data));
    }

    /** Writes text (UTF-8 encoded) to outputStream and closes it. */
    public static void write(OutputStream outputStream, String text) throws IOException {
        write(outputStream, getBytes(text));
    }

    /** 32-bit CityHash of the stream's full contents (stream is closed). */
    public static long getHashCode(InputStream inputStream) throws IOException {
        return HashUtil.cityHash32(getBytes(inputStream));
    }

}

 StringUtil 

package xxx;

import lombok.var;
import org.springframework.util.StringUtils;

import java.util.*;

/**
 * Assorted string helpers: membership tests, joining/splitting, quoting,
 * casing, id generation, and "{}"-placeholder formatting.
 */
public class StringUtil {

    /** Returns true if value equals any of the given keywords. */
    public static boolean equals(String value, String... keywords) {
        for (var keyword : keywords)
            if (keyword.equals(value))
                return true;
        return false;
    }

    /**
     * Returns true if value equals any of the given keywords.
     * Uses Objects.equals: the previous reference comparison (==) on boxed
     * Integers only held inside the JVM integer cache (-128..127).
     */
    public static boolean equals(Integer value, Integer... keywords) {
        for (var keyword : keywords)
            if (Objects.equals(keyword, value))
                return true;
        return false;
    }

    /**
     * Returns true if any keyword contains value as a substring.
     * NOTE(review): direction is keyword.contains(value), i.e. value is the
     * needle — confirm this matches the intended "like" semantics.
     */
    public static boolean like(String value, String... keywords) {
        if (value == null)
            return false;
        for (var keyword : keywords)
            if (keyword.contains(value))
                return true;
        return false;
    }

    /** Joins list elements with "," using each element's toString(). */
    public static <T> String join(List<T> list) {
        var builder = new StringBuilder();
        for (var index = 0; index < list.size(); index++) {
            if (builder.length() != 0)
                builder.append(",");
            builder.append(list.get(index));
        }
        return builder.toString();
    }

    /** Wraps value in single quotes unless it already starts with one. */
    public static String addSingleQuotationMarks(String value) {
        if (value.startsWith("'"))
            return value;
        return "'" + value + "'";
    }

    /** Wraps value in double quotes unless it already starts with one. */
    public static String addDoubleQuotationMarks(String value) {
        if (value.startsWith("\""))
            return value;
        return "\"" + value + "\"";
    }

    /** Removes empty/null entries from the array. */
    public static String[] removeEmpty(String[] array) {
        var list = LinqUtil.filter(array, m -> isNotEmpty(m));
        return list.toArray(new String[0]);
    }

    /** Returns true if the array contains keyword (elements must be non-null). */
    public static boolean contains(String[] values, String keyword) {
        for (var value : values) {
            if (value.equals(keyword))
                return true;
        }
        return false;
    }

    /** Returns true if value is among the given values. */
    public static boolean in(String value, String... values) {
        for (var item : values)
            if (item.equals(value))
                return true;
        return false;
    }

    /** Converts a list of arbitrary objects to a list of their string forms. */
    public static List<String> list(List<?> values) {
        var list = new ArrayList<String>();
        for (var value : values)
            list.add(value.toString());
        return list;
    }

    /** Replaces each ASCII digit with the corresponding Chinese numeral. */
    public static String digit2Hanzi(String value) {
        return value.replace('0', '零')
                .replace('1', '一')
                .replace('2', '二')
                .replace('3', '三')
                .replace('4', '四')
                .replace('5', '五')
                .replace('6', '六')
                .replace('7', '七')
                .replace('8', '八')
                .replace('9', '九');
    }

    /** Null-safe substring check. */
    public static boolean contains(String text, String keyword) {
        if (keyword == null || text == null)
            return false;
        return text.contains(keyword);
    }

    /**
     * Maps a sex code to its Chinese label: 0 -> 男, 1 -> 女, anything else -> "".
     * Null-safe: the original auto-unboxed a null code and threw NPE.
     */
    public static String getSex(Integer sex) {
        if (sex == null)
            return "";
        if (sex == 0)
            return "男";
        if (sex == 1)
            return "女";
        return "";
    }

    /** Cartesian product of the two arrays, joining each pair with spo. */
    public static String[] combination(String[] array1, String[] array2, String spo) {
        var list = new ArrayList<String>();
        for (var item1 : array1)
            for (var item2 : array2)
                list.add(item1 + spo + item2);
        return ArrayUtil.toArray(list, array1);
    }

    /** Returns the distinct values of the array (order not preserved). */
    public static String[] getDistinct(String[] array) {
        // Typed collections instead of the original raw List/Set + cast.
        Set<String> set = new HashSet<>(Arrays.asList(array));
        return set.toArray(new String[0]);
    }

    /** Returns true if the list contains keyword (elements must be non-null). */
    public static boolean contains(List<String> keywords, String keyword) {
        for (var value : keywords) {
            if (value.equals(keyword))
                return true;
        }
        return false;
    }

    /** Splits comma-separated text; null yields an empty list. */
    public static List<String> split(String text) {
        if (text == null)
            return new ArrayList<>();
        var array = text.split(",");
        return ArrayUtil.arrayToList(array);
    }

    /**
     * Joins list elements with the given separator, trimming all whitespace.
     * Returns null for a null/empty list.
     */
    public static String listToStringSplit(List list, String split) {
        if (list == null || list.size() == 0)
            return null;

        return StringUtils.trimAllWhitespace(list.toString()
                .replaceAll("\\[", "")
                .replaceAll("]", ""))
                .replaceAll(",", split);
    }

    /**
     * Joins list elements with the given separator; isTrimSpace=false keeps
     * the spaces List.toString() inserts after each comma.
     */
    public static String listToStringSplit(List list, String split, boolean isTrimSpace) {
        if (list == null || list.size() == 0)
            return null;
        if (isTrimSpace) {
            return listToStringSplit(list, split);
        }
        return list.toString()
                .replaceAll("\\[", "")
                .replaceAll("]", "")
                .replaceAll(",", split);
    }

    /**
     * Wraps every element of the list with startJoin/endJoin.
     * NOTE: mutates the supplied list in place and returns it; null for a
     * null/empty list.
     */
    public static List joinList(List list, String startJoin, String endJoin) {
        if (list == null || list.size() == 0)
            return null;
        for (int i = 0; i < list.size(); i++) {
            list.set(i, joinString(list.get(i), startJoin, endJoin));
        }
        return list;
    }

    /** Wraps obj's string form with startJoin/endJoin; null when it is empty. */
    public static String joinString(Object obj, String startJoin, String endJoin) {
        String value = obj.toString();
        if (isEmpty(value))
            return null;
        value = startJoin + value + endJoin;
        return value;
    }

    /** Splits text into a list of single-character strings. */
    public static List<String> getLetters(String text) {
        var list = new ArrayList<String>();
        for (var ch : text.toCharArray())
            list.add(ch + "");
        return list;
    }

    /** Random 32-char hex id (UUID without dashes). */
    public static String newId() {
        return UUID.randomUUID().toString().replace("-", "");
    }

    /** Random id truncated to the given length (max 32). */
    public static String newId(int length) {
        return newId().substring(0, length);
    }

    /** Random id with the given suffix appended, e.g. guid(".txt"). */
    public static String guid(String tail) {
        return newId() + tail;
    }

    /** Delegates to Spring's StringUtils.isEmpty. */
    public static boolean isEmpty(Object text) {
        return StringUtils.isEmpty(text);
    }

    /** Returns true only if every value is non-empty. */
    public static boolean isNotEmpty(Object... values) {
        for (var value : values)
            if (StringUtils.isEmpty(value))
                return false;
        return true;
    }

    /**
     * slf4j-style formatting: replaces "{}" placeholders with args via
     * String.format. A null value yields "".
     */
    public static String format(Object value, Object... args) {
        if (value == null)
            return "";
        if (args.length > 0)
            return String.format(value.toString().replace("{}", "%s"), args);
        return value.toString();
    }

    /** Zero-pads value to the given width. */
    public static String number(long value, int length) {
        return String.format("%0" + length + "d", value);
    }

    /** Zero-pads value to the given width. */
    public static String number(int value, int length) {
        return String.format("%0" + length + "d", value);
    }

    /** Upper-cases the first character if it is a lowercase ASCII letter. */
    public static String upperFirst(String value) {
        char[] cs = value.toCharArray();
        if (cs[0] > 96 && cs[0] < 123)
            cs[0] -= 32;
        return String.valueOf(cs);
    }

    /** Upper-cases the first letter of every element, in place. */
    public static String[] upperFirst(String[] values) {
        for (var index = 0; index < values.length; index++)
            values[index] = upperFirst(values[index]);
        return values;
    }

    /** Joins array elements with the given separator (array must be non-empty). */
    public static String join(String[] values, String spor) {
        var sb = new StringBuilder(values[0]);
        for (int index = 1; index < values.length; index++) {
            sb.append(spor);
            sb.append(values[index]);
        }
        return sb.toString();
    }

    /** Joins list elements with the given separator. */
    public static String join(List<String> values, String spor) {
        return join(ArrayUtil.toArray(values, new String[0]), spor);
    }

}
LogUtil
package xxx;

import cn.hutool.json.JSONUtil;
import lombok.var;

/**
 * Console logger that prints each message inside a box-drawn frame with a
 * sequence number and timestamp. Delegates formatting to StringUtil.format
 * ("{}" placeholders) and timestamps to TimeUtil.
 */
public class LogUtil {

    /** Monotonically increasing entry counter shown in each frame header.
        NOTE(review): plain int increment — not thread-safe; confirm single-threaded use. */
    static int count;

    /**
     * Prints a framed log entry to stdout.
     *
     * @param value message template; "{}" placeholders are filled from args
     * @param args  optional values substituted into the template
     */
    public static void log(Object value, Object... args) {
        String msg = StringUtil.format(value, args);
        System.out.println(StringUtil.format("\r\n[{}][{}][LogUtil]\r\n" +
                        "╭───────────────────────────────────────────────────────────────────────────────────────────╮" +
                        "\r\n{}\r\n" +
                        "╰━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╯",
                ++count,
                TimeUtil.getNowString(TimeUtil.format_ymdhmss),
                msg));
    }

    /** Alias of {@link #log(Object, Object...)} kept for a familiar logger API. */
    public static void info(Object value, Object... args) {
        log(value, args);
    }

    /** Alias of {@link #log(Object, Object...)} kept for a familiar logger API. */
    public static void debug(Object value, Object... args) {
        log(value, args);
    }

    /**
     * Logs the JSON representation of a value.
     * Fix: the null branch now returns early — previously a null value was
     * logged twice ("null" and then JSONUtil.toJsonStr(null)).
     *
     * @param value object serialised with hutool JSONUtil; null logs "null"
     */
    public static void json(Object value) {
        if (value == null) {
            log("null");
            return;
        }
        log(JSONUtil.toJsonStr(value));
    }

    /** Logs an empty entry (the template is null, rendered as ""). */
    public static void log() {
        log(null);
    }

    /** Alias of {@link #json(Object)}. */
    public static void logJson(Object value) {
        json(value);
    }

}

本文章已经生成可运行项目
hdfs namenode -format 2025-10-10 20:45:17,586 INFO namenode.NameNode: STARTUP_MSG: /************************************************************ STARTUP_MSG: Starting NameNode STARTUP_MSG: host = master/192.168.56.130 STARTUP_MSG: args = [-format] STARTUP_MSG: version = 3.1.3 STARTUP_MSG: classpath = /opt/soft/hadoop/etc/hadoop:/opt/soft/hadoop/share/hadoop/common/lib/accessors-smart-1.2.jar:/opt/soft/hadoop/share/hadoop/common/lib/animal-sniffer-annotations-1.17.jar:/opt/soft/hadoop/share/hadoop/common/lib/asm-5.0.4.jar:/opt/soft/hadoop/share/hadoop/common/lib/audience-annotations-0.5.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/avro-1.7.7.jar:/opt/soft/hadoop/share/hadoop/common/lib/checker-qual-2.5.2.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-beanutils-1.9.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-cli-1.2.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-codec-1.11.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-collections-3.2.2.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-compress-1.18.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-configuration2-2.1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-io-2.5.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-lang-2.6.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-lang3-3.4.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-logging-1.1.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-math3-3.1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/commons-net-3.6.jar:/opt/soft/hadoop/share/hadoop/common/lib/curator-client-2.13.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/curator-framework-2.13.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/curator-recipes-2.13.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/error_prone_annotations-2.2.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/failureaccess-1.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/gson-2.2.4.jar:/opt/soft/hadoop/share/hadoop/common/lib/guava-27.0-jre.jar:/opt/soft/hadoop/share/hadoop/common/l
ib/hadoop-annotations-3.1.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/hadoop-auth-3.1.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/htrace-core4-4.1.0-incubating.jar:/opt/soft/hadoop/share/hadoop/common/lib/httpclient-4.5.2.jar:/opt/soft/hadoop/share/hadoop/common/lib/httpcore-4.4.4.jar:/opt/soft/hadoop/share/hadoop/common/lib/j2objc-annotations-1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-annotations-2.7.8.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-core-2.7.8.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-databind-2.7.8.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/opt/soft/hadoop/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/opt/soft/hadoop/share/hadoop/common/lib/javax.servlet-api-3.1.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/jaxb-api-2.2.11.jar:/opt/soft/hadoop/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/opt/soft/hadoop/share/hadoop/common/lib/jcip-annotations-1.0-1.jar:/opt/soft/hadoop/share/hadoop/common/lib/jersey-core-1.19.jar:/opt/soft/hadoop/share/hadoop/common/lib/jersey-json-1.19.jar:/opt/soft/hadoop/share/hadoop/common/lib/jersey-server-1.19.jar:/opt/soft/hadoop/share/hadoop/common/lib/jersey-servlet-1.19.jar:/opt/soft/hadoop/share/hadoop/common/lib/jettison-1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-http-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-io-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-security-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-server-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-servlet-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-util-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-webapp-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/common/lib/jetty-xml-9.3.24.v20180605.jar:/opt
/soft/hadoop/share/hadoop/common/lib/jsch-0.1.54.jar:/opt/soft/hadoop/share/hadoop/common/lib/json-smart-2.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/jsp-api-2.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/jsr305-3.0.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/jsr311-api-1.1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-admin-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-client-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-common-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-core-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-crypto-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-identity-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-server-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-simplekdc-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerb-util-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerby-asn1-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerby-config-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerby-pkix-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerby-util-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/kerby-xdr-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/soft/hadoop/share/hadoop/common/lib/log4j-1.2.17.jar:/opt/soft/hadoop/share/hadoop/common/lib/netty-3.10.5.Final.jar:/opt/soft/hadoop/share/hadoop/common/lib/nimbus-jose-jwt-4.41.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/paranamer-2.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/opt/soft/hadoop/share/hadoop/common/lib/re2j-1.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/slf4j-api-1.7.25.jar:/opt/soft/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.25.jar:/opt/soft/hadoop/share/hadoop/common/lib/snappy-java-1.0.5.jar:/opt/soft/hadoop/share/hadoop/common/lib/stax2-api-3.1.4.jar:/opt/soft/hadoop/share/hadoop/common/lib/token-provider-1.0.1.jar:/opt/soft/hadoop/share/hadoop/common/lib/w
oodstox-core-5.0.3.jar:/opt/soft/hadoop/share/hadoop/common/lib/zookeeper-3.4.13.jar:/opt/soft/hadoop/share/hadoop/common/lib/jul-to-slf4j-1.7.25.jar:/opt/soft/hadoop/share/hadoop/common/lib/metrics-core-3.2.4.jar:/opt/soft/hadoop/share/hadoop/common/hadoop-common-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/common/hadoop-common-3.1.3.jar:/opt/soft/hadoop/share/hadoop/common/hadoop-nfs-3.1.3.jar:/opt/soft/hadoop/share/hadoop/common/hadoop-kms-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-util-ajax-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/netty-all-4.0.52.Final.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/okhttp-2.7.5.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/okio-1.6.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jersey-servlet-1.19.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jersey-json-1.19.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/hadoop-auth-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-codec-1.11.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/httpclient-4.5.2.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/httpcore-4.4.4.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/nimbus-jose-jwt-4.41.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jcip-annotations-1.0-1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/json-smart-2.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/accessors-smart-1.2.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/asm-5.0.4.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/zookeeper-3.4.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/audience-annotations-0.5.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/netty-3.10.5.Final.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/curator-framework-2.13.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/curator-client-2.13.0.jar:/opt/soft/ha
doop/share/hadoop/hdfs/lib/guava-27.0-jre.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/failureaccess-1.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jsr305-3.0.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/checker-qual-2.5.2.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/error_prone_annotations-2.2.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/j2objc-annotations-1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/animal-sniffer-annotations-1.17.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-simplekdc-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-client-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerby-config-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-core-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerby-pkix-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerby-asn1-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerby-util-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-common-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-crypto-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-io-2.5.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-util-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/token-provider-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-admin-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-server-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerb-identity-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/kerby-xdr-1.0.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jersey-core-1.19.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jsr311-api-1.1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jersey-server-1.19.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/javax.servlet-api-3.1.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/json-simple-1.1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-server-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-http-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdf
s/lib/jetty-util-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-io-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-webapp-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-xml-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-servlet-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jetty-security-9.3.24.v20180605.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/hadoop-annotations-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-math3-3.1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-net-3.6.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-collections-3.2.2.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jettison-1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jaxb-impl-2.2.3-1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jaxb-api-2.2.11.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-jaxrs-1.9.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-xc-1.9.13.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-beanutils-1.9.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-configuration2-2.1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-lang3-3.4.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/avro-1.7.7.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/paranamer-2.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/snappy-java-1.0.5.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/commons-compress-1.18.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/re2j-1.1.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/gson-2.2.4.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jsch-0.1.54.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/curator-recipes-2.13.0.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/htrace-core4-4.1.0-i
ncubating.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-databind-2.7.8.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-annotations-2.7.8.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/jackson-core-2.7.8.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/stax2-api-3.1.4.jar:/opt/soft/hadoop/share/hadoop/hdfs/lib/woodstox-core-5.0.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-nfs-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-client-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-client-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-native-client-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-native-client-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-rbf-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-rbf-3.1.3.jar:/opt/soft/hadoop/share/hadoop/hdfs/hadoop-hdfs-httpfs-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/lib/junit-4.11.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-app-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-common-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-3.1.3-tests.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-nativetask-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-uploader-3.1.3.jar:/opt/soft/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-exampl
es-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn:/opt/soft/hadoop/share/hadoop/yarn/lib/HikariCP-java7-2.4.12.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/aopalliance-1.0.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/dnsjava-2.1.7.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/ehcache-3.3.1.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/fst-2.50.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/guice-4.0.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/guice-servlet-4.0.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/jackson-jaxrs-base-2.7.8.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/jackson-jaxrs-json-provider-2.7.8.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/jackson-module-jaxb-annotations-2.7.8.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/java-util-1.9.0.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/javax.inject-1.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/jersey-client-1.19.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/jersey-guice-1.19.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/json-io-2.5.1.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/metrics-core-3.2.4.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/mssql-jdbc-6.2.1.jre7.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/objenesis-1.0.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/snakeyaml-1.16.jar:/opt/soft/hadoop/share/hadoop/yarn/lib/swagger-annotations-1.5.4.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-api-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-client-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-common-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-registry-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-common-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hado
op-yarn-server-nodemanager-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-router-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-tests-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-timeline-pluginstorage-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-server-web-proxy-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-services-api-3.1.3.jar:/opt/soft/hadoop/share/hadoop/yarn/hadoop-yarn-services-core-3.1.3.jar STARTUP_MSG: build = https://gitbox.apache.org/repos/asf/hadoop.git -r ba631c436b806728f8ec2f54ab1e289526c90579; compiled by 'ztang' on 2019-09-12T02:47Z STARTUP_MSG: java = 1.8.0_451 ************************************************************/ 2025-10-10 20:45:17,599 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT] 2025-10-10 20:45:17,733 INFO namenode.NameNode: createNameNode [-format] 2025-10-10 20:45:18,105 ERROR conf.Configuration: error parsing conf mapred-site.xml com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. 
at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at com.ctc.wstx.sr.StreamScanner.constructWfcException(StreamScanner.java:621) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:491) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:475) at com.ctc.wstx.sr.BasicStreamReader.reportWrongEndElem(BasicStreamReader.java:3365) at com.ctc.wstx.sr.BasicStreamReader.readEndElem(BasicStreamReader.java:3292) at com.ctc.wstx.sr.BasicStreamReader.nextFromTree(BasicStreamReader.java:2911) at com.ctc.wstx.sr.BasicStreamReader.next(BasicStreamReader.java:1123) at org.apache.hadoop.conf.Configuration$Parser.parseNext(Configuration.java:3320) at org.apache.hadoop.conf.Configuration$Parser.parse(Configuration.java:3114) at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3007) at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968) at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848) at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200) at org.apache.hadoop.conf.Configuration.getTrimmed(Configuration.java:1254) at org.apache.hadoop.conf.Configuration.getLong(Configuration.java:1532) at org.apache.hadoop.security.Groups.<init>(Groups.java:113) at org.apache.hadoop.security.Groups.<init>(Groups.java:102) at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:451) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:336) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:303) at org.apache.hadoop.security.UserGroupInformation.isAuthenticationMethodEnabled(UserGroupInformation.java:391) at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:385) at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1156) at 
org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1645) at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1755) 2025-10-10 20:45:18,114 ERROR namenode.NameNode: Failed to start namenode. java.lang.RuntimeException: com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3024) at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968) at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848) at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200) at org.apache.hadoop.conf.Configuration.getTrimmed(Configuration.java:1254) at org.apache.hadoop.conf.Configuration.getLong(Configuration.java:1532) at org.apache.hadoop.security.Groups.<init>(Groups.java:113) at org.apache.hadoop.security.Groups.<init>(Groups.java:102) at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:451) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:336) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:303) at org.apache.hadoop.security.UserGroupInformation.isAuthenticationMethodEnabled(UserGroupInformation.java:391) at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:385) at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1156) at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1645) at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1755) Caused by: com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. 
at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at com.ctc.wstx.sr.StreamScanner.constructWfcException(StreamScanner.java:621) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:491) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:475) at com.ctc.wstx.sr.BasicStreamReader.reportWrongEndElem(BasicStreamReader.java:3365) at com.ctc.wstx.sr.BasicStreamReader.readEndElem(BasicStreamReader.java:3292) at com.ctc.wstx.sr.BasicStreamReader.nextFromTree(BasicStreamReader.java:2911) at com.ctc.wstx.sr.BasicStreamReader.next(BasicStreamReader.java:1123) at org.apache.hadoop.conf.Configuration$Parser.parseNext(Configuration.java:3320) at org.apache.hadoop.conf.Configuration$Parser.parse(Configuration.java:3114) at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3007) ... 15 more 2025-10-10 20:45:18,127 INFO util.ExitUtil: Exiting with status 1: java.lang.RuntimeException: com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] 2025-10-10 20:45:18,138 INFO namenode.NameNode: SHUTDOWN_MSG: /************************************************************ SHUTDOWN_MSG: Shutting down NameNode at master/192.168.56.130 ************************************************************/ 2025-10-10 20:45:18,169 ERROR conf.Configuration: error parsing conf mapred-site.xml com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. 
at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at com.ctc.wstx.sr.StreamScanner.constructWfcException(StreamScanner.java:621) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:491) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:475) at com.ctc.wstx.sr.BasicStreamReader.reportWrongEndElem(BasicStreamReader.java:3365) at com.ctc.wstx.sr.BasicStreamReader.readEndElem(BasicStreamReader.java:3292) at com.ctc.wstx.sr.BasicStreamReader.nextFromTree(BasicStreamReader.java:2911) at com.ctc.wstx.sr.BasicStreamReader.next(BasicStreamReader.java:1123) at org.apache.hadoop.conf.Configuration$Parser.parseNext(Configuration.java:3320) at org.apache.hadoop.conf.Configuration$Parser.parse(Configuration.java:3114) at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3007) at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968) at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848) at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200) at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1812) at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1789) at org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183) at org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145) at org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65) at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102) Exception in thread "Thread-1" java.lang.RuntimeException: com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. 
at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3024) at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968) at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848) at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200) at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1812) at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1789) at org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183) at org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145) at org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65) at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102) Caused by: com.ctc.wstx.exc.WstxParsingException: Unexpected close tag </property>; expected </value>. at [row,col,system-id]: [23,11,"file:/opt/soft/hadoop/etc/hadoop/mapred-site.xml"] at com.ctc.wstx.sr.StreamScanner.constructWfcException(StreamScanner.java:621) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:491) at com.ctc.wstx.sr.StreamScanner.throwParseError(StreamScanner.java:475) at com.ctc.wstx.sr.BasicStreamReader.reportWrongEndElem(BasicStreamReader.java:3365) at com.ctc.wstx.sr.BasicStreamReader.readEndElem(BasicStreamReader.java:3292) at com.ctc.wstx.sr.BasicStreamReader.nextFromTree(BasicStreamReader.java:2911) at com.ctc.wstx.sr.BasicStreamReader.next(BasicStreamReader.java:1123) at org.apache.hadoop.conf.Configuration$Parser.parseNext(Configuration.java:3320) at org.apache.hadoop.conf.Configuration$Parser.parse(Configuration.java:3114) at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3007) ... 9 more 现在呢?
最新发布
10-11
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

上海好程序员

给上海好程序员加个鸡腿!!!

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值