Getting a Time-Domain Amplitude Waveform from an Audio File (Audio Visualization / Volume Graph / Waveform)

This article describes a way to draw an amplitude waveform for an audio file: the file is decoded to extract amplitude information, which is then rendered with a custom View.


I searched for a long time, and almost everything I found covers real-time audio spectra, i.e. capturing data through MediaPlayer + Visualizer (onWaveFormDataCapture / onFftDataCapture) and then parsing and drawing it. That's not what I needed: I'm building an audio trimmer, so I want a static amplitude waveform covering the entire duration of an audio file (I'm not even sure what it's officially called).
Here's what the result looks like:
(screenshot of the rendered amplitude waveform)

References:
https://blog.youkuaiyun.com/jhl122/article/details/86996446?spm=1001.2014.3001.5502
https://ask.youkuaiyun.com/questions/748385?spm=1005.2026.3001.5635

The reference material provided the code but no usage instructions, and what usage there was didn't quite fit my needs, so I made some changes. Here's the good stuff.
1. Utility class that extracts the drawing data

package com.cs;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;

import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Arrays;

/**
 * author:做好事不留名的雷锋叔叔
 * Date:2021/6/24
 * Description: Extracts the volume/amplitude information of an audio file
 */
public class AudioVolumeInfoUtil {

    private static AudioVolumeInfoUtil audioVolumeInfoUtil;

    // synchronized: the lazy initialization is otherwise not thread-safe.
    public static synchronized AudioVolumeInfoUtil getInstance() {
        if (audioVolumeInfoUtil == null) {
            audioVolumeInfoUtil = new AudioVolumeInfoUtil();
        }
        return audioVolumeInfoUtil;
    }

    public void getInfo(String mediaPath, OnActionListener<AudioVolumeInfo> onActionListener) {
        File f = new File(mediaPath);
        if (!f.exists() || f.length() == 0 || f.isDirectory()) {
            if (onActionListener != null) {
                onActionListener.onFail("文件异常");
            }
            return;
        }
        readFile(f, onActionListener);
    }

    private void readFile(File inputFile, OnActionListener<AudioVolumeInfo> onActionListener) {
        ScreenUtils.showLog("ReadFile");
        if (onActionListener != null) {
            onActionListener.onStart();
        }
        MediaExtractor extractor = null;
        MediaCodec codec = null;
        try {
            AudioVolumeInfo audioVolumeInfo = new AudioVolumeInfo();
            extractor = new MediaExtractor();
            MediaFormat format = null;
            int i;

            String[] components = inputFile.getPath().split("\\.");
            audioVolumeInfo.mFileType = components[components.length - 1];
            audioVolumeInfo.mFileSize = (int) inputFile.length();
            extractor.setDataSource(inputFile.getPath());
            int numTracks = extractor.getTrackCount();
            // find and select the first audio track present in the file.
            for (i = 0; i < numTracks; i++) {
                format = extractor.getTrackFormat(i);
                if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
                    extractor.selectTrack(i);
                    break;
                }
            }
            ScreenUtils.showLog("i = " + i);
            ScreenUtils.showLog("numTracks = " + numTracks);
            if (format == null || i == numTracks) {
                if (onActionListener != null) {
                    onActionListener.onFail("文件中找不到音频");
                }
                return;
            }
            int mChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            int mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            ScreenUtils.showLog("mChannels = " + mChannels);
            ScreenUtils.showLog("mSampleRate = " + mSampleRate);
            // Expected total number of samples per channel.
            // KEY_DURATION is in microseconds, hence the division by 1,000,000.
            int expectedNumSamples =
                    (int) ((format.getLong(MediaFormat.KEY_DURATION) / 1000000.f) * mSampleRate + 0.5f);
            codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
            codec.configure(format, null, null, 0);
            codec.start();

            int decodedSamplesSize = 0;  // size of the output buffer containing decoded samples.
            byte[] decodedSamples = null;
            ByteBuffer[] inputBuffers = codec.getInputBuffers();
            ByteBuffer[] outputBuffers = codec.getOutputBuffers();
            int sample_size;
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            long presentation_time;
            int tot_size_read = 0;
            boolean done_reading = false;

            // Set the size of the decoded samples buffer to 1MB (~6sec of a stereo stream at 44.1kHz).
            // For longer streams, the buffer size will be increased later on, calculating a rough
            // estimate of the total size needed to store all the samples in order to resize the buffer
            // only once.
            // Raw audio data
            ByteBuffer mDecodedBytes = ByteBuffer.allocate(1 << 20);
            boolean firstSampleData = true;
            while (true) {
                // read data from file and feed it to the decoder input buffers.
                int inputBufferIndex = codec.dequeueInputBuffer(100);
                if (!done_reading && inputBufferIndex >= 0) {
                    sample_size = extractor.readSampleData(inputBuffers[inputBufferIndex], 0);
                    if (firstSampleData
                            && format.getString(MediaFormat.KEY_MIME).equals("audio/mp4a-latm")
                            && sample_size == 2) {
                        // For some reasons on some devices (e.g. the Samsung S3) you should not
                        // provide the first two bytes of an AAC stream, otherwise the MediaCodec will
                        // crash. These two bytes do not contain music data but basic info on the
                        // stream (e.g. channel configuration and sampling frequency), and skipping them
                        // seems OK with other devices (MediaCodec has already been configured and
                        // already knows these parameters).
                        extractor.advance();
                        tot_size_read += sample_size;
                    } else if (sample_size < 0) {
                        // All samples have been read.
                        codec.queueInputBuffer(
                                inputBufferIndex, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        done_reading = true;
                    } else {
                        presentation_time = extractor.getSampleTime();
                        codec.queueInputBuffer(inputBufferIndex, 0, sample_size, presentation_time, 0);
                        extractor.advance();
                        tot_size_read += sample_size;
                        if (onActionListener != null && onActionListener.isNeedProgress()) {
                            float progress = (float) (tot_size_read) / audioVolumeInfo.mFileSize;
                            int progressInt = (int) (progress * 100);
                            if (progressInt > 100) progressInt = 100;
                            onActionListener.onProgress(progressInt);
                        }
                    }
                    firstSampleData = false;
                }

                // Get decoded stream from the decoder output buffers.
                int outputBufferIndex = codec.dequeueOutputBuffer(info, 100);
                if (outputBufferIndex >= 0 && info.size > 0) {
                    if (decodedSamplesSize < info.size) {
                        decodedSamplesSize = info.size;
                        decodedSamples = new byte[decodedSamplesSize];
                    }
                    outputBuffers[outputBufferIndex].get(decodedSamples, 0, info.size);
                    outputBuffers[outputBufferIndex].clear();
                    // Check if buffer is big enough. Resize it if it's too small.
                    if (mDecodedBytes.remaining() < info.size) {
                        // Getting a rough estimate of the total size, allocate 20% more, and
                        // make sure to allocate at least 5MB more than the initial size.
                        int position = mDecodedBytes.position();
                        int newSize = (int) ((position * (1.0 * audioVolumeInfo.mFileSize / tot_size_read)) * 1.2);
                        if (newSize - position < info.size + 5 * (1 << 20)) {
                            newSize = position + info.size + 5 * (1 << 20);
                        }
                        ByteBuffer newDecodedBytes = null;
                        // Try to allocate memory. If we are OOM, try to run the garbage collector.
                        int retry = 10;
                        while (retry > 0) {
                            try {
                                newDecodedBytes = ByteBuffer.allocate(newSize);
                                break;
                            } catch (OutOfMemoryError oome) {
                                // setting android:largeHeap="true" in <application> seem to help not
                                // reaching this section.
                                retry--;
                            }
                        }
                        if (retry == 0) {
                            // Failed to allocate memory... Stop reading more data and finalize the
                            // instance with the data decoded so far.
                            break;
                        }
                        mDecodedBytes.rewind();
                        newDecodedBytes.put(mDecodedBytes);
                        mDecodedBytes = newDecodedBytes;
                        mDecodedBytes.position(position);
                    }
                    mDecodedBytes.put(decodedSamples, 0, info.size);
                    codec.releaseOutputBuffer(outputBufferIndex, false);
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = codec.getOutputBuffers();
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Subsequent data will conform to new format.
                    // We could check that codec.getOutputFormat(), which is the new output format,
                    // is what we expect.
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0
                        || (mDecodedBytes.position() / (2 * mChannels)) >= expectedNumSamples) {
                    // We got all the decoded data from the decoder. Stop here.
                    // Theoretically dequeueOutputBuffer(info, ...) should have set info.flags to
                    // MediaCodec.BUFFER_FLAG_END_OF_STREAM. However some phones (e.g. Samsung S3)
                    // won't do that for some files (e.g. with mono AAC files), in which case subsequent
                    // calls to dequeueOutputBuffer may result in the application crashing, without
                    // even an exception being thrown... Hence the second check.
                    // (for mono AAC files, the S3 will actually double each sample, as if the stream
                    // was stereo. The resulting stream is half what it's supposed to be and with a much
                    // lower pitch.)
                    break;
                }
            }
            audioVolumeInfo.mNumSamples = mDecodedBytes.position() / (mChannels * 2);  // One sample = 2 bytes.
            mDecodedBytes.rewind();
            mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
            audioVolumeInfo.mDecodedSamples = mDecodedBytes.asShortBuffer();
            audioVolumeInfo.mAvgBitRate = (int) ((audioVolumeInfo.mFileSize * 8) * ((float) mSampleRate / audioVolumeInfo.mNumSamples) / 1000);

            extractor.release();
            extractor = null;
            codec.stop();
            codec.release();
            codec = null;

            ScreenUtils.showLog("mNumSamples = " + audioVolumeInfo.mNumSamples);
            ScreenUtils.showLog("getSamplesPerFrame() = " + audioVolumeInfo.getSamplesPerFrame());
            // Temporary hack to make it work with the old version.
            audioVolumeInfo.mNumFrames = audioVolumeInfo.mNumSamples / audioVolumeInfo.getSamplesPerFrame();
            audioVolumeInfo.mNumFramesFloat = (float) audioVolumeInfo.mNumSamples / audioVolumeInfo.getSamplesPerFrame();
            ScreenUtils.showLog("mNumFramesFloat = " + audioVolumeInfo.mNumFramesFloat);
            if (audioVolumeInfo.mNumSamples % audioVolumeInfo.getSamplesPerFrame() != 0) {
                audioVolumeInfo.mNumFrames++;
            }
            audioVolumeInfo.mFrameGains = new int[audioVolumeInfo.mNumFrames];
            int[] mFrameLens = new int[audioVolumeInfo.mNumFrames];
            int[] mFrameOffsets = new int[audioVolumeInfo.mNumFrames];
            int j;
            int gain, value;
            int frameLens = (int) ((1000 * audioVolumeInfo.mAvgBitRate / 8) *
                    ((float) audioVolumeInfo.getSamplesPerFrame() / mSampleRate));
            for (i = 0; i < audioVolumeInfo.mNumFrames; i++) {
                gain = -1;
                for (j = 0; j < audioVolumeInfo.getSamplesPerFrame(); j++) {
                    value = 0;
                    for (int k = 0; k < mChannels; k++) {
                        if (audioVolumeInfo.mDecodedSamples.remaining() > 0) {
                            value += Math.abs(audioVolumeInfo.mDecodedSamples.get());
                        }
                    }
                    value /= mChannels;
                    if (gain < value) {
                        gain = value;
                    }
                }
                audioVolumeInfo.mFrameGains[i] = (int) Math.sqrt(gain);  // gain = max channel-averaged |sample| in this frame
                mFrameLens[i] = frameLens;  // totally not accurate...
                mFrameOffsets[i] = (int) (i * (1000 * audioVolumeInfo.mAvgBitRate / 8) *  //  = i * frameLens
                        ((float) audioVolumeInfo.getSamplesPerFrame() / mSampleRate));
            }
            audioVolumeInfo.mDecodedSamples.rewind();
            // DumpSamples();  // Uncomment this line to dump the samples in a TSV file.

            prepareForView(audioVolumeInfo);
            if (onActionListener != null) {
                onActionListener.onSuccess(audioVolumeInfo);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (extractor != null) {
                extractor.release();
                extractor = null;
            }
            if (codec != null) {
                codec.stop();
                codec.release();
                codec = null;
            }
        }
    }

    private void prepareForView(AudioVolumeInfo audioVolumeInfo) {
        int numFrames = audioVolumeInfo.getNumFrames();
        int[] frameGains = audioVolumeInfo.getFrameGains();
        ScreenUtils.showLog("numFrames = " + numFrames);
        ScreenUtils.showLog("frameGains = " + Arrays.toString(frameGains));
        double[] smoothedGains = new double[numFrames];
        if (numFrames == 1) {
            smoothedGains[0] = frameGains[0];
        } else if (numFrames == 2) {
            smoothedGains[0] = frameGains[0];
            smoothedGains[1] = frameGains[1];
        } else if (numFrames > 2) {
            smoothedGains[0] = (frameGains[0] / 2.0) +
                    (frameGains[1] / 2.0);
            for (int i = 1; i < numFrames - 1; i++) {
                smoothedGains[i] = (frameGains[i - 1] / 3.0) +
                        (frameGains[i] / 3.0) +
                        (frameGains[i + 1] / 3.0);
            }
            smoothedGains[numFrames - 1] = (frameGains[numFrames - 2] / 2.0) +
                    (frameGains[numFrames - 1] / 2.0);
        }

        double maxGain = 1.0;
        for (int i = 0; i < numFrames; i++) {
            if (smoothedGains[i] > maxGain) {
                maxGain = smoothedGains[i];
            }
        }
        double scaleFactor = 1.0;
        if (maxGain > 255.0) {
            scaleFactor = 255 / maxGain;
        }

        maxGain = 0;
        int[] gainHist = new int[256];
        for (int i = 0; i < numFrames; i++) {
            int smoothedGain = (int) (smoothedGains[i] * scaleFactor);
            if (smoothedGain < 0)
                smoothedGain = 0;
            if (smoothedGain > 255)
                smoothedGain = 255;
            if (smoothedGain > maxGain)
                maxGain = smoothedGain;

            gainHist[smoothedGain]++;
        }

        double minGain = 0;
        int sum = 0;
        while (minGain < 255 && sum < numFrames / 20) {
            sum += gainHist[(int) minGain];
            minGain++;
        }

        sum = 0;
        while (maxGain > 2 && sum < numFrames / 100) {
            sum += gainHist[(int) maxGain];
            maxGain--;
        }
        if (maxGain <= 50) {
            maxGain = 80;
        } else if (maxGain > 50 && maxGain < 120) {
            maxGain = 142;
        } else {
            maxGain += 10;
        }


        double[] heights = new double[numFrames];
        double range = maxGain - minGain;
        for (int i = 0; i < numFrames; i++) {
            double value = (smoothedGains[i] * scaleFactor - minGain) / range;
            if (value < 0.0)
                value = 0.0;
            if (value > 1.0)
                value = 1.0;
            heights[i] = value * value;
        }

        audioVolumeInfo.mNumZoomLevels = 5;
        audioVolumeInfo.mLenByZoomLevel = new int[5];
        double[] mZoomFactorByZoomLevel = new double[5];
        audioVolumeInfo.mValuesByZoomLevel = new double[5][];

        // Level 0 is doubled, with interpolated values
        audioVolumeInfo.mLenByZoomLevel[0] = numFrames * 2;
        System.out.println("ssnum" + numFrames);
        mZoomFactorByZoomLevel[0] = 2.0;
        audioVolumeInfo.mValuesByZoomLevel[0] = new double[audioVolumeInfo.mLenByZoomLevel[0]];
        if (numFrames > 0) {
            audioVolumeInfo.mValuesByZoomLevel[0][0] = 0.5 * heights[0];
            audioVolumeInfo.mValuesByZoomLevel[0][1] = heights[0];
        }
        for (int i = 1; i < numFrames; i++) {
            audioVolumeInfo.mValuesByZoomLevel[0][2 * i] = 0.5 * (heights[i - 1] + heights[i]);
            audioVolumeInfo.mValuesByZoomLevel[0][2 * i + 1] = heights[i];
        }

        // Level 1 is normal
        audioVolumeInfo.mLenByZoomLevel[1] = numFrames;
        audioVolumeInfo.mValuesByZoomLevel[1] = new double[audioVolumeInfo.mLenByZoomLevel[1]];
        mZoomFactorByZoomLevel[1] = 1.0;
        for (int i = 0; i < audioVolumeInfo.mLenByZoomLevel[1]; i++) {
            audioVolumeInfo.mValuesByZoomLevel[1][i] = heights[i];
        }

        // 3 more levels are each halved
        for (int j = 2; j < 5; j++) {
            audioVolumeInfo.mLenByZoomLevel[j] = audioVolumeInfo.mLenByZoomLevel[j - 1] / 2;
            audioVolumeInfo.mValuesByZoomLevel[j] = new double[audioVolumeInfo.mLenByZoomLevel[j]];
            mZoomFactorByZoomLevel[j] = mZoomFactorByZoomLevel[j - 1] / 2.0;
            for (int i = 0; i < audioVolumeInfo.mLenByZoomLevel[j]; i++) {
                audioVolumeInfo.mValuesByZoomLevel[j][i] =
                        0.5 * (audioVolumeInfo.mValuesByZoomLevel[j - 1][2 * i] +
                                audioVolumeInfo.mValuesByZoomLevel[j - 1][2 * i + 1]);
            }
        }


        if (numFrames > 5000) {
            audioVolumeInfo.mZoomLevel = 3;
        } else if (numFrames > 1000) {
            audioVolumeInfo.mZoomLevel = 2;
        } else if (numFrames > 300) {
            audioVolumeInfo.mZoomLevel = 1;
        } else {
            audioVolumeInfo.mZoomLevel = 0;
        }
        computeIntsForThisZoomLevel(audioVolumeInfo);
    }

    private void computeIntsForThisZoomLevel(AudioVolumeInfo audioVolumeInfo) {
        ScreenUtils.showLog("mZoomLevel = " + audioVolumeInfo.mZoomLevel);
        ScreenUtils.showLog("mLenByZoomLevel = " + Arrays.toString(audioVolumeInfo.mLenByZoomLevel));
        ScreenUtils.showLog("mValuesByZoomLevel = " + Arrays.toString(audioVolumeInfo.mValuesByZoomLevel));
//        int halfHeight = (int) ((getMeasuredHeight() * 1f / 5) - 1);
        audioVolumeInfo.mHeightsAtThisZoomLevel = new double[audioVolumeInfo.mLenByZoomLevel[audioVolumeInfo.mZoomLevel]];
        for (int i = 0; i < audioVolumeInfo.mLenByZoomLevel[audioVolumeInfo.mZoomLevel]; i++) {
            audioVolumeInfo.mHeightsAtThisZoomLevel[i] =
                    audioVolumeInfo.mValuesByZoomLevel[audioVolumeInfo.mZoomLevel][i];
//                    (int) (audioVolumeInfo.mValuesByZoomLevel[audioVolumeInfo.mZoomLevel][i] * halfHeight);
        }
        ScreenUtils.showLog("mHeightsAtThisZoomLevel = " + Arrays.toString(audioVolumeInfo.mHeightsAtThisZoomLevel));
    }

    public static class AudioVolumeInfo {

        public String mFileType;
        public int mFileSize;
        public int mNumSamples;
        public ShortBuffer mDecodedSamples;
        public int mAvgBitRate;
        public float mNumFramesFloat;
        public int mNumFrames;
        public int[] mFrameGains;
        //
        public int mNumZoomLevels;
        public int mZoomLevel;
        public double[][] mValuesByZoomLevel;
        public int[] mLenByZoomLevel;
        public double[] mHeightsAtThisZoomLevel;

        public int maxPos() {
            return mLenByZoomLevel[mZoomLevel];
        }

        public int getNumFrames() {
            return mNumFrames;
        }

        public int[] getFrameGains() {
            return mFrameGains;
        }

        // Should be removed when the app will use directly the samples instead of the frames.
        public int getSamplesPerFrame() {
            return 16000 / 50;  // just a fixed value here...
//        return 1024/2;  // just a fixed value here...
        }
    }
} 
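Since the stated goal is audio trimming, note that each frame covers getSamplesPerFrame() samples, i.e. getSamplesPerFrame() / sampleRate seconds of audio. One gotcha: readFile() logs mSampleRate but never stores it on AudioVolumeInfo, so keep it yourself (or add a field) if you need timestamps. A minimal sketch of mapping a frame index to a time position (the frameToMillis helper is mine, not part of the original code):

// Hypothetical helper: converts a frame index from AudioVolumeInfo into a
// timestamp, assuming every frame spans getSamplesPerFrame() samples.
public static long frameToMillis(AudioVolumeInfoUtil.AudioVolumeInfo info,
                                 int frameIndex, int sampleRate) {
    long samples = (long) frameIndex * info.getSamplesPerFrame();
    return samples * 1000L / sampleRate; // samples -> milliseconds
}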

2. Custom View that displays the extracted data (the original made it scrollable and showed everything; I compress it to fit one screen instead. Adjust the drawing to your own needs.)

package com.cs;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;

import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;

/**
 * author:做好事不留名的雷锋叔叔
 * Date:2021/6/24
 * Description: Custom View that draws the amplitude waveform
 */
public class AudioVolumeView extends View {

    private AudioVolumeInfoUtil.AudioVolumeInfo audioVolumeInfo;
    private Paint barPaint;

    public AudioVolumeView(Context context) {
        super(context);
    }

    public AudioVolumeView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }

    public AudioVolumeView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public AudioVolumeView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
    }

    private int lineWidth;
    private int lineCount;
    private int[] lineHeights;

    private void init() {
        int width = getWidth();
        int height = getHeight();
        if (width == 0) return;
        lineWidth = ScreenUtils.dp2px(getContext(), 1);
        lineCount = width / (lineWidth * 2); // the gap between lines is as wide as the lines themselves
        int dataCount = audioVolumeInfo.mHeightsAtThisZoomLevel.length;
        if (dataCount < lineCount) {
            //数据条目 < 可绘制条目
            lineCount = dataCount;
            lineHeights = new int[lineCount];
            for (int i = 0; i < lineCount; i++) {
                int lineHeight = (int) (audioVolumeInfo.mHeightsAtThisZoomLevel[i] * height / 2);
                if (lineHeight == 0) lineHeight = 1;
                lineHeights[i] = lineHeight;
            }
        } else {
            lineHeights = new int[lineCount];
            float scale = dataCount * 1f / lineCount;
            for (int i = 0; i < lineCount; i++) {
                int dataIndex = (int) (i * scale);
                if (dataIndex > dataCount - 1) dataIndex = dataCount - 1;//非标准计算可能会越界
                int lineHeight = (int) (audioVolumeInfo.mHeightsAtThisZoomLevel[dataIndex] * height / 2);
                if (lineHeight == 0) lineHeight = 1;
                lineHeights[i] = lineHeight;
            }
        }

        barPaint = new Paint();
        barPaint.setAntiAlias(false);
        barPaint.setStyle(Paint.Style.FILL);
        barPaint.setStrokeWidth(lineWidth);
        barPaint.setColor(Color.RED);
    }

    public void setAudioVolumeInfo(AudioVolumeInfoUtil.AudioVolumeInfo audioVolumeInfo) {
        this.audioVolumeInfo = audioVolumeInfo;
        barPaint = null; // force init() to recompute the line heights for the new data
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        drawReallyLines(canvas);
    }

    /**
     * Draws the actual waveform lines.
     */
    private void drawReallyLines(Canvas canvas) {
        if (audioVolumeInfo == null) return;
        int centerY = getMeasuredHeight() / 2;

        if (barPaint == null) {
            init();
        }
        for (int i = 0; i < lineCount; i++) {
            float x = i * lineWidth * 2;
            float topY = centerY - lineHeights[i];
            float bottomY = centerY + lineHeights[i];
            canvas.drawLine(x, topY, x, bottomY, barPaint);
        }
    }
}
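A note on the down-sampling in init(): when there are more data points than drawable lines, it picks a single point per line (dataIndex = i * scale), which can silently drop short peaks. If that matters for your material, taking the maximum over each window preserves them. A sketch of an alternative loop body, using the same fields and variables as init() (this is my variation, not the original's):

// Alternative to the point-sampling branch: take the max over each window
// so that short peaks survive the compression to one screen.
for (int i = 0; i < lineCount; i++) {
    int from = (int) (i * scale);
    int to = Math.min((int) ((i + 1) * scale), dataCount - 1);
    double max = 0;
    for (int k = from; k <= to; k++) {
        max = Math.max(max, audioVolumeInfo.mHeightsAtThisZoomLevel[k]);
    }
    int lineHeight = (int) (max * height / 2);
    lineHeights[i] = Math.max(lineHeight, 1); // keep at least 1px so silence is still visible
}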

3. Simple utility class

package com.cs;

import android.content.Context;
import android.graphics.Point;
import android.os.Build;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;

/**
 * author:做好事不留名的雷锋叔叔
 * Date:2021/6/23
 * Description:
 */
public class ScreenUtils {

    public static int dp2px(Context context, float dipValue) {
        if (context == null) return (int) (dipValue * 2);
        float scale = context.getResources().getDisplayMetrics().density;
        return (int) (dipValue * scale + 0.5 * (dipValue >= 0 ? 1 : -1));
    }

    public static float getScreenWidth(Context context) {
        WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        if (manager == null) return 1080;
        Display display = manager.getDefaultDisplay();
        Point sizePoint = new Point();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            display.getRealSize(sizePoint);
        } else {
            display.getSize(sizePoint);
        }
        return sizePoint.x;
    }

    public static void showLog(String str) {
        Log.e("音频", "" + str);
    }
}
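One piece the code above depends on but the post never shows is the OnActionListener callback interface. Here's a minimal sketch reconstructed from the calls made in readFile() and the usage below (the method set matches what is actually invoked; anything beyond that is an assumption):

package com.cs;

/**
 * Callback for AudioVolumeInfoUtil.getInfo().
 * Reconstructed from usage; not part of the original post.
 */
public interface OnActionListener<T> {
    void onStart();
    void onSuccess(T result);
    void onFail(String error);
    void onProgress(int progress);  // 0..100; only called when isNeedProgress() returns true
    boolean isNeedProgress();       // return false to skip progress callbacks
}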

4. How to use it
1) Layout

    <com.cs.AudioVolumeView
        android:id="@+id/AudioVolumeView"
        android:layout_width="match_parent"
        android:layout_height="200dp" />
2) In an Activity, Fragment, or anywhere else
final AudioVolumeView audioVolumeView = findViewById(R.id.AudioVolumeView);
AudioVolumeInfoUtil.getInstance().getInfo(path, new OnActionListener<AudioVolumeInfoUtil.AudioVolumeInfo>() {
    @Override
    public void onSuccess(AudioVolumeInfoUtil.AudioVolumeInfo audioVolumeInfo) {
        audioVolumeView.setAudioVolumeInfo(audioVolumeInfo);
    }

    @Override
    public void onFail(String error) {
    }

    @Override
    public void onStart() {
    }

    @Override
    public void onProgress(int progress) {
    }

    @Override
    public boolean isNeedProgress() {
        return false;
    }
});
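
A caveat: getInfo() decodes the whole file synchronously, so calling it on the main thread will freeze the UI (and risk an ANR) for anything but very short clips. A sketch of running it on a background thread and hopping back to the main thread for the View update (the executor and handler are my own scaffolding, not part of the original code):

// Assumed scaffolding: a single background thread plus a main-thread Handler.
ExecutorService executor = Executors.newSingleThreadExecutor();
Handler mainHandler = new Handler(Looper.getMainLooper());

executor.execute(() -> AudioVolumeInfoUtil.getInstance().getInfo(path,
        new OnActionListener<AudioVolumeInfoUtil.AudioVolumeInfo>() {
            @Override
            public void onSuccess(AudioVolumeInfoUtil.AudioVolumeInfo info) {
                // Callbacks fire on the background thread; post back before touching views.
                mainHandler.post(() -> audioVolumeView.setAudioVolumeInfo(info));
            }

            @Override public void onFail(String error) { }
            @Override public void onStart() { }
            @Override public void onProgress(int progress) { }
            @Override public boolean isNeedProgress() { return false; }
        }));

(Requires imports: java.util.concurrent.ExecutorService, java.util.concurrent.Executors, android.os.Handler, android.os.Looper.)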

Done!
Just a humble bit of tidying and sharing for my fellow hardworking coders~
