application.yml
server:
  port: 6000
video:
  recorder:
    # file-name: E://alldemo
    file-name: /usr/local/video/
pom.xml
<!-- Core dependencies for screen recording -->
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv</artifactId>
    <version>1.4.4</version>
</dependency>
<dependency>
    <groupId>org.bytedeco.javacpp-presets</groupId>
    <artifactId>ffmpeg-platform</artifactId>
    <version>4.1-1.4.4</version>
</dependency>
<!-- Utility dependencies -->
<dependency>
    <groupId>cn.hutool</groupId>
    <artifactId>hutool-all</artifactId>
    <version>5.7.10</version>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
</dependency>
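The javacv artifact supplies the Java API, and the matching ffmpeg-platform artifact bundles the native FFmpeg binaries for all supported platforms, so no separate FFmpeg installation should be needed. The lombok version is omitted because it is presumably managed by the Spring Boot parent POM.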
Controller
import com.example.tcpclient.seralport.VideoRecorder;
import com.example.tcpclient.utils.RecorderUtil;
import com.example.tcpclient.vo.RecordeVO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

@RestController
@RequestMapping("/screenRecorder")
public class ScreenRecorderController {

    @Autowired
    private RecorderUtil recorderUtil;

    private VideoRecorder videoRecorder;

    @PostMapping("/start")
    public String startRecording(@RequestBody RecordeVO recordeVO) {
        try {
            // Use the current timestamp as the video file name
            recordeVO.setVideoName(String.valueOf(System.currentTimeMillis()));
            videoRecorder = recorderUtil.getVideoRecorderInstance(recordeVO);
            videoRecorder.start();
            return "Recording started";
        } catch (Exception e) {
            e.printStackTrace();
            return "Error starting recording";
        }
    }

    @PostMapping("/capture")
    public String capture() {
        videoRecorder.capture();
        return "Audio capture started";
    }

    @PostMapping("/pause")
    public String pauseRecording() {
        videoRecorder.pause();
        return "Recording paused";
    }

    @PostMapping("/stop")
    public String stopRecording() {
        videoRecorder.stop();
        return "Recording stopped";
    }
}
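For reference, a minimal client sketch for exercising these endpoints. It assumes the service runs locally on the port 6000 configured in application.yml, that Java 11+ is available for java.net.http, and that the capture region and project name below are only example values:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ScreenRecorderClient {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Example capture region and project name; adjust to the actual screen size
        String body = "{\"startx\":0,\"starty\":0,\"width\":1280,\"height\":720,\"projectName\":\"demo\"}";
        HttpRequest start = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:6000/screenRecorder/start"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        System.out.println(client.send(start, HttpResponse.BodyHandlers.ofString()).body());

        Thread.sleep(10_000); // record for ten seconds

        HttpRequest stop = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:6000/screenRecorder/stop"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        System.out.println(client.send(stop, HttpResponse.BodyHandlers.ofString()).body());
    }
}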
Component
import com.example.tcpclient.seralport.VideoRecorder;
import com.example.tcpclient.vo.RecordeVO;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.awt.*;
import java.io.File;

@Component
@Data
public class RecorderUtil {

    @Value("${video.recorder.file-name}")
    private String fileName;

    public VideoRecorder getVideoRecorderInstance(RecordeVO recordeVO) {
        // Capture region taken from the request body
        Rectangle rectangle = new Rectangle(recordeVO.getStartx(), recordeVO.getStarty(),
                recordeVO.getWidth(), recordeVO.getHeight());
        // Recordings are stored under <file-name>/<projectName>/video/
        String path = fileName + recordeVO.getProjectName() + "/video/";
        File file = new File(path);
        if (!file.exists()) {
            file.mkdirs();
        }
        String fileHoldName = path + recordeVO.getVideoName();
        return new VideoRecorder(fileHoldName, true, rectangle);
    }
}
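For example, with the configured file-name of /usr/local/video/ and a projectName of demo, a recording started by the controller above would end up at /usr/local/video/demo/video/&lt;timestamp&gt;.mp4 (the .mp4 extension is appended inside VideoRecorder).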
VideoRecorder
import lombok.Data;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;

import javax.sound.sampled.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

@Data
public class VideoRecorder {

    // Scheduler that grabs screen frames at the configured frame rate
    private ScheduledThreadPoolExecutor screenTimer;
    // Default capture size; unused here because the actual size comes from the Rectangle
    private static final int WIDTH = 900;
    private static final int HEIGHT = 600;
    private Rectangle rectangle;
    private FFmpegFrameRecorder recorder;
    private Robot robot;
    // Scheduler that reads audio samples from the capture device
    private ScheduledThreadPoolExecutor exec;
    private TargetDataLine line;
    private AudioFormat audioFormat;
    private DataLine.Info dataLineInfo;
    private boolean isHaveDevice = true;
    private long startTime = 0;
    private long videoTS = 0;
    private long pauseTime = 0;
    private double frameRate = 24;

    public VideoRecorder(String fileName, boolean isHaveDevice, Rectangle rectangle) {
        this.rectangle = rectangle;
        System.out.println(rectangle);
        recorder = new FFmpegFrameRecorder(fileName + ".mp4",
                (int) rectangle.getWidth(), (int) rectangle.getHeight(), 2);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
        recorder.setFormat("mp4");
        recorder.setSampleRate(44100);
        recorder.setFrameRate(frameRate);
        recorder.setVideoQuality(1);
        recorder.setVideoOption("crf", "23");
        recorder.setVideoBitrate(1000000);
        recorder.setVideoOption("preset", "slow");
        // Pixel format 0 is AV_PIX_FMT_YUV420P; use the named constant instead of the magic number
        recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
        recorder.setAudioChannels(2);
        recorder.setAudioOption("crf", "0");
        recorder.setAudioQuality(0);
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        try {
            robot = new Robot();
        } catch (AWTException e) {
            e.printStackTrace();
        }
        try {
            recorder.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
        this.isHaveDevice = isHaveDevice;
    }
    public void start() {
        if (startTime == 0) {
            startTime = System.currentTimeMillis();
        }
        if (pauseTime == 0) {
            pauseTime = System.currentTimeMillis();
        }
        screenTimer = new ScheduledThreadPoolExecutor(5);
        screenTimer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                // Grab the screen region and copy it into a BGR image that FFmpeg can encode
                BufferedImage screenCapture = robot.createScreenCapture(rectangle);
                BufferedImage videoImg = new BufferedImage((int) rectangle.getWidth(), (int) rectangle.getHeight(),
                        BufferedImage.TYPE_3BYTE_BGR);
                Graphics2D videoGraphics = videoImg.createGraphics();
                videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
                videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
                        RenderingHints.VALUE_COLOR_RENDER_SPEED);
                videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
                videoGraphics.drawImage(screenCapture, 0, 0, null);
                Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
                Frame frame = java2dConverter.convert(videoImg);
                try {
                    // Timestamp in microseconds, offset by the interval since the last pause/start
                    videoTS = 1000L
                            * (System.currentTimeMillis() - startTime - (System.currentTimeMillis() - pauseTime));
                    // Only ever move the recorder's timestamp forward
                    if (videoTS > recorder.getTimestamp()) {
                        recorder.setTimestamp(videoTS);
                    }
                    recorder.record(frame);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // Release the per-frame resources
                videoGraphics.dispose();
                videoImg.flush();
                screenCapture.flush();
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }
    public void capture() {
        // Default capture device: 44.1 kHz, 16-bit, stereo, signed, little-endian
        audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
        dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        try {
            line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            line.open(audioFormat);
        } catch (LineUnavailableException e) {
            e.printStackTrace();
            return;
        }
        line.start();
        final int sampleRate = (int) audioFormat.getSampleRate();
        final int numChannels = audioFormat.getChannels();
        // Buffer for the incoming PCM data
        int audioBufferSize = sampleRate * numChannels;
        final byte[] audioBytes = new byte[audioBufferSize];
        exec = new ScheduledThreadPoolExecutor(1);
        exec.scheduleAtFixedRate(new Runnable() {
            public void run() {
                try {
                    // Read whatever is available from the line and hand it to the recorder
                    int nBytesRead = line.read(audioBytes, 0, line.available());
                    int nSamplesRead = nBytesRead / 2;
                    short[] samples = new short[nSamplesRead];
                    ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                    ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                    recorder.recordSamples(sampleRate, numChannels, sBuff);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }
    public void stop() {
        if (null != screenTimer) {
            screenTimer.shutdownNow();
        }
        try {
            recorder.stop();
            recorder.release();
            recorder.close();
            screenTimer = null;
            if (isHaveDevice) {
                if (null != exec) {
                    exec.shutdownNow();
                }
                if (null != line) {
                    line.stop();
                    line.close();
                }
                dataLineInfo = null;
                audioFormat = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void pause() {
        if (null != screenTimer) {
            screenTimer.shutdownNow();
            screenTimer = null;
        }
        if (isHaveDevice) {
            if (null != exec) {
                exec.shutdownNow();
                exec = null;
            }
            if (null != line) {
                line.stop();
                line.close();
                line = null;
            }
            dataLineInfo = null;
            audioFormat = null;
        }
        // Remember when the pause happened so start() can offset the timestamps
        pauseTime = System.currentTimeMillis();
    }
}
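Note that start() only schedules the screen-grabbing task; audio is recorded only once capture() has been called. pause() shuts both schedulers down and records the pause time, so calling start() again on the same VideoRecorder instance resumes recording into the same file, although the controller above creates a fresh instance on every /start request.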
RecordeVO
import lombok.Data;

@Data
public class RecordeVO {
    // Top-left corner of the capture rectangle
    private int startx;
    private int starty;
    // Size of the capture rectangle in pixels
    private int width;
    private int height;
    private String videoName;
    private String imgName;
    private String projectName;
}