unity截屏截图录屏

本文介绍了如何在Unity中进行截屏和截图操作,包括使用ScreenCapture.CaptureScreenshotIntoRenderTexture方法截取屏幕图像,以及利用CameraCaptureBridge.AddCaptureAction直接捕获相机画面的技术。

unity截屏截图录屏

截取屏幕图像

  • 重点推荐用 ScreenCapture.CaptureScreenshotIntoRenderTexture 截取图像,缩放后取回
    	public class TestScreenShot : MonoBehaviour
        {
            public Button RecordBtn;
            public RawImage ScreenImage;
    
            RenderTexture m_renderTexture;
            RenderTexture m_targetTexture;
            Texture2D m_captureTexture;
    
            // Start is called before the first frame update
            void Start()
            {
                RecordBtn.onClick.AddListener(OnRecordClick);
            }
    
            private void OnRecordClick()
            {
                StartCoroutine(RecordCoroutine2());
            }
    
            //  截屏方式1: 使用 Texture2D.ReadPixels
            //  缺点是不能在GPU上缩放后再取回数据,只能取回数据后再缩放,分辨率大的情况下会卡
            IEnumerator RecordCoroutine()
            {
                //  必须等渲染完成,否则会报
                //  ReadPixels was called to read pixels from system frame buffer, while not inside drawing frame
                yield return new WaitForEndOfFrame();
    
                Texture2D texture = GetCaptureTexture(Screen.width, Screen.height);
                //  ReadPixels 从 GPU 取回图像数据,比较耗时
                //  不会进行任何缩放,截取屏幕矩形区域,然后把该矩形直接帖到纹理上,超出范围的被裁剪
                //  destX 和 destY 是像素坐标
                texture.ReadPixels(new Rect(0, 0, Screen.width / 2, Screen.height / 2), 0, 0, false);
                //  Apply 把图像数据上传 GPU,比较耗时,如果是保存到图片,则不需要调用,如果是给 RawImage 显示,则需要调用
                texture.Apply();
    
                SaveTexture(texture);
            }
    
            //  截屏方式2(最高效): 使用 ScreenCapture.CaptureScreenshotIntoRenderTexture 
            //  截取的是GPU图像,还未取回CPU,所以非常高效,配合 Graphics.Blit 进行缩放再取回数据,分辨率再大也不卡
            IEnumerator RecordCoroutine2()
            {
                //  必须等渲染完成,否则会报
                //  ReadPixels was called to read pixels from system frame buffer, while not inside drawing frame
                yield return new WaitForEndOfFrame();
    
    
                RenderTexture screen = GetRenderTexture(Screen.width, Screen.height);
                //  截取屏幕图像存在 GPU中,速度快,但要注意图像是上下颠倒的
                //  在 Editor 模式下,横屏场景中的相机录出来变形,原因可能是 Editor 模式下 Screen.orientation 永远是 Portrait
                ScreenCapture.CaptureScreenshotIntoRenderTexture(screen);
    
                RenderTexture target = GetTargetTexture(Screen.width/2, Screen.height/2);
    
                //  直接在 GPU 中缩放,速度快
                //  Blit 操作是进行纹理映射,target 上的纹理坐标uv的颜色 color=texture(sourceTexture,uv*scale+offset)
                //  由于截屏是上下颠倒的,通过设置 scale=(1,-1) offset=(0,1) 可以实现上下颠倒,摆正图像
                //  Blit 会设置 RenderTexture.active,所以调用完要设置 active=null,否则target可能会被其它代码写入其它东西
                Graphics.Blit(screen, target, new Vector2(1f, -1f), new Vector2(0f,1f));
                RenderTexture.active = null;
    
                SaveTexture(target);
            }
    
            //  截屏方式3(最简单): 使用 ScreenCapture.CaptureScreenshot 
            IEnumerator RecordCoroutine3()
            {
                //  必须等渲染完成,否则会报
                //  ReadPixels was called to read pixels from system frame buffer, while not inside drawing frame
                yield return new WaitForEndOfFrame();
    
                ScreenCapture.CaptureScreenshot(SAVE_IMAGE_PATH);
            }
    
            public string SAVE_IMAGE_PATH => Path.Combine(Application.persistentDataPath, "ScreenShot.png");
            void SaveTexture(Texture texture)
            {
                ScreenImage.texture = texture;
    
                Texture2D saveTexture = null;
                if ( texture is Texture2D t2d )
                {
                    saveTexture = t2d;
                }
                else if (texture is RenderTexture rt)
                {
                    saveTexture = GetCaptureTexture(texture.width, texture.height);
                    RenderTexture old = RenderTexture.active;
                    RenderTexture.active = rt;
                    saveTexture.ReadPixels(new Rect(0, 0, texture.width, texture.height), 0, 0, false);
                    RenderTexture.active = old;
                }
    
                byte[] data = saveTexture.EncodeToPNG();
                File.WriteAllBytes(SAVE_IMAGE_PATH, data);
    
                Debug.Log($"ScreenShot SaveToPng {SAVE_IMAGE_PATH}");
            }
    
            RenderTexture GetRenderTexture(int width, int height)
            {
                if ( m_renderTexture != null && m_renderTexture.width == width && m_renderTexture.height == height )
                    return m_renderTexture;
    
                if ( m_renderTexture != null )
                {
                    Destroy(m_renderTexture);
                }
    
                m_renderTexture = CreateRenderTexture(width, height, 0);
                return m_renderTexture;
            }
    
            RenderTexture GetTargetTexture(int width, int height)
            {
                if (m_targetTexture != null && m_targetTexture.width == width && m_targetTexture.height == height)
                    return m_targetTexture;
    
                if (m_targetTexture != null)
                {
                    Destroy(m_targetTexture);
                }
    
                m_targetTexture = CreateRenderTexture(width, height, 0);
                return m_targetTexture;
            }
    
            Texture2D GetCaptureTexture(int width, int height)
            {
                if (m_captureTexture != null && m_captureTexture.width == width && m_captureTexture.height == height)
                    return m_captureTexture;
    
                if (m_captureTexture != null)
                    Destroy(m_captureTexture);
    
                m_captureTexture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
                return m_captureTexture;
            }
    
            public static RenderTexture CreateRenderTexture(int width, int height, int depth = 24,
                    RenderTextureFormat format = RenderTextureFormat.ARGB32, bool usequaAnti = true)
            {
                var rt = new RenderTexture(width, height, depth, format);
                rt.wrapMode = TextureWrapMode.Clamp;
                if (QualitySettings.antiAliasing > 0 && usequaAnti)
                {
                    rt.antiAliasing = QualitySettings.antiAliasing;
                }
                rt.Create();
                return rt;
            }
        }
    
    

截取相机图像

  • 重点推荐用 CameraCaptureBridge.AddCaptureAction 直接抓取相机画面
        /// <summary>
        /// Demonstrates two ways of capturing a single camera's output:
        /// 1) re-rendering the camera into a RenderTexture via Camera.Render,
        /// 2) grabbing the frame the camera actually rendered, using
        ///    CameraCaptureBridge.AddCaptureAction on URP or Camera.AddCommandBuffer
        ///    on the built-in pipeline (recommended).
        /// </summary>
        public class TestCameraShot : MonoBehaviour
        {
            public Button RecordBtn;
            public RawImage ScreenImage;
            public Camera RecordCamera;

            // Cached textures, reused across captures so we do not allocate a new one per shot.
            RenderTexture m_renderTexture;
            Texture2D m_captureTexture;

            void Start()
            {
                //  Use an explicit null check rather than '??': UnityEngine.Object overloads
                //  operator== so destroyed/missing objects compare equal to null, and the
                //  '??' operator bypasses that overload, skipping the Camera.main fallback.
                if (RecordCamera == null)
                    RecordCamera = Camera.main;
                RecordBtn.onClick.AddListener(OnRecordClick);
            }

            // Release cached native texture resources when this component is destroyed.
            void OnDestroy()
            {
                if (m_renderTexture != null) Destroy(m_renderTexture);
                if (m_captureTexture != null) Destroy(m_captureTexture);
            }

            private void OnRecordClick()
            {
                StartCoroutine(RecordCoroutine2());
            }

            //  Capture method 1: Camera.Render.
            //  Drawback: the camera renders twice, which is inefficient.
            IEnumerator RecordCoroutine()
            {
                //  Must wait until rendering has finished, otherwise Unity logs:
                //  "ReadPixels was called to read pixels from system frame buffer, while not inside drawing frame"
                yield return new WaitForEndOfFrame();

                RenderTexture texture = GetRenderTexture(Screen.width, Screen.height);
                RenderTexture old = RecordCamera.targetTexture;
                RecordCamera.targetTexture = texture;
                RecordCamera.Render();
                RecordCamera.targetTexture = old;
                SaveTexture(texture);
            }

            //  Capture method 2 (most efficient): CameraCaptureBridge.AddCaptureAction on URP,
            //  Camera.AddCommandBuffer on the built-in pipeline.
            //  Grabs the frame the camera actually rendered. Note it only takes effect on the
            //  last camera, and the output includes whatever earlier cameras drew.
            IEnumerator RecordCoroutine2()
            {
                RenderTexture renderTexture = GetRenderTexture(Screen.width, Screen.height);

                if (GraphicsSettings.renderPipelineAsset == null)
                {
                    //  Built-in pipeline.
                    CommandBuffer cb = new CommandBuffer { name = "Record: copy frame buffer" };
                    //  The built-in pipeline capture is upside down, so flip it.
                    AddCaptureCommands(BuiltinRenderTextureType.CurrentActive, cb, renderTexture, true);
                    //  Attach the commands to the camera for one frame, then detach.
                    RecordCamera.AddCommandBuffer(CameraEvent.AfterEverything, cb);
                    yield return null;
                    RecordCamera.RemoveCommandBuffer(CameraEvent.AfterEverything, cb);
                    cb.Release();
                }
                else
                {
                    //  URP pipeline.
                    bool record = false;
                    void AddCaptureCommandsWrap(RenderTargetIdentifier source, CommandBuffer cb)
                    {
                        //  URP captures are already the right way up.
                        AddCaptureCommands(source, cb, renderTexture, false);
                        record = true;
                    }
                    //  Register the capture callback...
                    CameraCaptureBridge.AddCaptureAction(RecordCamera, AddCaptureCommandsWrap);

                    yield return new WaitUntil(() => record);

                    //  ...and remove it once a frame has been grabbed.
                    CameraCaptureBridge.RemoveCaptureAction(RecordCamera, AddCaptureCommandsWrap);
                }

                SaveTexture(renderTexture);
            }

            //  Appends the commands that copy 'source' into 'renderTexture', optionally
            //  flipping vertically. CurrentActive is staged through a temporary RT first
            //  before being blitted into the target.
            protected void AddCaptureCommands(RenderTargetIdentifier source, CommandBuffer cb, RenderTexture renderTexture, bool flipY)
            {
                if (source == BuiltinRenderTextureType.CurrentActive)
                {
                    var tid = Shader.PropertyToID("_MainTex");
                    cb.GetTemporaryRT(tid, renderTexture.width, renderTexture.height, 0, FilterMode.Bilinear);
                    cb.Blit(source, tid);
                    if (flipY)
                        cb.Blit(tid, renderTexture, new Vector2(1, -1), new Vector2(0, 1));
                    else
                        cb.Blit(tid, renderTexture);
                    cb.ReleaseTemporaryRT(tid);
                }
                else
                {
                    if (flipY)
                        cb.Blit(source, renderTexture, new Vector2(1, -1), new Vector2(0, 1));
                    else
                        cb.Blit(source, renderTexture);
                }
            }

            public string SAVE_IMAGE_PATH => Path.Combine(Application.persistentDataPath, "ScreenShot.png");

            //  Shows the texture on ScreenImage and writes it to SAVE_IMAGE_PATH as a PNG.
            //  Accepts either a Texture2D or a RenderTexture.
            void SaveTexture(Texture texture)
            {
                ScreenImage.texture = texture;

                Texture2D saveTexture = null;
                if (texture is Texture2D t2d)
                {
                    saveTexture = t2d;
                }
                else if (texture is RenderTexture rt)
                {
                    //  A RenderTexture lives on the GPU; read it back through a Texture2D.
                    saveTexture = GetCaptureTexture(texture.width, texture.height);
                    RenderTexture old = RenderTexture.active;
                    RenderTexture.active = rt;
                    saveTexture.ReadPixels(new Rect(0, 0, texture.width, texture.height), 0, 0, false);
                    RenderTexture.active = old;
                }

                if (saveTexture == null)
                {
                    //  Guard: other Texture subclasses cannot be encoded here;
                    //  previously this fell through to a NullReferenceException.
                    Debug.LogError($"ScreenShot SaveTexture: unsupported texture type {texture?.GetType()}");
                    return;
                }

                byte[] data = saveTexture.EncodeToPNG();
                File.WriteAllBytes(SAVE_IMAGE_PATH, data);

                Debug.Log($"ScreenShot SaveToPng {SAVE_IMAGE_PATH}");
            }

            //  Returns the cached RenderTexture, recreating it when the size changes.
            RenderTexture GetRenderTexture(int width, int height)
            {
                if (m_renderTexture != null && m_renderTexture.width == width && m_renderTexture.height == height)
                    return m_renderTexture;

                if (m_renderTexture != null)
                    Destroy(m_renderTexture);

                m_renderTexture = CreateRenderTexture(width, height, 0);
                return m_renderTexture;
            }

            //  Returns the cached CPU-side Texture2D used for read-back, recreating it on size change.
            Texture2D GetCaptureTexture(int width, int height)
            {
                if (m_captureTexture != null && m_captureTexture.width == width && m_captureTexture.height == height)
                    return m_captureTexture;

                if (m_captureTexture != null)
                    Destroy(m_captureTexture);

                m_captureTexture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
                return m_captureTexture;
            }

            //  Creates a RenderTexture with clamped wrapping; anti-aliasing follows the
            //  project quality settings unless usequaAnti is false.
            public static RenderTexture CreateRenderTexture(int width, int height, int depth = 24,
                    RenderTextureFormat format = RenderTextureFormat.ARGB32, bool usequaAnti = true)
            {
                var rt = new RenderTexture(width, height, depth, format);
                rt.wrapMode = TextureWrapMode.Clamp;
                if (QualitySettings.antiAliasing > 0 && usequaAnti)
                {
                    rt.antiAliasing = QualitySettings.antiAliasing;
                }
                rt.Create();
                return rt;
            }

        }
    
<think>嗯,用户想在Unity WebGL中实现录屏功能。首先,我需要回忆一下相关的引用内容,看看有没有可用的信息。根据引用[4],提到了在Unity WebGL中实现截屏的方法,主要是通过定义图片、等待一帧再执行访问。这可能和录屏有关联,但需要连续截取多帧,所以可能需要不同的方法。 然后看看其他引用,引用[1]提到Unity自带的工具和Unity Recorder插件,但这些可能在WebGL平台上不可用,因为WebGL有浏览器的限制,不能直接访问系统资源。引用[2]和[3]提到了第三方插件,比如AVPro Movie Capture,但这些插件是否支持WebGL呢?需要确认一下,可能这些插件主要是针对桌面或移动平台的,WebGL可能需要不同的处理方式。 接下来,用户需要的是在WebGL中录屏,所以可能需要使用JavaScript的库或者Web API。例如,使用MediaRecorder API来录制屏幕内容。不过Unity WebGL如何与JavaScript交互呢?可能需要通过Unity的插件系统调用JS代码。 记得引用[4]中提到的截屏方法,可能需要将每一帧的画面转换为数据,然后通过JS发送到浏览器的API进行录制。但连续截屏可能会有性能问题,需要优化。 步骤可能包括:1. 在Unity中捕获每一帧的图像数据。2. 通过WebGL的插件机制将数据传递到JavaScript。3. 使用MediaRecorder API进行视频编码和保存。或者,使用第三方库如CCapture.js来实现。 另外,需要考虑浏览器兼容性和性能问题,比如帧率、分辨率和内存使用。可能还需要处理音频的录制,但根据引用[3],AVPro插件支持音频,但WebGL可能需要单独处理。 总结下来,可能的解决方案是结合Unity截屏功能和JavaScript的录制API,通过插件交互实现。需要详细步骤和代码示例,确保用户能够逐步实施。同时,要提到可能的限制,比如浏览器的权限请求和性能注意事项。</think>在Unity WebGL中实现录屏功能需要结合浏览器API和Unity的特殊处理,以下是具体实现方法: ### 一、技术实现原理 WebGL平台受限于浏览器安全策略,需通过以下两种方式实现: 1. **逐帧截屏+编码**:通过连续截取画面帧,使用JavaScript库编码为视频 2. **浏览器原生API**:利用MediaRecorder API直接录制Canvas输出[^4] ### 二、具体实现步骤 #### 方法1:JS插件实现(推荐) 1. 引入CCapture.js库 ```javascript // 在index.html添加 <script src="https://rawgit.com/spite/ccapture.js/master/build/CCapture.all.min.js"></script> ``` 2. Unity中创建插件脚本 ```csharp // ScreenRecorder.cs using System.Runtime.InteropServices; public class ScreenRecorder : MonoBehaviour { [DllImport("__Internal")] private static extern void StartRecording(); [DllImport("__Internal")] private static extern void StopRecording(); public void StartRecord() { #if UNITY_WEBGL && !UNITY_EDITOR StartRecording(); #endif } public void StopRecord() { #if UNITY_WEBGL && !UNITY_EDITOR StopRecording(); #endif } } ``` 3. 
创建.jslib插件 ```javascript // Plugins/WebGL/WebGLRecorder.jslib mergeInto(LibraryManager.library, { StartRecording: function() { const canvas = document.querySelector('#canvas'); capturer = new CCapture({ format: 'webm' }); capturer.start(); function captureFrame() { capturer.capture(canvas); requestAnimationFrame(captureFrame); } requestAnimationFrame(captureFrame); }, StopRecording: function() { capturer.stop(); capturer.save(function(blob) { const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = 'recording.webm'; a.click(); }); } }); ``` #### 方法2:Canvas直接录制 ```javascript // 通过MediaRecorder API实现 let mediaRecorder; navigator.mediaDevices.getUserMedia({ video: { mediaSource: 'screen', width: { ideal: 1280 }, height: { ideal: 720 } }}).then(stream => { mediaRecorder = new MediaRecorder(stream); mediaRecorder.ondataavailable = handleDataAvailable; }); function startRecording() { mediaRecorder.start(1000); // 每1秒收集数据 } function stopRecording() { mediaRecorder.stop(); } ``` ### 三、性能优化建议 1. 设置合适的分辨率比例:推荐使用$0.5-0.7$倍屏幕分辨率 2. 调整帧率限制:通过`Application.targetFrameRate = 30`控制帧率 3. 使用WebWorker进行后台编码[^1] 4. 采用渐进式JPEG压缩减少传输数据量 ### 四、注意事项 1. 需要用户主动触发录制操作(浏览器安全策略) 2. iOS Safari有额外限制,需检测`navigator.mediaDevices.getDisplayMedia`支持性 3. 输出格式建议优先选择WebM格式,兼容性最佳[^3]
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值