这篇是接去年年初的那篇博文:http://blog.youkuaiyun.com/chenxupro/article/details/7237084
当时想把代码分享来着,后来渐渐就忘了,十一放假,有些空余时间,于是把以前做的音视频相关的东西都整理出来。
Windows Phone平台相比于Android和iOS播放H.264裸流要方便点,可以直接使用API调用硬件解码。
核心原理是设置MediaElement的MediaStreamSource,设置MediaStream的Attributes以及媒体流相关的信息。
我这里是直接读取H.264裸流文件,取出NALU,拼成帧之后送入队列进行解码。
测试文件为海思Hi3512编码的CIF裸码流存成的文件。
核心代码包括MainPage.xaml.cs+Sample.cs+H264MediaStreamSource.cs
Sample.cs代码:
using System;
namespace WP7H264Player
{
public class Sample
{
/// <summary>
/// Timestamp when it was created
/// </summary>
private DateTime Time;
/// <summary>
/// Byte buffer to hold the retrived sample
/// </summary>
private byte[] Buffer;
/// <summary>
/// buffer size->BufSize
/// </summary>
private int BufSize;
public int sampleBufSize
{
get
{
return BufSize;
}
set
{
BufSize = value;
}
}
public DateTime sampleTime
{
get
{
return Time;
}
set
{
Time = value;
}
}
public byte[] sampleBuffer
{
get
{
return Buffer;
}
set
{
Buffer = value;
}
}
}
}
MainPage.xaml.cs代码:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Shapes;
using Microsoft.Phone.Controls;
//用于输出调试信息
using System.Diagnostics;
namespace WP7H264Player
{
public partial class MainPage : PhoneApplicationPage
{
// Instantiate our derived MediaStreamSource class
H264MediaStreamSource _mediaSource;
/// <summary>
/// Flag to indicate if our media has been opened or not
/// </summary>
bool mediaOpen = false;
// 构造函数
public MainPage()
{
InitializeComponent();
}
private void button_StartPlay_Click(object sender, RoutedEventArgs e)
{
System.Windows.Resources.StreamResourceInfo reader = Application.GetResourceStream(new Uri("test.h264", UriKind.Relative));
button_StartPlay.Visibility = System.Windows.Visibility.Collapsed;
// initialize our media stream object
_mediaSource = new H264MediaStreamSource();
if (_mediaSource.startStreamThread(reader.Stream))
{
// set flag to true - media has been opened
mediaOpen = true;
// set the source of our media stream to the MediaElement
mediaElement_H264Win.SetSource(_mediaSource);
}
}
private void button_StopPlay_Click(object sender, RoutedEventArgs e)
{
if (mediaOpen)
{
_mediaSource.closeStream();
mediaElement_H264Win.Stop();
_mediaSource = null;
mediaOpen = false;
}
}
}
}
H264MediaStreamSource.cs代码:
using System;
using System.Net;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Shapes;
using System.IO;
using System.Collections.Generic;
using System.ComponentModel;
using System.Threading;
//用于输出调试信息
using System.Diagnostics;
namespace WP7H264Player
{
public class H264MediaStreamSource : MediaStreamSource
{
/// <summary>
/// Width of the Video Frame (set as constant)
/// </summary>
private const int _frameWidth = 352; // *** best to set this when you have acquired actual video width using obj.videoWidth()
/// <summary>
/// Height of the Video frame (set as constant)
/// </summary>
private const int _frameHeight = 288; // *** best to set this when you have acquired actual video height using obj.videoHeight()
/// <summary>
/// Rendering time in the media
/// </summary>
private long _timeStamp = 0;
/// <summary>
/// Number of bytes of each pixel (4 bytes - RGBA)
/// </summary>
private const int _framePixelSize = 4;
/// <summary>
/// Size in bytes for each Sample of type RGBA (4 bytes per pixel)
/// </summary>
private const int _count = _frameHeight * _frameWidth * _framePixelSize;
/// <summary>
/// Size in bytes of the stream (same as the frame size in bytes)
/// </summary>
private const int _frameStreamSize = _count;
/// <summary>
/// Stream to contain a Sample
/// </summary>
private MemoryStream _stream = new MemoryStream(_frameStreamSize);
/// <summary>
/// The Offset into the stream where the actual sample data begins
/// </summary>
private int _offset = 0; //实际sample采样样本起始位置offset偏移
/// <summary>
/// Buffer to hold a collection of type Sample.
/// </summary>
private Queue<Sample> sampleBufferList = new Queue<Sample>();
/// <summary>
/// variable holds the FPS of the video played
/// </summary>
public static long _speed = 25;
/// <summary>
/// Timeout period (fps from video is used).
/// </summary>
private TimeSpan timeout = TimeSpan.FromSeconds((double)_speed);
/// <summary>
/// Empty Dictionary used in the returned empty sample.
/// </summary>
private Dictionary<MediaSampleAttributeKeys, string> emptyDictionary = new Dictionary<MediaSampleAttributeKeys, string>();
/// <summary>
/// Total number of Samples to buffer.
/// </summary>
private const int numberOfSamplesBuffer = 10; // I set to 10 as an example, but you can increase or decrease
/// <summary>
/// flag to kill a Sample processing thread
/// </summary>
private static bool _done = true;
/// <summary>
/// Background Worker Thread to process Samples
/// </summary>
private BackgroundWorker _worker = new BackgroundWorker();
static readonly byte[] startCode = new byte[] { 0, 0, 0, 1 };
//媒体流描述类
/// <summary>
/// Describes the Media Stream
/// </summary>
private MediaStreamDescription videoStreamDescription;
//打开媒体后的异步操作
protected override void OpenMediaAsync()
{
//The description will also contain information about one or more streams. Each stream’s description must include the following information:
//The identity of the codec.
//A set of bytes, called the codec private data, to initialize the codec.
//If the media has a video stream, the description of the stream must also include the following:
//Width and height of the original encoded images.
//对于流的描述信息
//MediaStreamAttributeKeys->VideoFourCC->H264
Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
//Video CodecPrivateData
//Video codecs are identified by a four character code stored with MediaStreamAttributeKeys.VideoFourCC.
//Codec initialization bytes are stored with MediaStreamAttributeKeys.CodecPrivateData as a base16 encoded string.
//CodecPrivateData---->>>>A base16-encoded string of the form:
// 0x00000001 SequenceParameterSet 0x00000001 PictureParameterSet
// See ISO/IEC-14496-10 for details on Start Codes, Sequence Parameter Set (SPS) and Picture Parameter Set (PPS) formats.
//比如->"00000001674D401E965201405FF2E020100000000168EF3880"
videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
videoStreamAttributes[MediaStreamAttributeKeys.Height] = _frameHeight.ToString();
videoStreamAttributes[MediaStreamAttributeKeys.Width] = _frameWidth.ToString();
//videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = "000000016742E014DA0582510000000168CE30A480";
//这里不需要SPS PPS之类的byte数组大概是因为H264是能认出的视频编码
this.videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);
//For the media, the following information must be included in the description:
//Media
//mediaSourceAttributes的属性
Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
//The duration. ->Timeout是和1/x相关的
mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(0).Ticks.ToString();
//mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(25).Ticks.ToString();
//Whether or not the media supports seeking.
mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();
//MediaStreamDescription->媒体流描述
List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();
mediaStreamDescriptions.Add(this.videoStreamDescription);
// The MediaStreamSource implementation should respond by calling ReportOpenMediaCompleted once it can describe the media to Silverlight.
//This information is passed to Silverlight by the MediaStreamSource.ReportOpenMediaCompleted method.
//The ReportOpenMediaCompleted method takes the following parameters:
// A dictionary of attributes and values describing the media. ->描述媒体的属性和值->用键值对
// A collection of MediaStreamDescription objects for both audio and video. Each object is created with a dictionary of attributes and values conveying the above stream information.
this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
return;
}
//异步调用处理sample采样样本线程
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
if (mediaStreamType == MediaStreamType.Video)
{
// start a thread to get the sample
Thread thread = new Thread(new ThreadStart(this.retrieveSampleThread));
thread.Start();
return;
}
}
protected override void SeekAsync(long seekToTime)
{
_timeStamp = seekToTime;
ReportSeekCompleted(seekToTime);
//throw new NotImplementedException();
}
protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
{
//throw new NotImplementedException();
}
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
//throw new NotImplementedException();
}
//sampleBufferList->样本缓冲区列表清空
protected override void CloseMedia()
{
// Do your cleanup here
sampleBufferList.Clear();
sampleBufferList = null;
//Stream to contain a Sample
_stream.Close();
_stream = null;
}
public void closeStream()
{
// set to true to stop the processing thread
_done = true;
//this.CloseMedia();
}
/// <summary>
/// Method that checks availability of a Sample
/// </summary>
private void retrieveSampleThread()
{
// We always start at the beginning of the stream
// this is because we always reset the stream with one sample at a time
// if you decide to add more than one sample into the stream then you
// can modify the logic to increment this offset by the size of the sample
// everytime there is a call to return a sample
_offset = 0;
_stream.Seek(0, SeekOrigin.Begin);
// Instantiate a Sample
// The Sample has two members (Time & Buffer)
//类的成员:DateTime + byte[]
Sample _sample = null;
// try to lock the sampleBufferList
lock (this)
{
// check if our sampleBufferList is empty
if (this.sampleBufferList.Count == 0)
{
Debug.WriteLine("this.sampleBufferList.Count == 0!");
// indeed sampleBufferList is empty
// now release it to give the other thread a chance to
// add a Sample.
// After release, keep trying to reacquire sampleBufferList
// for a period defined by timeout,
// hopefully it will have a Sample.
// if after the timeout passes, return a empty sample and return.
if (!Monitor.Wait(this, this.timeout))
{
we could not reacquire the sampleBufferList which means
we do not have a valid sample to return to MediaElement
so, let's just tell MediaElement that we are still buffring and return.
this.ReportGetSampleProgress(0);
return;
}
}
// we managed to reacquire sampleBufferList before the timeout period
// therefore dequeue first Sample in the buffer
_sample = this.sampleBufferList.Dequeue();
// immediately notify a waiting thread in the queue
Monitor.Pulse(this);
}
// write the retrieved Sample into the stream
// remember our stream is just one Sample
_stream.Write(_sample.sampleBuffer, 0, _sample.sampleBufSize);
MediaStreamSample mediaSample = new MediaStreamSample(
this.videoStreamDescription,
_stream,
_offset,
_sample.sampleBufSize,
_timeStamp,
this.emptyDictionary);
// uncomment this code and comment out the one after and see the strange effect
//_currentTime += (int)TimeSpan.FromSeconds((double)1 / _speed).Ticks
// I had used the code line above and could not get the frames playing correctly
// but i just tried to multiply by 2 and suprisingly playback was smooth
// if anyone understands why please let me know.
_timeStamp += (int)TimeSpan.FromSeconds((double)1 / _speed).Ticks * 2;
_timeStamp += (int)TimeSpan.FromSeconds((double)1 / _speed).Ticks;
// report back a successful Sample
this.ReportGetSampleCompleted(mediaSample);
return;
}
//NALU分析相关变量!!!
Boolean IsStop = false;
byte[] CurrentUDPData = null;
UInt32 mTrans = 0xFFFFFFFF;
int CurrentNALUSplitFlag = 0;
Boolean bFirst = true;
Boolean bFindSPS = false;
public int MergeBuffer(byte[] NalBuf, int NalBufUsed, byte[] SockBuf, int SockBufUsed, int SockRemain)
{
int i = 0;
byte Temp = 0x00;
for (i = 0; i < SockRemain; i++)
{
Temp = SockBuf[i + SockBufUsed];
NalBuf[i + NalBufUsed] = Temp;
mTrans <<= 8;
mTrans |= Temp;
if (mTrans == 1)
{
i++;
CurrentNALUSplitFlag = 4;
break;
}
else if ((mTrans << 8) == 0x00000100)
{
i++;
CurrentNALUSplitFlag = 3;
break;
}
}
return i;
}
public void SetStartFlagTrue()
{
bFindSPS = false;
}
/// <summary>
/// Method that retrieves a Sample from an AVI Stream
/// </summary>
/// <returns></returns>
public Boolean startStreamThread(Stream OpenedH264FileStream)
{
if (true)
{
_done = false;
_worker.WorkerReportsProgress = true;
_worker.DoWork += (s, ex) =>
{
// getTotalNumberOfFrames:
// get the total number of frames in the video
// *** Not used in this sample code ***
numFrames = obj.getTotalNumberOfFrames();
// videoFPS:
// get the fps of the video
_speed = obj.videoFPS();
// if not specified, the default it to 30 frames per second
if (_speed == 0)
_speed = 25;
int iTemp = 0;
int nalLen;
int bytesRead = 0;
int NalBufUsed = 0;
int SockBufUsed = 0;
byte[] NalBuf = new byte[409600];
byte[] SockBuf = null;
byte[] MyNalBuf = null;
byte[] DecodeBuf = new byte[409600];
int DecodeBufLength = 0;
CurrentNALUSplitFlag = 0;
byte[] UDPPacketData = new byte[4096];
while (!_done)
{
int GetUDPDataLength = ((Stream)OpenedH264FileStream).Read(UDPPacketData, 0, 4096);
if (GetUDPDataLength != 0)
{
if ((GetUDPDataLength > 0))
{
SockBuf = new byte[GetUDPDataLength];
Array.Copy(UDPPacketData, 0, SockBuf, 0, GetUDPDataLength);
bytesRead = GetUDPDataLength;
if (bytesRead <= 0)
{
// break;
continue;
_done = true;
}
else
{
SockBufUsed = 0;
while (bytesRead - SockBufUsed > 0)
{
CurrentNALUSplitFlag = 0;
nalLen = MergeBuffer(NalBuf, NalBufUsed, SockBuf, SockBufUsed, bytesRead - SockBufUsed);
NalBufUsed += nalLen;
SockBufUsed += nalLen;
while ((mTrans == 1) || ((mTrans != 1) && ((mTrans << 8) == 0x00000100)))
{
mTrans = 0xFFFFFFFF;
if (bFirst == true) // the first start flag
{
bFirst = false;
}
else // a complete NAL data, include 0x00000001 trail,OR 0x000001
{
if (bFindSPS == false)
{
if ((NalBuf[4] & 0x1F) == 7)
{
bFindSPS = true;
}
else
{
NalBuf[0] = 0;
NalBuf[1] = 0;
NalBuf[2] = 0;
NalBuf[3] = 1;
NalBufUsed = 4;
break;
}
}
int MyNALULength = NalBufUsed - CurrentNALUSplitFlag;
MyNalBuf = new byte[NalBufUsed - CurrentNALUSplitFlag]; //by chenxupro
Array.Copy(NalBuf, 0, MyNalBuf, 0, NalBufUsed - CurrentNALUSplitFlag);
if (((NalBuf[4] & 0x1F) == 5) || ((NalBuf[4] & 0x1F) == 1))
{
if (((NalBuf[5] >> 7) & 1) == 1)
{
if (DecodeBufLength > 0)
{
Sample localSample = new Sample();
byte[] NewDecodeBuf = new byte[DecodeBufLength];
Array.Copy(DecodeBuf, 0, NewDecodeBuf, 0, DecodeBufLength);
localSample.sampleBufSize = DecodeBufLength;
localSample.sampleBuffer = NewDecodeBuf;
localSample.sampleTime = DateTime.Now;
lock (this)
{
this.sampleBufferList.Enqueue(localSample);
Monitor.Pulse(this);
}
}
DecodeBufLength = 0;
Array.Copy(MyNalBuf, 0, DecodeBuf, DecodeBufLength, MyNalBuf.Length);
DecodeBufLength += MyNalBuf.Length;
}
else
{
if (((NalBuf[5] >> 7) & 1) == 0)
{
Array.Copy(MyNalBuf, 0, DecodeBuf, DecodeBufLength, MyNalBuf.Length);
DecodeBufLength += MyNalBuf.Length;
}
}
}
else
{
// enqueue the sample
// Instantiate and initialize a new Sample
Sample localSample = new Sample();
byte[] NewDecodeBuf = new byte[NalBufUsed - CurrentNALUSplitFlag];
Array.Copy(MyNalBuf, 0, NewDecodeBuf, 0, NalBufUsed - CurrentNALUSplitFlag);
localSample.sampleBufSize = NalBufUsed - CurrentNALUSplitFlag;
localSample.sampleBuffer = NewDecodeBuf;
localSample.sampleTime = DateTime.Now;
lock (this)
{
this.sampleBufferList.Enqueue(localSample);
Monitor.Pulse(this);
}
}
}
NalBuf[0] = 0;
NalBuf[1] = 0;
NalBuf[2] = 0;
NalBuf[3] = 1;
NalBufUsed = 4;
}
}
}
}
}
}
};
_worker.ProgressChanged += (s, ex) =>
{
// if you want to report progress
};
_worker.RunWorkerCompleted += (s, ex) =>
{
// Do your thread clean up here
};
//开始异步执行背景线程
_worker.RunWorkerAsync();
}
else
{
return false;
}
return true;
}
}
}
运行效果如下:
完整代码可以从这里下载: