To understand the overall flow of Android multimedia and sort out the relationship between MediaPlayer, MediaPlayerService, MediaCodec, and OMX, the most direct way is still to write a minimal demo and trace the flow from there. Playback with MediaPlayer is actually quite simple and boils down to the following six steps; the demo code follows:
(1) Create a MediaPlayer object with new;
(2) Call setDataSource; the arguments differ depending on the data source;
(3) Call setSurface to give MediaPlayer a surface to render the video onto;
(4) Call prepareAsync to prepare playback asynchronously;
(5) Call start to begin playback;
(6) Call stop to end playback.
if (mediaPlayer == null) {
    mediaPlayer = new MediaPlayer();
}
try {
    if (!path.isEmpty()) {
        mediaPlayer.setDataSource(path);
        mediaPlayer.setSurface(surfaceHolder.getSurface());
        // Register the listener before prepareAsync() so the onPrepared()
        // callback cannot be missed; the listener typically calls start().
        mediaPlayer.setOnPreparedListener(mediaOnPreparedListener);
        mediaPlayer.prepareAsync();
    }
} catch (IOException e) {
    e.printStackTrace();
}
With the demo running, let's follow it and see what MediaPlayer actually does through the whole process.
(1) Start with MediaPlayer.java. The static initializer first loads libmedia_jni.so; looking at the implementation of native_init, it only performs some initialization work:
static {
System.loadLibrary("media_jni");
native_init();
}
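On the native side (android_media_MediaPlayer.cpp) native_init really only caches the JNI IDs that the native layer needs later to call back into Java. A simplified sketch based on AOSP (error handling and the other cached fields are omitted):
static void
android_media_MediaPlayer_native_init(JNIEnv *env)
{
    jclass clazz = env->FindClass("android/media/MediaPlayer");
    if (clazz == NULL) {
        return;
    }
    // "fields" is a file-static struct caching the IDs.
    // mNativeContext holds the pointer to the C++ MediaPlayer object.
    fields.context = env->GetFieldID(clazz, "mNativeContext", "J");
    // postEventFromNative is the static Java method through which native
    // events (prepared, completion, error...) reach the EventHandler.
    fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative",
            "(Ljava/lang/Object;IIILjava/lang/Object;)V");
}
The MediaPlayer constructor on the Java side then looks like this: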
private MediaPlayer(int sessionId) {
super(new AudioAttributes.Builder().build(),
AudioPlaybackConfiguration.PLAYER_TYPE_JAM_MEDIAPLAYER);
Looper looper;
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else if ((looper = Looper.getMainLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else {
mEventHandler = null;
}
mTimeProvider = new TimeProvider(this);
mOpenSubtitleSources = new Vector<InputStream>();
AttributionSource attributionSource = AttributionSource.myAttributionSource();
// set the package name to empty if it was null
if (attributionSource.getPackageName() == null) {
attributionSource = attributionSource.withPackageName("");
}
/* Native setup requires a weak reference to our object.
* It's easier to create it here than in C++.
*/
try (ScopedParcelState attributionSourceState = attributionSource.asScopedParcelState()) {
native_setup(new WeakReference<MediaPlayer>(this), attributionSourceState.getParcel());
}
baseRegisterPlayer(sessionId);
}
So the MediaPlayer constructor, too, only performs some initial setup.
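The native_setup call at the end of the constructor is what actually creates the C++ MediaPlayer object and wires the notification path back to Java. A simplified sketch of android_media_MediaPlayer_native_setup (the AttributionSource parcel handling and error checks are omitted here):
static void
android_media_MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    // Create the C++ MediaPlayer that the Java object will drive.
    sp<MediaPlayer> mp = new MediaPlayer();
    // The listener forwards native notifications to the static Java method
    // postEventFromNative(), which posts them onto the EventHandler.
    sp<JNIMediaPlayerListener> listener =
            new JNIMediaPlayerListener(env, thiz, weak_this);
    mp->setListener(listener);
    // Store the native object pointer in the Java field mNativeContext.
    setMediaPlayer(env, thiz, mp);
}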
(2) setDataSource(MediaPlayer.java) -> android_media_MediaPlayer_setDataSourceFD(android_media_MediaPlayer.cpp) -> setDataSource(mediaplayer.cpp) -> setDataSource(IMediaPlayer.cpp). The code of MediaPlayer::setDataSource is shown below. Reading it makes clear why setDataSource has to be called after constructing the MediaPlayer: it is setDataSource that actually starts creating the player, and this is also where the interaction with MediaPlayerService begins. The key steps are:
1. const sp<IMediaPlayerService> service(getMediaPlayerService()); — obtains the BpMediaPlayerService proxy.
2. sp<IMediaPlayer> player(service->create(...)); — calls create() on BpMediaPlayerService, which returns a BpMediaPlayer.
3. attachNewPlayer(player); — stores the player in mPlayer.
status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length)
{
ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
status_t err = UNKNOWN_ERROR;
// obtain the BpMediaPlayerService proxy
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service != 0) {
//BpMediaPlayerService's create() returns a BpMediaPlayer
sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(fd, offset, length))) {
player.clear();
}
//store the player in mPlayer
err = attachNewPlayer(player);
}
return err;
}
IMediaDeathNotifier::getMediaPlayerService()
{
ALOGV("getMediaPlayerService");
Mutex::Autolock _l(sServiceLock);
if (sMediaPlayerService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
do {
binder = sm->getService(String16("media.player"));
if (binder != 0) {
break;
}
ALOGW("Media player service not published, waiting...");
usleep(500000); // 0.5 s
} while (true);
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
binder->linkToDeath(sDeathNotifier);
sMediaPlayerService = interface_cast<IMediaPlayerService>(binder);
}
ALOGE_IF(sMediaPlayerService == 0, "no media player service!?");
return sMediaPlayerService;
}
void MediaPlayerService::instantiate() {
defaultServiceManager()->addService(
String16("media.player"), new MediaPlayerService());
}
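MediaPlayerService::instantiate() registers the service with the ServiceManager under the name "media.player", which is exactly the name getMediaPlayerService() looks up above. It is invoked when the mediaserver process starts; a simplified sketch of main_mediaserver.cpp (the other services registered there are omitted):
int main(int argc __unused, char **argv __unused)
{
    // Open the binder driver for this process.
    sp<ProcessState> proc(ProcessState::self());
    sp<IServiceManager> sm(defaultServiceManager());
    // Registers "media.player" with the ServiceManager.
    MediaPlayerService::instantiate();
    // Start serving incoming binder transactions.
    ProcessState::self()->startThreadPool();
    IPCThreadState::self()->joinThreadPool();
}
With the service registered, service->create() in MediaPlayer::setDataSource goes through the proxy side implemented in IMediaPlayerService.cpp: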
virtual sp<IMediaPlayer> create(
const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId,
const AttributionSourceState& attributionSource) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(client));
data.writeInt32(audioSessionId);
data.writeParcelable(attributionSource);
remote()->transact(CREATE, data, &reply);
return interface_cast<IMediaPlayer>(reply.readStrongBinder());
}
status_t BnMediaPlayerService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch (code) {
case CREATE: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IMediaPlayerClient> client =
interface_cast<IMediaPlayerClient>(data.readStrongBinder());
audio_session_t audioSessionId = (audio_session_t) data.readInt32();
AttributionSourceState attributionSource;
status_t status = data.readParcelable(&attributionSource);
if (status != NO_ERROR) {
return status;
}
sp<IMediaPlayer> player = create(client, audioSessionId, attributionSource);
reply->writeStrongBinder(IInterface::asBinder(player));
return NO_ERROR;
} break;
…………
}
}
This part is the core of the C/S architecture: the C++-level MediaPlayer talks over IPC, through the BpMediaPlayerService proxy object, to the remote server-side MediaPlayerService (BnMediaPlayerService). (BpXxx is only a proxy shell; the real work is done inside BnXxx. Here 'p' stands for proxy and 'n' for native; both BpMediaPlayerService and BnMediaPlayerService are implemented in IMediaPlayerService.cpp.)
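The glue between the two sides is the interface_cast<IMediaPlayerService>(binder) seen in getMediaPlayerService() above. Roughly speaking, the IMPLEMENT_META_INTERFACE macro generates an asInterface() that wraps the raw IBinder in a BpMediaPlayerService whenever the service lives in another process; the following is a simplified sketch of that expansion, not the literal generated code:
sp<IMediaPlayerService> IMediaPlayerService::asInterface(const sp<IBinder>& obj)
{
    sp<IMediaPlayerService> intr;
    if (obj != nullptr) {
        // Same process: queryLocalInterface() returns the BnMediaPlayerService
        // itself, so the call never crosses the binder driver.
        intr = static_cast<IMediaPlayerService*>(
                obj->queryLocalInterface(descriptor).get());
        if (intr == nullptr) {
            // Remote process: wrap the binder handle in a proxy.
            intr = new BpMediaPlayerService(obj);
        }
    }
    return intr;
}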
When the server side receives a client's request, MediaPlayerService creates a session for that client: it news a MediaPlayerService::Client object to interact with the client, and this object then decides what kind of player to create based on the type of resource the client asked for. In MediaPlayerService::create(), a MediaPlayerService::Client instance is constructed; in other words, MediaPlayerService creates one MediaPlayerService::Client per client application to serve it. So the setDataSource call issued in mediaplayer.cpp, as far as MediaPlayerService.cpp is concerned, at this point only creates a Client and binds the two together; calling setDataSource again would create a second Client. Also note the function's return type, sp<IMediaPlayer>, while the value it actually returns, c, has type sp<Client>: since Client inherits from BnMediaPlayer, it is itself the server-side IMediaPlayer implementation, which is why the two can be treated as essentially equivalent.
sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId, const AttributionSourceState& attributionSource)
{
int32_t connId = android_atomic_inc(&mNextConnId);
// TODO b/182392769: use attribution source util
AttributionSourceState verifiedAttributionSource = attributionSource;
verifiedAttributionSource.pid = VALUE_OR_FATAL(
legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
verifiedAttributionSource.uid = VALUE_OR_FATAL(
legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
sp<Client> c = new Client(
this, verifiedAttributionSource, connId, client, audioSessionId);
ALOGV("Create new client(%d) from %s, ", connId,
verifiedAttributionSource.toString().c_str());
wp<Client> w = c;
{
Mutex::Autolock lock(mLock);
mClients.add(w);
}
return c;
}
Now go back to MediaPlayer::setDataSource in mediaplayer.cpp: once service->create() has returned the Client created by MediaPlayerService, it calls player->setDataSource (that player being the Client), and this is where the real player finally gets created. MediaPlayerFactory picks the best-matching player type as the one with the highest score (how that scoring works is sketched after the code below); the players MediaPlayerFactory still keeps today are NuPlayerFactory, TestPlayerFactory, and chip vendors' own players.
status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64_t length)
{
…………
player_type playerType = MediaPlayerFactory::getPlayerType(this,
fd,
offset,
length);
sp<MediaPlayerBase> p = setDataSource_pre(playerType);
if (p == NULL) {
return NO_INIT;
}
// now set data source
return mStatus = setDataSource_post(p, p->setDataSource(fd, offset, length));
}
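The "highest score" selection inside MediaPlayerFactory::getPlayerType works roughly like this: every registered IFactory is asked to score the data source, and the factory with the best score wins. A simplified sketch of the selection logic in MediaPlayerFactory.cpp (in the real code this loop is generated by the GET_PLAYER_TYPE_IMPL macro):
player_type MediaPlayerFactory::getPlayerType(const sp<IMediaPlayerClient>& client,
                                              int fd, int64_t offset, int64_t length)
{
    Mutex::Autolock lock_(&sLock);
    player_type ret = NU_PLAYER;   // default when nobody scores higher
    float bestScore = 0.0;
    // Ask every registered factory (NuPlayerFactory, TestPlayerFactory,
    // vendor factories...) how well it can handle this data source.
    for (size_t i = 0; i < sFactoryMap.size(); ++i) {
        IFactory* v = sFactoryMap.valueAt(i);
        float thisScore = v->scoreFactory(client, fd, offset, length, bestScore);
        if (thisScore > bestScore) {
            ret = sFactoryMap.keyAt(i);
            bestScore = thisScore;
        }
    }
    return ret;
}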
Based on the player type obtained, the corresponding player is created; on stock Android this is NuPlayer.
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
player_type playerType)
{
ALOGV("player type = %d", playerType);
// create the right type of player
sp<MediaPlayerBase> p = createPlayer(playerType);
…………
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId, mAttributionSource,
mAudioAttributes, mAudioDeviceUpdatedListener);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
return p;
}
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType)
{
// determine if we have the right player type
sp<MediaPlayerBase> p = getPlayer();
if ((p != NULL) && (p->playerType() != playerType)) {
ALOGV("delete player");
p.clear();
}
if (p == NULL) {
p = MediaPlayerFactory::createPlayer(playerType, mListener,
VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid)));
}
if (p != NULL) {
p->setUID(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid)));
}
return p;
}
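On the stock code path the winning factory is NuPlayerFactory, whose createPlayer() simply returns a NuPlayerDriver, the wrapper that exposes NuPlayer through the MediaPlayerBase interface. A simplified sketch of that factory in MediaPlayerFactory.cpp:
class NuPlayerFactory : public MediaPlayerFactory::IFactory {
  public:
    // The real factory has several scoreFactory() overloads (url, fd,
    // IStreamSource, DataSource); each returns a fixed score when NuPlayer
    // can handle the source.
    virtual float scoreFactory(const sp<IMediaPlayerClient>& /*client*/,
                               int /*fd*/, int64_t /*offset*/, int64_t /*length*/,
                               float /*curScore*/) {
        return 1.0;
    }
    virtual sp<MediaPlayerBase> createPlayer(pid_t pid) {
        ALOGV("Create NuPlayer");
        return new NuPlayerDriver(pid);
    }
};
NuPlayerDriver::setDataSource then hands the call over to NuPlayer: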
status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
ALOGV("setDataSource(%p) file(%d)", this, fd);
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
return INVALID_OPERATION;
}
mState = STATE_SET_DATASOURCE_PENDING;
mPlayer->setDataSourceAsync(fd, offset, length);
while (mState == STATE_SET_DATASOURCE_PENDING) {
mCondition.wait(mLock);
}
return mAsyncResult;
}
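Note how NuPlayerDriver turns the asynchronous NuPlayer call into a synchronous one: it posts the work via setDataSourceAsync and then blocks on mCondition until the state leaves STATE_SET_DATASOURCE_PENDING. The wake-up comes from NuPlayer once the source has been installed, roughly like this (a sketch of NuPlayerDriver::notifySetDataSourceCompleted):
void NuPlayerDriver::notifySetDataSourceCompleted(status_t err)
{
    Mutex::Autolock autoLock(mLock);
    CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING);
    mAsyncResult = err;
    mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE;
    // Wakes up the wait loop in setDataSource() above.
    mCondition.broadcast();
}
setDataSourceAsync itself just wraps the fd in a GenericSource and posts a message to NuPlayer's looper: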
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
sp<GenericSource> source =
new GenericSource(notify, mUIDValid, mUID, mMediaClock);
ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
fd, (long long)offset, (long long)length, source.get());
status_t err = source->setDataSource(fd, offset, length);
if (err != OK) {
ALOGE("Failed to set data source!");
source = NULL;
}
msg->setObject("source", source);
msg->post();
mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
}
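The posted kWhatSetDataSource message is then handled on NuPlayer's own looper thread, where the GenericSource is installed as mSource and the driver is notified, which in turn releases the wait shown earlier. Roughly (a sketch of the corresponding case in NuPlayer::onMessageReceived):
case kWhatSetDataSource:
{
    ALOGV("kWhatSetDataSource");
    CHECK(mSource == NULL);
    status_t err = OK;
    sp<RefBase> obj;
    CHECK(msg->findObject("source", &obj));
    if (obj != NULL) {
        Mutex::Autolock autoLock(mSourceLock);
        mSource = static_cast<Source *>(obj.get());
    } else {
        err = UNKNOWN_ERROR;
    }
    // Unblocks NuPlayerDriver::setDataSource().
    sp<NuPlayerDriver> driver = mDriver.promote();
    if (driver != NULL) {
        driver->notifySetDataSourceCompleted(err);
    }
    break;
}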
To summarize the path: setDataSource is issued in mediaplayer.cpp ---> reaches the Bp side of IMediaPlayer ---> is passed to the Bn side of IMediaPlayer ---> arrives at MediaPlayerService::Client::setDataSource ---> NuPlayerDriver::setDataSource ---> NuPlayer::setDataSourceAsync.
At this point the interaction between MediaPlayer and MediaPlayerService is basically clear.
For more on the MediaPlayer/MediaPlayerService interaction, the following blog post is an excellent read:
2. Android MultiMedia框架完全解析 - MediaPlayer的C/S架构与Binder机制实现 (yanbixing123, CSDN blog)