Merge "Added a MediaCoded based video view to the widevine sample player app." into jb-dev
@@ -3,6 +3,8 @@
    package="com.widevine.demo"
    >
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

    <uses-sdk android:minSdkVersion="12"></uses-sdk>
    <application android:icon="@drawable/icon" android:label="@string/app_name" android:theme="@android:style/Theme.Holo.NoActionBar">
        <activity android:name=".WidevineSamplePlayer"

@@ -5,6 +5,8 @@
    <string name="not_provisioned">Device Not Provisioned</string>
    <string name="play">Play</string>
    <string name="stop">Stop</string>
    <string name="mediacodec_mode">MediaCodec Mode</string>
    <string name="normal_mode">Normal Mode</string>
    <string name="constraints">Constraints</string>
    <string name="acquire_rights">Acquire Rights</string>
    <string name="remove_rights">Remove Rights</string>

@@ -0,0 +1,725 @@
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.widevine.demo;

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaCryptoException;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import android.os.Handler;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.MediaController;

import java.io.IOException;
import java.lang.IllegalStateException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.UUID;

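/**
 * Per-track decoder state: feeds encoded samples from the shared MediaExtractor
 * into one MediaCodec instance and drains its output. Audio output is handed to
 * a NonBlockingAudioTrack; video output is released to the view's surface.
 */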
class CodecState {
    private static final String TAG = "CodecState";

    private MediaCodecView mView;
    private MediaExtractor mExtractor;
    private int mTrackIndex;
    private MediaFormat mFormat;
    private boolean mSawInputEOS, mSawOutputEOS;

    private MediaCodec mCodec;
    private MediaFormat mOutputFormat;
    private ByteBuffer[] mCodecInputBuffers;
    private ByteBuffer[] mCodecOutputBuffers;

    private LinkedList<Integer> mAvailableInputBufferIndices;
    private LinkedList<Integer> mAvailableOutputBufferIndices;
    private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;

    private NonBlockingAudioTrack mAudioTrack;

    private long mLastMediaTimeUs;

    public CodecState(
            MediaCodecView view,
            MediaExtractor extractor,
            int trackIndex,
            MediaFormat format,
            MediaCodec codec) {
        mView = view;
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mFormat = format;
        mSawInputEOS = mSawOutputEOS = false;

        mCodec = codec;

        mCodec.start();
        mCodecInputBuffers = mCodec.getInputBuffers();
        mCodecOutputBuffers = mCodec.getOutputBuffers();

        mAvailableInputBufferIndices = new LinkedList();
        mAvailableOutputBufferIndices = new LinkedList();
        mAvailableOutputBufferInfos = new LinkedList();

        mLastMediaTimeUs = 0;
    }

    public void release() {
        mCodec.stop();
        mCodecInputBuffers = null;
        mCodecOutputBuffers = null;
        mOutputFormat = null;

        mAvailableOutputBufferInfos = null;
        mAvailableOutputBufferIndices = null;
        mAvailableInputBufferIndices = null;

        mCodec.release();
        mCodec = null;

        if (mAudioTrack != null) {
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }

    public void start() {
        if (mAudioTrack != null) {
            mAudioTrack.play();
        }
    }

    public void pause() {
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    public long getCurrentPositionUs() {
        return mLastMediaTimeUs;
    }

    public void flush() {
        mAvailableInputBufferIndices.clear();
        mAvailableOutputBufferIndices.clear();
        mAvailableOutputBufferInfos.clear();

        mSawInputEOS = false;
        mSawOutputEOS = false;

        if (mAudioTrack != null
                && mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED) {
            mAudioTrack.play();
        }

        mCodec.flush();
    }

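    // Pumps the codec without blocking: collects any input/output buffers the
    // codec has made available, then feeds and drains as much as possible.
    // MediaCodecView calls this from its EVENT_DO_SOME_WORK handler loop.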
    public void doSomeWork() {
        int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);

        if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            mAvailableInputBufferIndices.add(new Integer(index));
        }

        while (feedInputBuffer()) {}

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);

        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mOutputFormat = mCodec.getOutputFormat();
            onOutputFormatChanged();
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mCodecOutputBuffers = mCodec.getOutputBuffers();
        } else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            mAvailableOutputBufferIndices.add(new Integer(index));
            mAvailableOutputBufferInfos.add(info);
        }

        while (drainOutputBuffer()) {}
    }

    /** returns true if more input data could be fed */
    private boolean feedInputBuffer() {
        if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) {
            return false;
        }

        int index = mAvailableInputBufferIndices.peekFirst().intValue();

        ByteBuffer codecData = mCodecInputBuffers[index];

        int trackIndex = mExtractor.getSampleTrackIndex();

        if (trackIndex == mTrackIndex) {
            int sampleSize =
                mExtractor.readSampleData(codecData, 0 /* offset */);

            long sampleTime = mExtractor.getSampleTime();

            int sampleFlags = mExtractor.getSampleFlags();

            try {
                if ((sampleFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
                    MediaCodec.CryptoInfo info = new MediaCodec.CryptoInfo();
                    mExtractor.getSampleCryptoInfo(info);

                    mCodec.queueSecureInputBuffer(
                            index, 0 /* offset */, info, sampleTime, 0 /* flags */);
                } else {
                    mCodec.queueInputBuffer(
                            index, 0 /* offset */, sampleSize, sampleTime,
                            0 /* flags */);
                }

                mAvailableInputBufferIndices.removeFirst();
                mExtractor.advance();
            } catch (MediaCodec.CryptoException e) {
                Log.d(TAG, "CryptoException w/ errorCode "
                        + e.getErrorCode() + ", '" + e.getMessage() + "'");
            }

            return true;
        } else if (trackIndex < 0) {
            Log.d(TAG, "saw input EOS on track " + mTrackIndex);

            mSawInputEOS = true;

            try {
                mCodec.queueInputBuffer(
                        index, 0 /* offset */, 0 /* sampleSize */,
                        0 /* sampleTime */, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

                mAvailableInputBufferIndices.removeFirst();
            } catch (MediaCodec.CryptoException e) {
                Log.d(TAG, "CryptoException w/ errorCode "
                        + e.getErrorCode() + ", '" + e.getMessage() + "'");
            }
        }

        return false;
    }

    private void onOutputFormatChanged() {
        String mime = mOutputFormat.getString(MediaFormat.KEY_MIME);

        if (mime.startsWith("audio/")) {
            int sampleRate =
                mOutputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);

            int channelCount =
                mOutputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

            mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount);
            mAudioTrack.play();
        }
    }

    /** returns true if more output data could be drained */
    private boolean drainOutputBuffer() {
        if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) {
            return false;
        }

        int index = mAvailableOutputBufferIndices.peekFirst().intValue();
        MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "saw output EOS on track " + mTrackIndex);

            mSawOutputEOS = true;

            if (mAudioTrack != null) {
                mAudioTrack.stop();
            }
            return false;
        }

        long realTimeUs =
            mView.getRealTimeUsForMediaTime(info.presentationTimeUs);

        long nowUs = mView.getNowUs();

        long lateUs = nowUs - realTimeUs;

        if (mAudioTrack != null) {
            ByteBuffer buffer = mCodecOutputBuffers[index];
            buffer.clear();
            buffer.position(0 /* offset */);

            byte[] audioCopy = new byte[info.size];
            buffer.get(audioCopy, 0, info.size);

            mAudioTrack.write(audioCopy, info.size);

            mCodec.releaseOutputBuffer(index, false /* render */);

            mLastMediaTimeUs = info.presentationTimeUs;

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        } else {
            // video
            boolean render;

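            // Simple A/V sync against the view's clock: hold frames that are
            // more than 10 ms early, drop (don't render) frames that are more
            // than 30 ms late, and render everything in between.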
            if (lateUs < -10000) {
                // too early;
                return false;
            } else if (lateUs > 30000) {
                Log.d(TAG, "video late by " + lateUs + " us.");
                render = false;
            } else {
                render = true;
                mLastMediaTimeUs = info.presentationTimeUs;
            }

            mCodec.releaseOutputBuffer(index, render);

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        }
    }

    public long getAudioTimeUs() {
        if (mAudioTrack == null) {
            return 0;
        }

        return mAudioTrack.getAudioTimeUs();
    }
}

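/**
 * A SurfaceView that plays (optionally Widevine-encrypted) content directly
 * through MediaExtractor, MediaCodec and MediaCrypto instead of MediaPlayer.
 * It implements MediaController.MediaPlayerControl so the standard transport
 * controls can be attached.
 */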
class MediaCodecView extends SurfaceView
        implements MediaController.MediaPlayerControl {
    private static final String TAG = "MediaCodecView";

    private Context mContext;
    private Uri mUri;
    private Map<String, String> mHeaders;
    private boolean mEncrypted;

    private MediaCrypto mCrypto;
    private MediaExtractor mExtractor;

    private Map<Integer, CodecState> mCodecStates;
    CodecState mAudioTrackState;

    private int mState;
    private static final int STATE_IDLE = 1;
    private static final int STATE_PREPARING = 2;
    private static final int STATE_PLAYING = 3;
    private static final int STATE_PAUSED = 4;

    private Handler mHandler;
    private static final int EVENT_PREPARE = 1;
    private static final int EVENT_DO_SOME_WORK = 2;

    private long mDeltaTimeUs;
    private long mDurationUs;

    private MediaController mMediaController;

    public MediaCodecView(Context context) {
        super(context);
        initMediaCodecView();
    }

    public MediaCodecView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public MediaCodecView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initMediaCodecView();
    }

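    // All playback work runs on this Handler (the UI thread): EVENT_PREPARE
    // sets up the extractor, crypto session and codecs, and EVENT_DO_SOME_WORK
    // re-posts itself every 5 ms to pump each track's codec.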
    private void initMediaCodecView() {
        mState = STATE_IDLE;

        mHandler = new Handler() {
            public void handleMessage(Message msg) {
                switch (msg.what) {
                    case EVENT_PREPARE:
                    {
                        try {
                            prepare();
                            start();
                        } catch (IOException e) {
                            Log.d(TAG, "prepare failed.");
                        } catch (MediaCryptoException e) {
                            Log.d(TAG, "failed to initialize crypto.");
                        }
                        break;
                    }

                    case EVENT_DO_SOME_WORK:
                    {
                        doSomeWork();

                        mHandler.sendMessageDelayed(
                                mHandler.obtainMessage(EVENT_DO_SOME_WORK), 5);
                        break;
                    }

                    default:
                        break;
                }
            }
        };
    }

    public void setDataSource(
            Context context, Uri uri, Map<String, String> headers,
            boolean encrypted) {
        reset();

        mContext = context;
        mUri = uri;
        mHeaders = headers;
        mEncrypted = encrypted;
    }

    private void prepare() throws IOException, MediaCryptoException {
        if (mEncrypted) {
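            // edef8ba9-79d6-4ace-a3c8-27dcd51d21ed is the Widevine DRM scheme
            // UUID; it selects the Widevine crypto plugin for this session.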
            UUID uuid = new UUID(
                    (long)0xedef8ba979d64aceL, (long)0xa3c827dcd51d21edL);

            try {
                mCrypto = new MediaCrypto(uuid, null);
            } catch (MediaCryptoException e) {
                reset();

                throw e;
            }
        }

        try {
            mExtractor = new MediaExtractor();

            mExtractor.setDataSource(mContext, mUri, mHeaders);
        } catch (IOException e) {
            reset();

            throw e;
        }

        mCodecStates = new HashMap();

        boolean haveAudio = false;
        boolean haveVideo = false;
        for (int i = mExtractor.getTrackCount(); i-- > 0;) {
            MediaFormat format = mExtractor.getTrackFormat(i);
            Log.d(TAG, "track format #" + i + " is " + format);

            String mime = format.getString(MediaFormat.KEY_MIME);

            boolean isVideo = mime.startsWith("video/");
            boolean isAudio = mime.startsWith("audio/");

            if (!haveAudio && isAudio || !haveVideo && isVideo) {
                mExtractor.selectTrack(i);
                addTrack(i, format, mEncrypted);

                if (isAudio) {
                    haveAudio = true;
                } else {
                    haveVideo = true;
                }

                if (format.containsKey(MediaFormat.KEY_DURATION)) {
                    long durationUs = format.getLong(MediaFormat.KEY_DURATION);

                    if (durationUs > mDurationUs) {
                        mDurationUs = durationUs;
                    }
                }

                if (haveAudio && haveVideo) {
                    break;
                }
            }
        }

        mState = STATE_PAUSED;
    }

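    // Secure (protected-content) decoder variants are published under the
    // non-secure component name with a ".secure" suffix appended.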
    private String getSecureDecoderNameForMime(String mime) {
        int n = MediaCodecList.getCodecCount();
        for (int i = 0; i < n; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);

            if (info.isEncoder()) {
                continue;
            }

            String[] supportedTypes = info.getSupportedTypes();

            for (int j = 0; j < supportedTypes.length; ++j) {
                if (supportedTypes[j].equalsIgnoreCase(mime)) {
                    return info.getName() + ".secure";
                }
            }
        }

        return null;
    }

    private void addTrack(
            int trackIndex, MediaFormat format, boolean encrypted) {
        String mime = format.getString(MediaFormat.KEY_MIME);

        boolean isVideo = mime.startsWith("video/");
        boolean isAudio = mime.startsWith("audio/");

        MediaCodec codec;

        if (encrypted && mCrypto.requiresSecureDecoderComponent(mime)) {
            codec = MediaCodec.createByCodecName(
                    getSecureDecoderNameForMime(mime));
        } else {
            codec = MediaCodec.createDecoderByType(mime);
        }

        codec.configure(
                format,
                isVideo ? getHolder().getSurface() : null,
                mCrypto,
                0);

        CodecState state =
            new CodecState(this, mExtractor, trackIndex, format, codec);

        mCodecStates.put(new Integer(trackIndex), state);

        if (isAudio) {
            mAudioTrackState = state;
        }
    }

    public void start() {
        Log.d(TAG, "start");

        if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
            return;
        } else if (mState == STATE_IDLE) {
            mState = STATE_PREPARING;
            mHandler.sendMessage(mHandler.obtainMessage(EVENT_PREPARE));
            return;
        } else if (mState != STATE_PAUSED) {
            throw new IllegalStateException();
        }

        for (CodecState state : mCodecStates.values()) {
            state.start();
        }

        mHandler.sendMessage(mHandler.obtainMessage(EVENT_DO_SOME_WORK));

        mDeltaTimeUs = -1;
        mState = STATE_PLAYING;

        if (mMediaController != null) {
            mMediaController.show();
        }
    }

    public void pause() {
        Log.d(TAG, "pause");

        if (mState == STATE_PAUSED) {
            return;
        } else if (mState != STATE_PLAYING) {
            throw new IllegalStateException();
        }

        mHandler.removeMessages(EVENT_DO_SOME_WORK);

        for (CodecState state : mCodecStates.values()) {
            state.pause();
        }

        mState = STATE_PAUSED;
    }

    public void reset() {
        if (mState == STATE_PLAYING) {
            pause();
        }

        if (mCodecStates != null) {
            for (CodecState state : mCodecStates.values()) {
                state.release();
            }
            mCodecStates = null;
        }

        if (mExtractor != null) {
            mExtractor.release();
            mExtractor = null;
        }

        if (mCrypto != null) {
            mCrypto.release();
            mCrypto = null;
        }

        mDurationUs = -1;
        mState = STATE_IDLE;
    }

    public void setMediaController(MediaController ctrl) {
        mMediaController = ctrl;
        attachMediaController();
    }

    private void attachMediaController() {
        View anchorView =
            this.getParent() instanceof View ? (View)this.getParent() : this;

        mMediaController.setMediaPlayer(this);
        mMediaController.setAnchorView(anchorView);
        mMediaController.setEnabled(true);
    }

    private void doSomeWork() {
        for (CodecState state : mCodecStates.values()) {
            state.doSomeWork();
        }
    }

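    // The audio track's playback position, when one exists, is the master
    // clock; otherwise fall back to wall-clock time in microseconds.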
    public long getNowUs() {
        if (mAudioTrackState == null) {
            return System.currentTimeMillis() * 1000;
        }

        return mAudioTrackState.getAudioTimeUs();
    }

    public long getRealTimeUsForMediaTime(long mediaTimeUs) {
        if (mDeltaTimeUs == -1) {
            long nowUs = getNowUs();
            mDeltaTimeUs = nowUs - mediaTimeUs;
        }

        return mDeltaTimeUs + mediaTimeUs;
    }

    public int getDuration() {
        return (int)((mDurationUs + 500) / 1000);
    }

    public int getCurrentPosition() {
        long positionUs = 0;

        for (CodecState state : mCodecStates.values()) {
            long trackPositionUs = state.getCurrentPositionUs();

            if (trackPositionUs > positionUs) {
                positionUs = trackPositionUs;
            }
        }

        return (int)((positionUs + 500) / 1000);
    }

    public void seekTo(int timeMs) {
        if (mState != STATE_PLAYING && mState != STATE_PAUSED) {
            return;
        }

        mExtractor.seekTo(timeMs * 1000, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        for (CodecState state : mCodecStates.values()) {
            state.flush();
        }

        Log.d(TAG, "seek to " + timeMs * 1000);

        mDeltaTimeUs = -1;
    }

    public boolean isPlaying() {
        return mState == STATE_PLAYING;
    }

    public int getBufferPercentage() {
        long cachedDurationUs = mExtractor.getCachedDuration();

        if (cachedDurationUs < 0 || mDurationUs < 0) {
            return 0;
        }

        int nowMs = getCurrentPosition();

        int percentage =
            100 * (nowMs + (int)(cachedDurationUs / 1000))
                / (int)(mDurationUs / 1000);

        if (percentage > 100) {
            percentage = 100;
        }

        return percentage;
    }

    public boolean canPause() {
        return true;
    }

    public boolean canSeekBackward() {
        return true;
    }

    public boolean canSeekForward() {
        return true;
    }

    private void toggleMediaControlsVisiblity() {
        if (mMediaController.isShowing()) {
            mMediaController.hide();
        } else {
            mMediaController.show();
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (mState != STATE_IDLE && mMediaController != null) {
            toggleMediaControlsVisiblity();
        }
        return false;
    }
}
@@ -0,0 +1,207 @@
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.widevine.demo;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Handler;
import android.os.Message;
import android.util.Log;

import java.util.LinkedList;

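/**
 * A streaming AudioTrack wrapper whose write() never blocks: PCM buffers are
 * queued and flushed from a Handler only when the track has free space, so the
 * caller's decode loop is never stalled behind audio output.
 */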
class NonBlockingAudioTrack {
    private static final String TAG = "NonBlockingAudioTrack";

    private AudioTrack mAudioTrack;
    private int mSampleRate;
    private int mFrameSize;
    private int mBufferSizeInFrames;
    private int mNumFramesSubmitted = 0;

    class QueueElem {
        byte[] data;
        int offset;
        int size;
    }
    private LinkedList<QueueElem> mQueue = new LinkedList();

    private Handler mHandler;
    private boolean mWriteMorePending = false;
    private static final int EVENT_WRITE_MORE = 1;

    public NonBlockingAudioTrack(int sampleRate, int channelCount) {
        int channelConfig;
        switch (channelCount) {
            case 1:
                channelConfig = AudioFormat.CHANNEL_OUT_MONO;
                break;
            case 2:
                channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
                break;
            case 6:
                channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
                break;
            default:
                throw new IllegalArgumentException();
        }

        int minBufferSize =
            AudioTrack.getMinBufferSize(
                    sampleRate,
                    channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT);

        int bufferSize = 2 * minBufferSize;

        mAudioTrack = new AudioTrack(
                AudioManager.STREAM_MUSIC,
                sampleRate,
                channelConfig,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);

        mSampleRate = sampleRate;
        mFrameSize = 2 * channelCount;
        mBufferSizeInFrames = bufferSize / mFrameSize;

        mHandler = new Handler() {
            public void handleMessage(Message msg) {
                switch (msg.what) {
                    case EVENT_WRITE_MORE:
                        mWriteMorePending = false;
                        writeMore();
                        break;

                    default:
                        break;
                }
            }
        };
    }

    public long getAudioTimeUs() {
        int numFramesPlayed = mAudioTrack.getPlaybackHeadPosition();

        return (numFramesPlayed * 1000000L) / mSampleRate;
    }

    public void play() {
        mAudioTrack.play();
    }

    public void stop() {
        cancelWriteMore();

        mAudioTrack.stop();

        mNumFramesSubmitted = 0;
    }

    public void pause() {
        cancelWriteMore();

        mAudioTrack.pause();
    }

    public void release() {
        cancelWriteMore();

        mAudioTrack.release();
        mAudioTrack = null;
    }

    public int getPlayState() {
        return mAudioTrack.getPlayState();
    }

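    // Writes at most as many bytes as currently fit in the AudioTrack buffer
    // (buffer size minus frames submitted but not yet played), so the write()
    // call below cannot block.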
    private void writeMore() {
        if (mQueue.isEmpty()) {
            return;
        }

        int numFramesPlayed = mAudioTrack.getPlaybackHeadPosition();
        int numFramesPending = mNumFramesSubmitted - numFramesPlayed;
        int numFramesAvailableToWrite = mBufferSizeInFrames - numFramesPending;
        int numBytesAvailableToWrite = numFramesAvailableToWrite * mFrameSize;

        while (numBytesAvailableToWrite > 0) {
            QueueElem elem = mQueue.peekFirst();

            int numBytes = elem.size;
            if (numBytes > numBytesAvailableToWrite) {
                numBytes = numBytesAvailableToWrite;
            }

            int written = mAudioTrack.write(elem.data, elem.offset, numBytes);
            assert(written == numBytes);

            mNumFramesSubmitted += written / mFrameSize;

            elem.size -= numBytes;
            if (elem.size == 0) {
                mQueue.removeFirst();

                if (mQueue.isEmpty()) {
                    break;
                }
            } else {
                elem.offset += numBytes;
                break;
            }

            numBytesAvailableToWrite -= numBytes;
        }

        if (!mQueue.isEmpty()) {
            scheduleWriteMore();
        }
    }

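    // Schedules the next write after roughly one third of the currently
    // buffered audio has played out.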
    private void scheduleWriteMore() {
        if (mWriteMorePending) {
            return;
        }

        int numFramesPlayed = mAudioTrack.getPlaybackHeadPosition();
        int numFramesPending = mNumFramesSubmitted - numFramesPlayed;
        int pendingDurationMs = 1000 * numFramesPending / mSampleRate;

        mWriteMorePending = true;
        mHandler.sendMessageDelayed(
                mHandler.obtainMessage(EVENT_WRITE_MORE),
                pendingDurationMs / 3);
    }

    private void cancelWriteMore() {
        mHandler.removeMessages(EVENT_WRITE_MORE);
        mWriteMorePending = false;
    }

    public void write(byte[] data, int size) {
        QueueElem elem = new QueueElem();
        elem.data = data;
        elem.offset = 0;
        elem.size = size;

        mQueue.add(elem);

        scheduleWriteMore();
    }
}
@@ -5,6 +5,7 @@
package com.widevine.demo;

import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
@@ -19,12 +20,20 @@ import android.view.Gravity;
import android.view.View;
import android.content.Context;

import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.util.Log;

import java.io.IOException;

public class VideoPlayerView extends Activity {
    private final static String TAG = "VideoPlayerView";

    private final static float BUTTON_FONT_SIZE = 10;
    private final static String EXIT_FULLSCREEN = "Exit Full Screen";
    private final static String FULLSCREEN = "Enter Full Screen";
@@ -33,11 +42,13 @@ public class VideoPlayerView extends Activity {

    private WidevineDrm drm;
    private FullScreenVideoView videoView;
    private MediaCodecView mediaCodecView;
    private String assetUri;
    private TextView logs;
    private ScrollView scrollView;
    private Context context;
    private ClipImageView bgImage;
    private Button mediaCodecModeButton;
    private Button playButton;
    private Button fullScreen;
    private Handler hRefresh;
@@ -45,6 +56,7 @@ public class VideoPlayerView extends Activity {
    private LinearLayout main;
    private LinearLayout sidePanel;
    private boolean enteringFullScreen;
    private boolean useMediaCodec;
    private int width, height;

    public void onCreate(Bundle savedInstanceState) {
@@ -53,6 +65,7 @@ public class VideoPlayerView extends Activity {
        height = display.getHeight();
        width = display.getWidth();
        context = this;
        useMediaCodec = false;
        contentView = createView();
        if (drm.isProvisionedDevice()) {
            setContentView(contentView);
@@ -68,6 +81,11 @@ public class VideoPlayerView extends Activity {
                stopPlayback();
            }
        }
        if (mediaCodecView != null) {
            if (mediaCodecView.isPlaying()) {
                stopPlayback();
            }
        }
        super.onStop();
    }

@@ -91,8 +109,6 @@ public class VideoPlayerView extends Activity {
            }
        };

        videoView = new FullScreenVideoView(this);

        logs = new TextView(this);
        drm.setLogListener(drmLogListener);
        drm.registerPortal(WidevineDrm.Settings.PORTAL_NAME);
@@ -130,7 +146,17 @@ public class VideoPlayerView extends Activity {
        sidePanel.addView(createButtons(), paramsSidePanel);

        FrameLayout playerFrame = new FrameLayout(this);
        playerFrame.addView(videoView, new FrameLayout.LayoutParams(

        View view;
        if (useMediaCodec) {
            mediaCodecView = new MediaCodecView(this);
            view = mediaCodecView;
        } else {
            videoView = new FullScreenVideoView(this);
            view = videoView;
        }

        playerFrame.addView(view, new FrameLayout.LayoutParams(
                LinearLayout.LayoutParams.WRAP_CONTENT,
                FrameLayout.LayoutParams.MATCH_PARENT));

@@ -191,70 +217,107 @@ public class VideoPlayerView extends Activity {

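    // Playback is routed to either the new MediaCodecView or the original
    // MediaPlayer-based FullScreenVideoView, depending on useMediaCodec.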
    private void startPlayback() {
        playButton.setText(R.string.stop);

        bgImage.setVisibility(View.GONE);

        videoView.setVideoPath(assetUri);
        videoView.setMediaController(new MediaController(context));
        if (useMediaCodec) {
            mediaCodecView.setDataSource(
                    this,
                    Uri.parse(assetUri),
                    null /* headers */,
                    true /* encrypted */);

        videoView.setOnErrorListener(new OnErrorListener() {
            public boolean onError(MediaPlayer mp, int what, int extra) {
                String message = "Unknown error";
                switch (what) {
                    case MediaPlayer.MEDIA_ERROR_UNKNOWN:
                        message = "Unable to play media";
                        break;
                    case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
                        message = "Server failed";
                        break;
                    case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
                        message = "Invalid media";
                        break;
                }
                drm.logBuffer.append(message + "\n");
                updateLogs();
                bgImage.setVisibility(View.VISIBLE);
                return false;
            }
        });
        videoView.setOnCompletionListener(new OnCompletionListener() {
            public void onCompletion(MediaPlayer mp) {
                stopPlayback();
            }
        });
        videoView.requestFocus();

        videoView.start();

        if (videoView.getFullScreen()) {
            sidePanel.setVisibility(View.GONE);
            mediaCodecView.setMediaController(new MediaController(context));
            mediaCodecView.requestFocus();
            mediaCodecView.start();
        } else {
            sidePanel.setVisibility(View.VISIBLE);
        }
            videoView.setVideoPath(assetUri);
            videoView.setMediaController(new MediaController(context));

            fullScreen.setVisibility(View.VISIBLE);
            videoView.setFullScreenDimensions(contentView.getRight() - contentView.getLeft(),
                    contentView.getBottom() - contentView.getTop());
            videoView.setOnErrorListener(new OnErrorListener() {
                public boolean onError(MediaPlayer mp, int what, int extra) {
                    String message = "Unknown error";
                    switch (what) {
                        case MediaPlayer.MEDIA_ERROR_UNKNOWN:
                            message = "Unable to play media";
                            break;
                        case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
                            message = "Server failed";
                            break;
                        case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
                            message = "Invalid media";
                            break;
                    }
                    drm.logBuffer.append(message + "\n");
                    updateLogs();
                    bgImage.setVisibility(View.VISIBLE);
                    return false;
                }
            });
            videoView.setOnCompletionListener(new OnCompletionListener() {
                public void onCompletion(MediaPlayer mp) {
                    stopPlayback();
                }
            });
            videoView.requestFocus();

            videoView.start();

            if (videoView.getFullScreen()) {
                sidePanel.setVisibility(View.GONE);
            } else {
                sidePanel.setVisibility(View.VISIBLE);
            }

            fullScreen.setVisibility(View.VISIBLE);
            videoView.setFullScreenDimensions(contentView.getRight() - contentView.getLeft(),
                    contentView.getBottom() - contentView.getTop());
        }
    }

    private void stopPlayback() {
        playButton.setText(R.string.play);
        videoView.stopPlayback();
        fullScreen.setVisibility(View.INVISIBLE);
        bgImage.setVisibility(View.VISIBLE);

        if (videoView.getFullScreen() && !enteringFullScreen) {
            videoView.setVisibility(View.INVISIBLE);
            videoView.setFullScreen(false);
            videoView.setVisibility(View.VISIBLE);
            sidePanel.setVisibility(View.VISIBLE);
            fullScreen.setText(FULLSCREEN);
        if (useMediaCodec) {
            mediaCodecView.reset();
        } else {
            videoView.stopPlayback();

            fullScreen.setVisibility(View.INVISIBLE);
            if (videoView.getFullScreen() && !enteringFullScreen) {
                videoView.setVisibility(View.INVISIBLE);
                videoView.setFullScreen(false);
                videoView.setVisibility(View.VISIBLE);
                sidePanel.setVisibility(View.VISIBLE);
                fullScreen.setText(FULLSCREEN);
            }
        }
        enteringFullScreen = false;

    }

    private View createButtons() {
        mediaCodecModeButton = new Button(this);
        if (useMediaCodec) {
            mediaCodecModeButton.setText(R.string.normal_mode);
        } else {
            mediaCodecModeButton.setText(R.string.mediacodec_mode);
        }
        mediaCodecModeButton.setTextSize(BUTTON_FONT_SIZE);

        mediaCodecModeButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                onStop();

                useMediaCodec = (useMediaCodec) ? false : true;
                contentView = createView();
                if (drm.isProvisionedDevice()) {
                    setContentView(contentView);
                } else {
                    setContentView(R.layout.notprovisioned);
                }
            }
        });

        playButton = new Button(this);
        playButton.setText(R.string.play);
        playButton.setTextSize(BUTTON_FONT_SIZE);
@@ -328,6 +391,7 @@ public class VideoPlayerView extends Activity {
        buttonsLeft.addView(checkConstraints, params);

        LinearLayout buttonsRight = new LinearLayout(this);
        buttonsRight.addView(mediaCodecModeButton, params);
        buttonsRight.setOrientation(LinearLayout.VERTICAL);
        buttonsRight.addView(checkButton, params);
        buttonsRight.addView(removeButton, params);
