Part 1: A Summary of Encoding and Decoding with MediaCodec on Android
This article covers how to call the MediaCodec class on Android to hardware-decode a video file, and how to hardware-encode image data held as byte[] into a video file.

1. How MediaCodec encodes and decodes

Reference: https://developer.android.com/reference/android/media/MediaCodec.html

The workflow looks like this. Taking encoding as an example: first initialize the hardware encoder, configuring the target format, the video's width and height, the bit rate, frame rate, key-frame interval and so on — this step is configure. Then start the encoder, which is now ready to accept data at any time. Next comes the running phase, during which two buffer queues are maintained: InputBuffer and OutputBuffer. The client repeatedly dequeues an input buffer (dequeueInputBuffer), fills it with the image data to be encoded, and queues it back for processing. The hardware encoder works asynchronously; whenever it finishes, it places the result in an output buffer and notifies the client that output is available, at which point the client dequeues an output buffer, takes the data out, and releases the buffer. Termination is driven by setting the end-of-stream flag. Once encoding is finished, call stop to halt the encoder and then release to tear it down completely, which ends the whole flow.
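To make that running-phase loop concrete, here is a minimal sketch of such an encode loop (exception handling omitted); getNextFrame(), computePtsUs() and writeToMuxer() are hypothetical placeholders for the frame source, the timestamping, and the output handling:

MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); // the "configure" step
encoder.start();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean inputDone = false, outputDone = false;
while (!outputDone) {
    if (!inputDone) {
        int inId = encoder.dequeueInputBuffer(10000);
        if (inId >= 0) {
            byte[] frame = getNextFrame(); // hypothetical source of raw YUV frames
            if (frame == null) {
                // no more frames: signal end-of-stream
                encoder.queueInputBuffer(inId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                inputDone = true;
            } else {
                ByteBuffer in = encoder.getInputBuffer(inId);
                in.clear();
                in.put(frame);
                encoder.queueInputBuffer(inId, 0, frame.length, computePtsUs(), 0);
            }
        }
    }
    int outId = encoder.dequeueOutputBuffer(info, 10000);
    if (outId >= 0) {
        ByteBuffer out = encoder.getOutputBuffer(outId);
        writeToMuxer(out, info); // hypothetical: hand off to a MediaMuxer or a file
        encoder.releaseOutputBuffer(outId, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            outputDone = true;
        }
    }
}
encoder.stop();
encoder.release();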
2. A video decoding example

The code below comes from the post "Android: MediaCodec视频文件硬件解码", where all of it can be downloaded.
package com.example.guoheng_iri.helloworld;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
public class VideoDecode {
private static final String TAG = "VideoToFrames";
private static final boolean VERBOSE = true;
private static final long DEFAULT_TIMEOUT_US = 10000;
private static final int COLOR_FormatI420 = 1;
private static final int COLOR_FormatNV21 = 2;
public static final int FILE_TypeI420 = 1;
public static final int FILE_TypeNV21 = 2;
public static final int FILE_TypeJPEG = 3;
private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
private int outputImageFileType = -1;
private String OUTPUT_DIR;
public int ImageWidth = 0;
public int ImageHeight = 0;
MediaExtractor extractor = null;
MediaCodec decoder = null;
MediaFormat mediaFormat;
public void setSaveFrames(String dir, int fileType) throws IOException {
if (fileType != FILE_TypeI420 && fileType != FILE_TypeNV21 && fileType != FILE_TypeJPEG) {
throw new IllegalArgumentException("only support FILE_TypeI420 " + "and FILE_TypeNV21 " + "and FILE_TypeJPEG");
}
outputImageFileType = fileType;
File theDir = new File(dir);
if (!theDir.exists()) {
theDir.mkdirs();
} else if (!theDir.isDirectory()) {
throw new IOException("Not a directory");
}
OUTPUT_DIR = theDir.getAbsolutePath() + "/";
}
public void VideoDecodePrepare(String videoFilePath){
extractor = null;
decoder = null;
try {
File videoFile = new File(videoFilePath);
extractor = new MediaExtractor();
extractor.setDataSource(videoFile.toString());
int trackIndex = selectTrack(extractor);
if(trackIndex < 0){
throw new RuntimeException("No video track found in " + videoFilePath);
}
extractor.selectTrack(trackIndex);
mediaFormat = extractor.getTrackFormat(trackIndex);
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
showSupportedColorFormat(decoder.getCodecInfo().getCapabilitiesForType(mime));
if(isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))){
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
Log.i(TAG, "set decode color format to type " + decodeColorFormat);
} else {
Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
}
decoder.configure(mediaFormat, null, null, 0);
decoder.start();
} catch(IOException ioe){
throw new RuntimeException("failed to init decoder", ioe);
}
}
public void close(){
decoder.stop();
decoder.release();
if(extractor!= null){
extractor.release();
extractor = null;
}
}
public void execute()
{
try {
decodeFramesToImage(decoder, extractor, mediaFormat);
} finally {
// release the decoder and extractor when done
close();
}
}
private void showSupportedColorFormat(MediaCodecInfo.CodecCapabilities caps){
System.out.print("supported color format: ");
for(int c : caps.colorFormats){
System.out.print(c + "\t");
}
}
System.out.println();
}
private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps){
for(int c : caps.colorFormats){
if(c == colorFormat){
return true;
}
}
return false;
}
public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat){
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
final int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
final int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
ImageWidth=width;
ImageHeight=height;
int outputFrameCount = 0;
while(!sawOutputEOS){
if(!sawInputEOS){
int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if(inputBufferId >= 0){
ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
int sampleSize = extractor.readSampleData(inputBuffer, 0); // read one sample of video data (sampleSize bytes) into the input buffer
if(sampleSize < 0){
decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
sawInputEOS = true;
} else {
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
extractor.advance(); // move on to the next sample in the file
}
}
}
int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
if(outputBufferId >= 0){
if((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)!= 0){
sawOutputEOS = true;
}
boolean doRender =(info.size!= 0);
if(doRender){
outputFrameCount++;
Image image = decoder.getOutputImage(outputBufferId);
System.out.println("image format: " + image.getFormat());
if(outputImageFileType != -1){
String fileName;
switch(outputImageFileType){
case FILE_TypeI420:
fileName = OUTPUT_DIR + String.format("frame_%05d_I420_%dx%d.yuv", outputFrameCount, width, height);
dumpFile(fileName, getDataFromImage(image, COLOR_FormatI420));
break;
case FILE_TypeNV21:
fileName = OUTPUT_DIR + String.format("frame_%05d_NV21_%dx%d.yuv", outputFrameCount, width, height);
dumpFile(fileName, getDataFromImage(image, COLOR_FormatNV21));
break;
case FILE_TypeJPEG:
fileName = OUTPUT_DIR + String.format("frame_%05d.jpg", outputFrameCount);
compressToJpeg(fileName, image);
break;
}
}
image.close();
decoder.releaseOutputBuffer(outputBufferId, true);
}
}
}
}
private static int selectTrack(MediaExtractor extractor){
int numTracks = extractor.getTrackCount();
for(int i = 0;i < numTracks;i++){
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if(mime.startsWith(“video/”)){
if(VERBOSE){
Log.d(TAG, "Extractor selected track " + i + "(" + mime + "): " + format);
}
return i;
}
}
return -1;
}
private static boolean isImageFormatSupported(Image image){
int format = image.getFormat();
switch(format){
case ImageFormat.YUV_420_888:
case ImageFormat.NV21:
case ImageFormat.YV12:
return true;
}
return false;
}
public static byte[] getGrayFromData(Image image, int colorFormat){
if(colorFormat!= COLOR_FormatI420 && colorFormat!= COLOR_FormatNV21){
throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
}
if(!isImageFormatSupported(image)){
throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
}
Image.Plane[] planes = image.getPlanes();
int i = 0;
ByteBuffer buffer = planes[i].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data, 0, data.length);
if(VERBOSE)Log.v(TAG, “Finished reading data from plane ” + i);
return data;
}
public static byte[] getDataFromImage(Image image, int colorFormat){
if(colorFormat!= COLOR_FormatI420 && colorFormat!= COLOR_FormatNV21){
throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
}
if(!isImageFormatSupported(image)){
throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
}
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format)/ 8];
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;
int outputStride = 1;
for(int i = 0;i < planes.length;i++){
switch(i){
case 0:
channelOffset = 0;
outputStride = 1;
break;
case 1:
if(colorFormat == COLOR_FormatI420){
channelOffset = width * height;
outputStride = 1;
} else if(colorFormat == COLOR_FormatNV21){
channelOffset = width * height + 1; // NV21 interleaves VU, so U starts one byte in
outputStride = 2;
}
break;
case 2:
if(colorFormat == COLOR_FormatI420){
channelOffset =(int)(width * height * 1.25);
outputStride = 1;
} else if(colorFormat == COLOR_FormatNV21){
channelOffset = width * height;
outputStride = 2;
}
break;
}
// The original post is cut off at this point; the remainder of the method
// below is restored from the source it cites and copies each plane into the
// output array row by row.
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
int shift =(i == 0)? 0 : 1;
int w = width >> shift;
int h = height >> shift;
buffer.position(rowStride *(crop.top >> shift)+ pixelStride *(crop.left >> shift));
for(int row = 0;row < h;row++){
int length;
if(pixelStride == 1 && outputStride == 1){
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
length =(w - 1)* pixelStride + 1;
buffer.get(rowData, 0, length);
for(int col = 0;col < w;col++){
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
if(row < h - 1){
buffer.position(buffer.position()+ rowStride - length);
}
}
if(VERBOSE)Log.v(TAG, "Finished reading data from plane " + i);
}
return data;
}
private static void dumpFile(String fileName, byte[] data){
FileOutputStream outStream;
try {
outStream = new FileOutputStream(fileName);
} catch(IOException ioe){
throw new RuntimeException("Unable to create output file " + fileName, ioe);
}
try {
outStream.write(data);
outStream.close();
} catch(IOException ioe){
throw new RuntimeException("failed writing data to file " + fileName, ioe);
}
}
private void compressToJpeg(String fileName, Image image){
FileOutputStream outStream;
try {
outStream = new FileOutputStream(fileName);
} catch(IOException ioe){
throw new RuntimeException("Unable to create output file " + fileName, ioe);
}
Rect rect = image.getCropRect();
YuvImage yuvImage = new YuvImage(getDataFromImage(image, COLOR_FormatNV21), ImageFormat.NV21, rect.width(), rect.height(), null);
yuvImage.compressToJpeg(rect, 100, outStream);
}
}
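Driving the class above is then straightforward; run it off the main thread, since decoding blocks (the paths are placeholders, exception handling omitted):

VideoDecode vd = new VideoDecode();
vd.setSaveFrames("/sdcard/frames", VideoDecode.FILE_TypeJPEG); // placeholder output directory
vd.VideoDecodePrepare("/sdcard/test.mp4"); // placeholder input file
vd.execute(); // decodes every frame and writes it out as JPEG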
Part 2: A Summary of Logging on Android

1. In platform (source-tree) development

Include the header:

#include <cutils/log.h>

Define the LOG_TAG macro:

#define LOG_TAG "MY LOG TAG"

Link against the log library by adding this to Android.mk:

LOCAL_SHARED_LIBRARIES += libcutils

After that you can call LOGD directly to print log messages.

2. In NDK development

Include the header:

#include <android/log.h>

Define LOG_TAG and a LOGD macro:

#define LOG_TAG "MY LOG TAG"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

Link against the log library by adding this to Android.mk:

LOCAL_LDLIBS := -llog

After that you can call LOGD directly to print log messages.

3. In Java code

Import the package:

import android.util.Log;

Use it:

private static final String TAG = "your_tag";
Log.d(TAG, "show something");

While the program runs, you can view the output with the logcat command in an adb shell, or in the LogCat window of ADT in Eclipse.
Part 3: An Android Summary

The four major Android components:

Activity — the on-screen UI
Service — background services
BroadcastReceiver — implements the broadcast mechanism
ContentProvider — implements data storage and sharing
The Intent class: used to start components and to carry information between them.

Intents mediate between Activities, Receivers and Services. Create one with the no-argument constructor, then attach information through its action, category, data and extra fields. Use Activity's startActivity(Intent intent) to jump to another screen; Context's startService(Intent intent) to start a service; and Context's registerReceiver(BroadcastReceiver receiver, IntentFilter filter) to register a broadcast receiver, after which sendBroadcast() sends an unordered broadcast and sendOrderedBroadcast() an ordered one. A sketch:
The Handler class:

Sends and handles messages, cooperating with the main thread to update the UI. Message/Runnable payloads travel through the MessageQueue (a FIFO queue) and are dispatched by a Looper; the payload can be a Message object or a Runnable. Received messages are handled in handleMessage(Message msg). For example:
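A minimal sketch posting a result from a worker thread back to the main thread (textView and doSomeWork() are placeholders):

final Handler handler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == 1) {
            textView.setText((String) msg.obj); // update the UI on the main thread
        }
    }
};
new Thread(new Runnable() {
    @Override
    public void run() {
        String result = doSomeWork(); // hypothetical background work
        handler.obtainMessage(1, result).sendToTarget(); // goes through the MessageQueue
    }
}).start();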
一般用于第一次登錄時的設置,或者是各個界面的一些小型格式設置,如字體等。是本地的小型共享數據庫,可以通過Context的靜態方法getSharedPreferences獲得其對象,對象內的值均為鍵值對進行儲存。通過SharedPreferences對象調用editor()獲取SharedPreferences.Editor對象,向共享數據庫中增加數據,putString(),并提交數據,commit();通過SharedPreferences對象獲取共享數據庫中的數據,getString()。
ViewPager: the class that implements swipeable pages.

Listen for page changes by setting an OnPageChangeListener on the ViewPager.

Implementation flow (an adapter sketch follows the list):
① declare the ViewPager control in the layout file;
② bind the control in code;
③ subclass the abstract PagerAdapter class to provide the adapter, and hand it the data source;
④ in the adapter, implement two abstract methods and two overrides: getCount() — the number of pages to swipe through; isViewFromObject() — whether a view belongs to the given key object; destroyItem() — tear down the view at a given position; instantiateItem() — create the view for a given position.
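A minimal adapter following the four methods above (pageViews is a hypothetical List<View> acting as the data source):

class MyPagerAdapter extends PagerAdapter {
    private final List<View> pageViews;
    MyPagerAdapter(List<View> pageViews) { this.pageViews = pageViews; }
    @Override
    public int getCount() { return pageViews.size(); } // number of pages
    @Override
    public boolean isViewFromObject(View view, Object object) { return view == object; }
    @Override
    public Object instantiateItem(ViewGroup container, int position) {
        View page = pageViews.get(position);
        container.addView(page); // attach the page for this position
        return page;
    }
    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        container.removeView((View) object); // drop the page that scrolled away
    }
}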
The Timer and TimerTask classes:

Timer is the scheduler; create it with the no-argument constructor and call Timer.schedule(TimerTask task, long delay) to run a task after a delay. Once the task has executed, stop the timing with Timer.cancel(). TimerTask is abstract: when instantiating it you must override run(), all the work goes inside run(), and it executes on a background thread. For example:
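Running a task once after three seconds and then cancelling the timer (doPeriodicWork() is a placeholder):

final Timer timer = new Timer();
timer.schedule(new TimerTask() {
    @Override
    public void run() {
        doPeriodicWork(); // runs on the Timer's worker thread, not the UI thread
        timer.cancel();   // stop the timer once the work is done
    }
}, 3000);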
Custom Views: the classes involved are Paint, Canvas, Rect, RectF, Path, View.MeasureSpec, Timer and TimerTask.

View is abstract, so you obtain an object by subclassing; after binding it in the layout file, configure the custom View's attributes from code. Override onMeasure() to measure the dimensions given in the layout file and save the result with View's setMeasuredDimension(); override onDraw() to do the drawing. For animated drawing, use Timer's schedule(TimerTask, long delay, long period), which reruns run() every period after the initial delay; keep the time-consuming work inside run(), and refresh the drawing with View's invalidate() from the main thread or postInvalidate() from a background thread. A sketch:
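(A square view whose needle redraws once a second — an illustrative sketch, not a complete widget.)

class ClockView extends View {
    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private float angle = 0;
    public ClockView(Context context, AttributeSet attrs) {
        super(context, attrs);
        new Timer().schedule(new TimerTask() {
            @Override
            public void run() {
                angle += 6;
                postInvalidate(); // refresh from the Timer's worker thread
            }
        }, 1000, 1000);
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int size = View.MeasureSpec.getSize(widthMeasureSpec);
        setMeasuredDimension(size, size); // keep the view square
    }
    @Override
    protected void onDraw(Canvas canvas) {
        float cx = getWidth() / 2f, cy = getHeight() / 2f;
        canvas.drawLine(cx, cy,
                cx + (float) Math.cos(Math.toRadians(angle)) * cx,
                cy + (float) Math.sin(Math.toRadians(angle)) * cy, paint);
    }
}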
Databases:

Common choices are Oracle, a large commercial database that must be installed and configured; MySQL, a mid-size database, also installed and configured but free; and SQLite, a small free embedded database with a tiny memory footprint, currently at major version 3. On Android you create a SQLite database through SQLiteOpenHelper and operate on it through SQLiteDatabase. The helper is abstract: subclass it, override its two methods, and call the superclass constructor (Context context, String name, SQLiteDatabase.CursorFactory factory, int version) from your own constructor to create the database; create tables in onCreate() and handle version upgrades in onUpgrade(). On the SQLiteDatabase object, call execSQL() to operate on the database with SQL such as:

create table student(_id integer primary key autoincrement, name text);
insert into student(_id, name) values(1, 'zx');
delete from student where _id = 1;
update student set _id = 2 where name = 'zx';
select * from student;
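A minimal helper matching that description ("student.db" and the table are illustrative names):

class StudentDbHelper extends SQLiteOpenHelper {
    StudentDbHelper(Context context) {
        super(context, "student.db", null, 1);
    }
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL("create table student(_id integer primary key autoincrement, name text)");
    }
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("drop table if exists student"); // simplistic upgrade policy
        onCreate(db);
    }
}

SQLiteDatabase db = new StudentDbHelper(context).getWritableDatabase();
db.execSQL("insert into student(name) values(?)", new Object[]{"zx"});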
Optimizing ListView/GridView adapters:

Wrap the row's controls in a holder class. When a row view is requested, check whether the reusable convertView exists: if not, inflate the view from the layout file, create the holder, and stash its address with setTag(); if it does exist, reuse the address via getTag(). A sketch:
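(R.layout.list_item, R.id.title, inflater and items are placeholder members of the adapter.)

@Override
public View getView(int position, View convertView, ViewGroup parent) {
    ViewHolder holder;
    if (convertView == null) {
        convertView = inflater.inflate(R.layout.list_item, parent, false);
        holder = new ViewHolder();
        holder.title = (TextView) convertView.findViewById(R.id.title);
        convertView.setTag(holder); // stash the looked-up views
    } else {
        holder = (ViewHolder) convertView.getTag(); // reuse them on recycle
    }
    holder.title.setText(items.get(position));
    return convertView;
}

static class ViewHolder {
    TextView title;
}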
Reflection:

Ways to store data:
SharedPreferences, databases, files, the network, and ContentProviders.
Broadcasts:

A broadcast needs receivers, a sender, and a channel. By how they are sent, broadcasts divide into ordered and unordered. In an ordered broadcast the receivers take turns according to their configured priorities; each receiver is in turn a sender that passes the message on to those behind it, and along the way it may add information or stop the broadcast entirely. In an unordered broadcast the receivers are unrelated and all get the message straight from the sender; nothing can be added in transit and the broadcast cannot be stopped. Receivers must be registered before the broadcast is sent, in one of two ways: dynamically or statically. Dynamic registration happens in code through a Context method; every kind of broadcast can be registered dynamically, the registration's lifetime follows the application, and it is cheaper on memory than static registration. Static registration happens in the manifest; some system broadcasts cannot be received this way, and its lifetime follows the system — the receiver runs from system boot, which costs more memory. A receiver subclasses BroadcastReceiver and implements the abstract onReceive() method, passing data onward through callback interfaces. Note: receivers must be registered before the broadcast is sent, and an explicit (targeted) broadcast needs no intent filter. A registration sketch:
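(A minimal dynamically registered receiver; register in onResume() and unregister in onPause() so its lifetime follows the app.)

BroadcastReceiver receiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        Log.d("Receiver", "got: " + intent.getAction());
    }
};
IntentFilter filter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED);
registerReceiver(receiver, filter);
// ... later ...
unregisterReceiver(receiver);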
Android layouts — nine kinds:

LinearLayout: lays children out horizontally or vertically; its hallmark is weight, which fixes each control's share of the view;
RelativeLayout: positions each control relative to the parent or to sibling controls;
AbsoluteLayout: pins controls at absolute coordinates in the view; hardly ever used any more;
TableLayout: children sit in rows (TableRow), with the columns formed by the controls;
FrameLayout: stacks each child over the earlier ones; typically used for overlays such as pause buttons;
GridLayout: controls can span rows and columns, wrapping when a row fills up;
Side-sliding menus: left and right slide-out menus, built from a main menu plus secondary menus;
Pull-to-refresh: pull down to refresh, pull up to load more;
DrawerLayout.
Android versions and their API levels:

1.6 — 4; 2.1 — 7; 3.0 — 11; 4.0.3 — 15; 4.3 — 18; 5.0 — 21; 5.1 — 22; 6.0 — 23; 7.1 — 25.

The four layers of the Android stack:
Application layer: written in Java; this is where app development happens;
Libraries and runtime layer: Java and C; views, manager classes and the like;
Framework layer: C and C++ on Linux; the frameworks, browser engine and so on;
Kernel layer: Linux and C; where the drivers are developed.

The four major components, again:
Activity: a screen; mediates the exchange between the program and the user; has its own lifecycle (seven callbacks) and four launch modes.
Service:
BroadcastReceiver: three essentials — sender, receivers, and the channel (Intent). Types: ordered (receivers take turns, data travels with the broadcast and can be intercepted) and unordered (the opposite). Registration: static registration in the manifest listens persistently, costs more memory, and its lifetime follows the system; dynamic registration in code works for every broadcast (some system broadcasts can only be received dynamically), listens temporarily, costs less memory, and its lifetime follows the application.
ContentProvider: does not store data itself, yet is counted among the five data-access mechanisms. Its points: ① it puts a uniform interface in front of data operations; ② it allows data to be accessed across applications; ③ it backs access to contacts, messages, audio and video on Android. Data is reached through a ContentResolver, which can insert, delete, update and query. For instance:
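(A minimal contacts query through a ContentResolver; requires the READ_CONTACTS permission.)

Cursor cursor = getContentResolver().query(
        ContactsContract.Contacts.CONTENT_URI, null, null, null, null);
while (cursor != null && cursor.moveToNext()) {
    String name = cursor.getString(
            cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
    Log.d("Contacts", name);
}
if (cursor != null) cursor.close();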
Animation: — IO streams: — Serialization: — AlertDialog:
Set implementations: — Detecting battery level:
Custom SurfaceView:
Custom View: how the three constructors differ
Message: Handler.obtainMessage() / new Message() / Message.obtain()
Accessing the network with HttpURLConnection
GridView — async tasks — animation
Abstract classes vs. interfaces — reflection — cloning — serialization — implementing side-sliding — databases — Socket:
Gson parsing
AsyncTask vs. plain worker threads — WebView — version upgrades — rounding photo corners
Collection vs. Collections — SQL statements
MVP vs. MVC: — TCP vs. UDP: — the one-tap share flow: — understanding the HTTP protocol: — network access without a framework: — List vs. Set collections: — the custom-View workflow: — traits of LinearLayout: — how ViewPager works: — ways to start a Service:
Ways to start an Activity: — XML data parsing:
Part 4: Android Multimedia Framework Summary (22): MediaCodec in C++ — from creation through start, and the state transitions

Starting with this installment we dig into the C++ source. The agenda:

mediacodec.h
CreateByType
init
the BufferInfo inner class of MediaCodec
the configure process
start

BufferInfo corresponds to a struct in MediaCodec.h:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
struct BufferInfo {
    uint32_t mBufferID;
    sp<ABuffer> mData;
    sp<ABuffer> mEncryptedData;
    sp<IMemory> mSharedEncryptedBuffer;
    sp<AMessage> mNotify;
    sp<AMessage> mFormat;
    bool mOwnedByClient;
};

The declarations in mediacodec.h live under frameworks/av/include/media/stagefright:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
namespace android {

struct ABuffer;
struct AMessage;
struct AReplyToken;
struct AString;
struct CodecBase;
struct IBatteryStats;
struct ICrypto;
class IMemory;
struct MemoryDealer;
class IResourceManagerClient;
class IResourceManagerService;
struct PersistentSurface;
struct SoftwareRenderer;
struct Surface;

struct MediaCodec : public AHandler {
    enum ConfigureFlags {
        CONFIGURE_FLAG_ENCODE = 1,
    };

    enum BufferFlags {
        BUFFER_FLAG_SYNCFRAME   = 1,
        BUFFER_FLAG_CODECCONFIG = 2,
        BUFFER_FLAG_EOS         = 4,
    };

    enum {
        CB_INPUT_AVAILABLE = 1,
        CB_OUTPUT_AVAILABLE = 2,
        CB_ERROR = 3,
        CB_OUTPUT_FORMAT_CHANGED = 4,
        CB_RESOURCE_RECLAIMED = 5,
    };
    static const pid_t kNoPid = -1;

    static sp<MediaCodec> CreateByType(
            const sp<ALooper> &looper, const char *mime, bool encoder,
            status_t *err = NULL, pid_t pid = kNoPid);

    static sp<MediaCodec> CreateByComponentName(
            const sp<ALooper> &looper, const char *name,
            status_t *err = NULL, pid_t pid = kNoPid);

    static sp<PersistentSurface> CreatePersistentInputSurface();

    status_t configure(
            const sp<AMessage> &format,
            const sp<Surface> &surface,
            const sp<ICrypto> &crypto,
            uint32_t flags);

    status_t setCallback(const sp<AMessage> &callback);

    status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);

    status_t createInputSurface(sp<IGraphicBufferProducer> *bufferProducer);

    status_t setInputSurface(const sp<PersistentSurface> &surface);
    status_t start();

    // Returns to a state in which the component remains allocated but
    // unconfigured.
    status_t stop();

    // Resets the codec to the INITIALIZED state. Can be called after an error
    // has occured to make the codec usable.
    status_t reset();

    // Client MUST call release before releasing final reference to this
    // object.
    status_t release();

    status_t flush();

    status_t queueInputBuffer(
            size_t index, size_t offset, size_t size,
            int64_t presentationTimeUs, uint32_t flags,
            AString *errorDetailMsg = NULL);

    status_t queueSecureInputBuffer(
            size_t index, size_t offset,
            const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
            const uint8_t key[16], const uint8_t iv[16],
            CryptoPlugin::Mode mode,
            int64_t presentationTimeUs, uint32_t flags,
            AString *errorDetailMsg = NULL);

    status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs = 0ll);

    status_t dequeueOutputBuffer(
            size_t *index, size_t *offset, size_t *size,
            int64_t *presentationTimeUs, uint32_t *flags,
            int64_t timeoutUs = 0ll);

    status_t renderOutputBufferAndRelease(size_t index, int64_t timestampNs);
    status_t renderOutputBufferAndRelease(size_t index);
    status_t releaseOutputBuffer(size_t index);

    status_t signalEndOfInputStream();

    status_t getOutputFormat(sp<AMessage> *format) const;
    status_t getInputFormat(sp<AMessage> *format) const;

    status_t getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const;
    status_t getInputBuffers(Vector<sp<ABuffer> > *buffers) const;
    status_t getOutputBuffers(Vector<sp<ABuffer> > *buffers) const;

    status_t getOutputBuffer(size_t index, sp<ABuffer> *buffer);
    status_t getOutputFormat(size_t index, sp<AMessage> *format);
    status_t getInputBuffer(size_t index, sp<ABuffer> *buffer);

    status_t setSurface(const sp<Surface> &surface);

    status_t requestIDRFrame();

    // Notification will be posted once there "is something to do", i.e.
    // an input/output buffer has become available, a format change is
    // pending, an error is pending.
    void requestActivityNotification(const sp<AMessage> &notify);

    status_t getName(AString *componentName) const;

    status_t setParameters(const sp<AMessage> &params);

    // Create a MediaCodec notification message from a list of rendered or dropped render infos
    // by adding rendered frame information to a base notification message. Returns the number
    // of frames that were rendered.
    static size_t CreateFramesRenderedMessage(
            std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg);

    virtual ~MediaCodec();

    virtual void onMessageReceived(const sp<AMessage> &msg);

    // used by ResourceManagerClient
    status_t reclaim(bool force = false);
    friend struct ResourceManagerClient;

private:
    enum State {
        UNINITIALIZED,
        INITIALIZING,
        INITIALIZED,
        CONFIGURING,
        CONFIGURED,
        STARTING,
        STARTED,
        FLUSHING,
        FLUSHED,
        STOPPING,
        RELEASING,
    };

    enum {
        kPortIndexInput  = 0,
        kPortIndexOutput = 1,
    };

    enum {
        kWhatInit                        = 'init',
        kWhatConfigure                   = 'conf',
        kWhatSetSurface                  = 'sSur',
        kWhatCreateInputSurface          = 'cisf',
        kWhatSetInputSurface             = 'sisf',
        kWhatStart                       = 'strt',
        kWhatStop                        = 'stop',
        kWhatRelease                     = 'rele',
        kWhatDequeueInputBuffer          = 'deqI',
        kWhatQueueInputBuffer            = 'queI',
        kWhatDequeueOutputBuffer         = 'deqO',
        kWhatReleaseOutputBuffer         = 'relO',
        kWhatSignalEndOfInputStream      = 'eois',
        kWhatGetBuffers                  = 'getB',
        kWhatFlush                       = 'flus',
        kWhatGetOutputFormat             = 'getO',
        kWhatGetInputFormat              = 'getI',
        kWhatDequeueInputTimedOut        = 'dITO',
        kWhatDequeueOutputTimedOut       = 'dOTO',
        kWhatCodecNotify                 = 'codc',
        kWhatRequestIDRFrame             = 'ridr',
        kWhatRequestActivityNotification = 'racN',
        kWhatGetName                     = 'getN',
        kWhatSetParameters               = 'setP',
        kWhatSetCallback                 = 'setC',
        kWhatSetNotification             = 'setN',
    };

    enum {
        kFlagUsesSoftwareRenderer       = 1,
        kFlagOutputFormatChanged        = 2,
        kFlagOutputBuffersChanged       = 4,
        kFlagStickyError                = 8,
        kFlagDequeueInputPending        = 16,
        kFlagDequeueOutputPending       = 32,
        kFlagIsSecure                   = 64,
        kFlagSawMediaServerDie          = 128,
        kFlagIsEncoder                  = 256,
        kFlagGatherCodecSpecificData    = 512,
        kFlagIsAsync                    = 1024,
        kFlagIsComponentAllocated       = 2048,
        kFlagPushBlankBuffersOnShutdown = 4096,
    };

    struct BufferInfo {
        uint32_t mBufferID;
        sp<ABuffer> mData;
        sp<ABuffer> mEncryptedData;
        sp<IMemory> mSharedEncryptedBuffer;
        sp<AMessage> mNotify;
        sp<AMessage> mFormat;
        bool mOwnedByClient;
    };
    struct ResourceManagerServiceProxy : public IBinder::DeathRecipient {
        ResourceManagerServiceProxy(pid_t pid);
        ~ResourceManagerServiceProxy();

        void init();

        // implements DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

        void addResource(
                int64_t clientId,
                const sp<IResourceManagerClient> &client,
                const Vector<MediaResource> &resources);

        void removeResource(int64_t clientId);

        bool reclaimResource(const Vector<MediaResource> &resources);

    private:
        Mutex mLock;
        sp<IResourceManagerService> mService;
        pid_t mPid;
    };
    State mState;
    bool mReleasedByResourceManager;
    sp<ALooper> mLooper;
    sp<ALooper> mCodecLooper;
    sp<CodecBase> mCodec;
    AString mComponentName;
    sp<AReplyToken> mReplyID;
    uint32_t mFlags;
    status_t mStickyError;
    sp<Surface> mSurface;
    SoftwareRenderer *mSoftRenderer;

    sp<AMessage> mOutputFormat;
    sp<AMessage> mInputFormat;
    sp<AMessage> mCallback;
    sp<AMessage> mOnFrameRenderedNotification;
    sp<MemoryDealer> mDealer;

    sp<IResourceManagerClient> mResourceManagerClient;
    sp<ResourceManagerServiceProxy> mResourceManagerService;

    bool mBatteryStatNotified;
    bool mIsVideo;
    int32_t mVideoWidth;
    int32_t mVideoHeight;
    int32_t mRotationDegrees;

    // initial create parameters
    AString mInitName;
    bool mInitNameIsType;
    bool mInitIsEncoder;

    // configure parameter
    sp<AMessage> mConfigureMsg;

    // Used only to synchronize asynchronous getBufferAndFormat
    // across all the other (synchronous) buffer state change
    // operations, such as de/queueIn/OutputBuffer, start and
    // stop/flush/reset/release.
    Mutex mBufferLock;

    List<size_t> mAvailPortBuffers[2];
    Vector<BufferInfo> mPortBuffers[2];

    int32_t mDequeueInputTimeoutGeneration;
    sp<AReplyToken> mDequeueInputReplyID;

    int32_t mDequeueOutputTimeoutGeneration;
    sp<AReplyToken> mDequeueOutputReplyID;

    sp<ICrypto> mCrypto;

    List<sp<ABuffer> > mCSD;

    sp<AMessage> mActivityNotify;

    bool mHaveInputSurface;
    bool mHavePendingInputBuffers;
    MediaCodec(const sp<ALooper> &looper, pid_t pid);

    static status_t PostAndAwaitResponse(
            const sp<AMessage> &msg, sp<AMessage> *response);

    void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);

    status_t init(const AString &name, bool nameIsType, bool encoder);

    void setState(State newState);
    void returnBuffersToCodec();
    void returnBuffersToCodecOnPort(int32_t portIndex);
    size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
    status_t onQueueInputBuffer(const sp<AMessage> &msg);
    status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
    ssize_t dequeuePortBuffer(int32_t portIndex);

    status_t getBufferAndFormat(
            size_t portIndex, size_t index,
            sp<ABuffer> *buffer, sp<AMessage> *format);

    bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
    bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
    void cancelPendingDequeueOperations();

    void extractCSD(const sp<AMessage> &format);
    status_t queueCSDInputBuffer(size_t bufferIndex);

    status_t handleSetSurface(const sp<Surface> &surface);
    status_t connectToSurface(const sp<Surface> &surface);
    status_t disconnectFromSurface();

    void postActivityNotificationIfPossible();

    void onInputBufferAvailable();
    void onOutputBufferAvailable();
    void onError(status_t err, int32_t actionCode, const char *detail = NULL);
    void onOutputFormatChanged();

    status_t onSetParameters(const sp<AMessage> &params);

    status_t amendOutputFormatWithCodecSpecificData(const sp<ABuffer> &buffer);
    void updateBatteryStat();
    bool isExecuting() const;

    uint64_t getGraphicBufferSize();
    void addResource(const String8 &type, const String8 &subtype, uint64_t value);

    bool hasPendingBuffer(int portIndex);
    bool hasPendingBuffer();

    /* called to get the last codec error when the sticky flag is set.
     * if no such codec error is found, returns UNKNOWN_ERROR.
     */
    inline status_t getStickyError() const {
        return mStickyError != 0 ? mStickyError : UNKNOWN_ERROR;
    }

    inline void setStickyError(status_t err) {
        mFlags |= kFlagStickyError;
        mStickyError = err;
    }

    DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};

}  // namespace android

CreateByType:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
// static
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const char *mime, bool encoder,
        status_t *err, pid_t pid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid);

    const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
    if (err != NULL) {
        *err = ret;
    }
    return ret == OK ? codec : NULL; // NULL deallocates codec.
}

Next comes the init process:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
    mResourceManagerService->init();

    // save the initial parameters, for later use by reset
    mInitName = name;
    mInitNameIsType = nameIsType;
    mInitIsEncoder = encoder;

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs; until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.
    if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) { // matches "omx."
        mCodec = new ACodec;       // instantiate ACodec
    } else if (!nameIsType
            && !strncasecmp(name.c_str(), "android.filter.", 15)) {
        mCodec = new MediaFilter;  // instantiate MediaFilter
    } else {
        return NAME_NOT_FOUND;
    }

    bool secureCodec = false;
    if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) {
        mIsVideo = true;
    } else {
        AString tmp = name;
        if (tmp.endsWith(".secure")) {
            secureCodec = true;
            tmp.erase(tmp.size() - 7, 7);
        }
        const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
        if (mcl == NULL) {
            mCodec = NULL;  // remove the codec.
            return NO_INIT; // if called from Java should raise IOException
        }
        ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());
        if (codecIdx >= 0) {
            const sp<MediaCodecInfo> info = mcl->getCodecInfo(codecIdx);
            Vector<AString> mimes;
            info->getSupportedMimes(&mimes);
            for (size_t i = 0; i < mimes.size(); i++) {
                if (mimes[i].startsWith("video/")) {
                    mIsVideo = true;
                    break;
                }
            }
        }
    }

    if (mIsVideo) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper"); // name the looper "CodecLooper"
            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        }
        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }

    mLooper->registerHandler(this);

    mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, this));

    sp<AMessage> msg = new AMessage(kWhatInit, this);
    msg->setString("name", name);
    msg->setInt32("nameIsType", nameIsType);
    if (nameIsType) {
        msg->setInt32("encoder", encoder);
    }

    status_t err;
    Vector<MediaResource> resources;
    const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

The configure process:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatConfigure, this);

    if (mIsVideo) {
        format->findInt32("width", &mVideoWidth);
        format->findInt32("height", &mVideoHeight);
        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
            mRotationDegrees = 0;
        }
    }

    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);

    if (crypto != NULL) {
        msg->setPointer("crypto", crypto.get());
    }

    // save msg for reset
    mConfigureMsg = msg;

    status_t err;
    Vector<MediaResource> resources;
    const char *type = (mFlags & kFlagIsSecure) ?
            kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            // MediaCodec now set state to UNINITIALIZED upon any fatal error.
            // To maintain backward-compatibility, do a reset() to put codec
            // back into INITIALIZED state.
            // But don't reset if the err is INVALID_OPERATION, which means
            // the configure failure is due to wrong state.
            ALOGE("configure failed with err 0x%08x, resetting...", err);
            reset();
        }
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

The start process:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
status_t MediaCodec::start() {
    sp<AMessage> msg = new AMessage(kWhatStart, this);

    status_t err;
    Vector<MediaResource> resources;
    const char *type = (mFlags & kFlagIsSecure) ?
            kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
            // Recover codec from previous error before retry start.
            err = reset();
            if (err != OK) {
                ALOGE("retrying start: failed to reset codec");
                break;
            }
            sp<AMessage> response;
            err = PostAndAwaitResponse(mConfigureMsg, &response);
            if (err != OK) {
                ALOGE("retrying start: failed to configure codec");
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

The stop process:
//create by 逆流的魚yuiop on 2016/12/11 //blog地址:http://blog.csdn.net/hejjunlin
status_t MediaCodec::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, this);

    sp<AMessage> response;
    return PostAndAwaitResponse(msg, &response);
}

The corresponding AMessage.cpp, together with AHandler.cpp and ALooper.cpp, forms a native counterpart of the Java-side message mechanism, with interface method names kept identical to the Java layer.

All messages are handled in onMessageReceived, which is where MediaCodec's state switches happen. The excerpt below shows the error-handling path, switching on the current state:
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
    // ... excerpt: handling a codec error notification ...
    switch (mState) {
        case INITIALIZING:  // while initializing
        {
            setState(UNINITIALIZED);
            break;
        }

        case CONFIGURING:  // while configuring
        {
            setState(actionCode == ACTION_CODE_FATAL ?
                    UNINITIALIZED : INITIALIZED);
            break;
        }

        case STARTING:  // while starting
        {
            setState(actionCode == ACTION_CODE_FATAL ?
                    UNINITIALIZED : CONFIGURED);
            break;
        }

        case STOPPING:   // while stopping
        case RELEASING:  // while releasing
        {
            // Ignore the error, assuming we'll still get
            // the shutdown complete notification.
            sendErrorResponse = false;

            if (mFlags & kFlagSawMediaServerDie) {
                // MediaServer died, there definitely won't
                // be a shutdown complete notification after
                // all.
                // note that we're directly going from
                // STOPPING->UNINITIALIZED, instead of the
                // usual STOPPING->INITIALIZED state.
                setState(UNINITIALIZED);
                if (mState == RELEASING) {
                    mComponentName.clear();
                }
                (new AMessage)->postReply(mReplyID);
            }
            break;
        }

        case FLUSHING:  // while flushing
        {
            if (actionCode == ACTION_CODE_FATAL) {
                setState(UNINITIALIZED);
            } else {
                setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
            }
            break;
        }

        case FLUSHED:
        case STARTED:
        {
            sendErrorResponse = false;

            setStickyError(err);
            postActivityNotificationIfPossible();
            cancelPendingDequeueOperations();

            if (mFlags & kFlagIsAsync) {
                onError(err, actionCode);
            }
            switch (actionCode) {
                case ACTION_CODE_TRANSIENT:
                    break;
                case ACTION_CODE_RECOVERABLE:
                    setState(INITIALIZED);
                    break;
                default:
                    setState(UNINITIALIZED);
                    break;
            }
            break;
        }

        default:
        {
            sendErrorResponse = false;

            setStickyError(err);
            postActivityNotificationIfPossible();

            // actionCode in an uninitialized state is always fatal.
            if (mState == UNINITIALIZED) {
                actionCode = ACTION_CODE_FATAL;
            }
            if (mFlags & kFlagIsAsync) {
                onError(err, actionCode);
            }
            switch (actionCode) {
                case ACTION_CODE_TRANSIENT:
                    break;
                case ACTION_CODE_RECOVERABLE:
                    setState(INITIALIZED);
                    break;
                default:
                    setState(UNINITIALIZED);
                    break;
            }
            break;
        }
    }
}
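Viewed from the Java API, those native states line up with the calls an application makes; a minimal sketch (format creation and error handling condensed):

MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
MediaCodec codec;
try {
    codec = MediaCodec.createDecoderByType("video/avc"); // allocate: -> INITIALIZED
} catch (IOException e) {
    throw new RuntimeException(e);
}
codec.configure(format, null, null, 0); // CONFIGURING -> CONFIGURED
codec.start();                          // STARTING   -> STARTED
codec.flush();                          // FLUSHING   -> FLUSHED (async) / STARTED
codec.stop();                           // STOPPING   -> INITIALIZED (still allocated)
codec.release();                        // RELEASING  -> UNINITIALIZED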
Part 5: Android WebView Summary
1. Add the permission: AndroidManifest.xml must declare android.permission.INTERNET; without it you get a "web page not available" error.
2. Create a WebView component in the Activity: WebView webView = new WebView(this);

3. Set up the WebView's basics:

If the pages you visit contain JavaScript, the WebView must enable JavaScript support:

webView.getSettings().setJavaScriptEnabled(true);

Make touch focus work:

requestFocus();

Hide the scrollbars:

this.setScrollBarStyle(SCROLLBARS_OUTSIDE_OVERLAY);

4. Tell the WebView which page to show:

For a page on the web, call webView.loadUrl("..."); local pages are stored in the assets directory.
5. If you want clicked links handled by your own code rather than opened in the system browser, attach a WebViewClient listener to the WebView and override some of its methods:

shouldOverrideUrlLoading: responds to taps on hyperlinks in the page; the WebViewClient calls it with the tapped URL as the parameter.
onLoadResource
onPageStarted
onPageFinished
onReceivedError
onReceivedHttpAuthRequest

6. After the user has clicked through many pages, pressing the system "Back" key would otherwise finish() the whole browser Activity. To step back through the WebView's history instead of exiting the browser, handle and consume the Back event in the current Activity by overriding Activity's onKeyDown(int keyCode, KeyEvent event):
public boolean onKeyDown(int keyCode, KeyEvent event){
if(webView.canGoBack() && keyCode == KeyEvent.KEYCODE_BACK){
webView.goBack(); // goBack() returns to the WebView's previous page
return true;
}
return super.onKeyDown(keyCode, event);
}
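Tying the steps above together, a minimal sketch (R.id.webview and the URL are placeholders):

WebView webView = (WebView) findViewById(R.id.webview); // placeholder id from your layout
webView.getSettings().setJavaScriptEnabled(true);
webView.setScrollBarStyle(View.SCROLLBARS_OUTSIDE_OVERLAY);
webView.setWebViewClient(new WebViewClient() {
    @Override
    public boolean shouldOverrideUrlLoading(WebView view, String url) {
        view.loadUrl(url); // keep navigation inside this WebView
        return true;       // instead of launching the system browser
    }
});
webView.loadUrl("http://www.example.com"); // placeholder URL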