Using MediaCodec (Audio Encoding, Part 1)
Preparation
Prepare a PCM file: sample rate 16000 Hz, mono, 16-bit signed little-endian (matching the ffplay command below).
If it plays correctly with the following ffplay command, the audio format is fine:
ffplay -f s16le -ar 16000 -ch_layout mono -i hello.pcm
Push the file to the Download directory:
adb -s adb-4128230924000237-Nl4jPZ._adb-tls-connect._tcp push hello.pcm /sdcard/Download/
On Android 10+ you cannot operate directly on files in the Download directory, even if you have been granted the read/write permissions.
The read/write permissions: past and present
android.Manifest.permission.READ_EXTERNAL_STORAGE,
android.Manifest.permission.WRITE_EXTERNAL_STORAGE,
On Android 13+ both of these are fully deprecated. The lint message reads:
READ_EXTERNAL_STORAGE is deprecated (and is not granted) when targeting Android 13+. If you need to query or interact with MediaStore or media files on the shared storage, you should instead use one or more new storage permissions: READ_MEDIA_IMAGES, READ_MEDIA_VIDEO or READ_MEDIA_AUDIO.
Android 10+
Shared storage goes through MediaStore, but the two permissions above still apply to video, audio, and photos.
Android 13+
The two permissions above are fully deprecated; video, audio, and photos each use a dedicated permission (some heavily customized domestic ROMs may still honor the old pair even on Android 13+):
READ_MEDIA_IMAGES, READ_MEDIA_VIDEO or READ_MEDIA_AUDIO
My current phone runs Android 11 and I am only accessing the Download directory, so neither of the two permissions above is needed; below Android 10 they are still required.
Therefore hello.pcm can only be reached through SAF or MANAGE_EXTERNAL_STORAGE.
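Before moving on to SAF, here is a minimal sketch of the permission split described above (the function name is mine, not part of this demo; feeding its result to the permission launcher shown later in this post is just one way to use it):
// A sketch: pick the media-read permissions to request at runtime based on API level.
// Assumes the same permissions are also declared in AndroidManifest.xml.
fun audioReadPermissions(): Array<String> = when {
    Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU ->
        arrayOf(Manifest.permission.READ_MEDIA_AUDIO) // Android 13+: dedicated media permission
    else ->
        arrayOf(Manifest.permission.READ_EXTERNAL_STORAGE) // Android 12 and below: still needed for other apps' media
}
// usage with the permission launcher shown later in this post:
// permissionLauncher.launch(audioReadPermissions())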
SAF
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
context.contentResolver.openInputStream(this)?.use { inputStream ->
val bytes = ByteArray(1024)
inputStream.read(bytes)
Log.i(TAG, "MediaCodecScreen -> data: ${bytes.joinToString()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "發送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
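If the document URI has to survive a process restart (for example to re-encode the same file later), it can be persisted. A small sketch, assuming it runs inside the OpenDocument callback above:
// Sketch: OpenDocument grants persistable URI permissions, so keep read access across restarts.
uri?.let {
    context.contentResolver.takePersistableUriPermission(it, Intent.FLAG_GRANT_READ_URI_PERMISSION)
}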
On Android 10+, prefer MediaStore for anything in shared storage. Skipping it is risky: if you write a file straight into Download and the user uninstalls and reinstalls your app, you can no longer access that file and your code may crash. Each app is effectively an anonymous Linux user; after a reinstall that identity changes, so the old file is no longer yours. The underlying details come down to SELinux, which is worth studying separately.
On Android 10 through Android 12, accessing other apps' video, audio, and photos still requires the read permission.
Also note that MediaStore is essentially a database that speeds up file lookup. When you use MediaStore, do not just insert a record; you also have to write the file's binary data through the MediaStore URI.
Try not to convert a Uri to a File or a File to a Uri; that is not how modern Android development works.
In the app's private directories, whether on internal or external storage, you can keep using java.io.File, because they have nothing to do with shared storage.
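To make the "insert a record and also write the bytes" point concrete, here is a minimal sketch for Android 10+; the function name and byte source are placeholders, and IS_PENDING keeps the entry hidden from other apps until the write has finished:
// Sketch (API 29+): create a MediaStore entry under Music/, write the data, then clear IS_PENDING.
fun saveToMusic(context: Context, displayName: String, data: ByteArray): Uri? {
    val values = contentValuesOf(
        MediaStore.Audio.Media.DISPLAY_NAME to displayName,
        MediaStore.Audio.Media.RELATIVE_PATH to Environment.DIRECTORY_MUSIC,
        MediaStore.Audio.Media.IS_PENDING to 1
    )
    val uri = context.contentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, values) ?: return null
    context.contentResolver.openOutputStream(uri)?.use { it.write(data) } // the record alone is useless without the bytes
    values.clear()
    values.put(MediaStore.Audio.Media.IS_PENDING, 0)
    context.contentResolver.update(uri, values, null, null)
    return uri
}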
PCM to MP3 encoding
The encoding function is as follows:
internal suspend fun pcmToMp3(
context: Context,
pcmUri: Uri,
mp3Uri: Uri,
): Unit = suspendCancellableCoroutine { continuation ->
val mediaCodecList = MediaCodecList(MediaCodecList.ALL_CODECS)
val mp3Encoders: List<MediaCodecInfo> = mediaCodecList.codecInfos.filter { it.isEncoder }.filter {
it.supportedTypes.contains(element = MediaFormat.MIMETYPE_AUDIO_MPEG)
}
if (mp3Encoders.isEmpty()){
if (continuation.isActive){
continuation.resume(Unit) // no MediaCodec MP3 encoder available (devices normally ship only an MP3 decoder); fall back to a software encoder such as LAME
}
return@suspendCancellableCoroutine
}
mp3Encoders.forEach { mediaCodecInfo ->
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "pcmToMp3 -> name: ${mediaCodecInfo.name}, canonicalName: ${mediaCodecInfo.canonicalName}, isAlias: ${mediaCodecInfo.isAlias}, isVendor: ${mediaCodecInfo.isVendor}, isHardwareAccelerated: ${mediaCodecInfo.isHardwareAccelerated}, isEncoder: ${mediaCodecInfo.isEncoder}, isSoftwareOnly: ${mediaCodecInfo.isSoftwareOnly}, supportedTypes: ${mediaCodecInfo.supportedTypes.joinToString()}")
} else {
Log.i(TAG, "pcmToMp3 -> name: ${mediaCodecInfo.name}, isEncoder: ${mediaCodecInfo.isEncoder}, supportedTypes: ${mediaCodecInfo.supportedTypes.joinToString()}")
}
}
val mediaFormat: MediaFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_MPEG, 16000, 1).apply {
setInteger(MediaFormat.KEY_BIT_RATE, 128000)
}
val mediaCodec: MediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_MPEG)
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val pcmInputStream: InputStream = context.contentResolver.openInputStream(pcmUri)!!
val mp3OutputStream: OutputStream = context.contentResolver.openOutputStream(mp3Uri)!!
val bytes = ByteArray(1024 * 8)
mediaCodec.setCallback(object : MediaCodec.Callback(){
override fun onError(
codec: MediaCodec,
e: MediaCodec.CodecException
) {
Log.e(TAG, "onError name: ${codec.name}, thread: ${Thread.currentThread()}, error: ${e.message}", e)
}
override fun onInputBufferAvailable(
codec: MediaCodec,
index: Int
) {
val inputBuffer: ByteBuffer = codec.getInputBuffer(index) ?: return
val size: Int = pcmInputStream.read(bytes, 0, minOf(bytes.size, inputBuffer.limit())) // never read more than either the temp buffer or the codec input buffer can hold
Log.i(TAG, "onInputBufferAvailable -> name: ${mediaCodec.name}, index: $index, thread: ${Thread.currentThread()}, size: $size, limit: ${inputBuffer.limit()}, position: ${inputBuffer.position()}")
if (size > 0) {
inputBuffer.put(bytes, 0, size)
codec.queueInputBuffer(index, 0, size, System.nanoTime() / 1000, 0)
} else {
codec.queueInputBuffer(index, 0, 0, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM)
}
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
Log.i(TAG, "onOutputBufferAvailable -> name: ${codec.name}, index: $index, info: ${info.size}, thread: ${Thread.currentThread()}")
val outputBuffer: ByteBuffer = codec.getOutputBuffer(index) ?: return
outputBuffer.get(bytes, 0, info.size)
mp3OutputStream.write(bytes, 0, info.size)
codec.releaseOutputBuffer(index, false)
if ((info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){ // flags is a bit mask, so test the bit rather than compare with ==
Log.i(TAG, "onOutputBufferAvailable -> encoding finished...") // todo: stop and release the codec here as well
pcmInputStream.close()
mp3OutputStream.close()
if (continuation.isActive){
Log.i(TAG, "pcmToMp3 -> encoding finished, resume before...")
continuation.resume(Unit)
Log.i(TAG, "pcmToMp3 -> encoding finished, resume after...")
}
}
}
override fun onOutputFormatChanged(
codec: MediaCodec,
format: MediaFormat
) {
Log.i(TAG, "onOutputFormatChanged -> name: ${codec.name}, format: ${format.getString(MediaFormat.KEY_MIME)}")
}
})
Log.i(TAG, "pcmToMp3 -> before start...")
mediaCodec.start()
Log.i(TAG, "pcmToMp3 -> after start...")
}
The call site looks like this:
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
val audioExtractManager = AudioExtractManager()
val contentValues: ContentValues = contentValuesOf(
MediaStore.Audio.Media.DISPLAY_NAME to "hello.mp3",
MediaStore.Audio.Media.MIME_TYPE to MediaFormat.MIMETYPE_AUDIO_MPEG,
MediaStore.Audio.Media.RELATIVE_PATH to Environment.DIRECTORY_MUSIC
)
val mp3Uri: Uri = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
context.contentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, contentValues)
} else {
FileProvider.getUriForFile(context, "${context.packageName}.provider", File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC),
"hello.mp3"
))
} ?: return@apply
Log.i(TAG, "MediaCodecScreen -> mp3Uri: $mp3Uri")
coroutineScope.launch {
Log.i(TAG, "MediaCodecScreen -> before ${Thread.currentThread()}")
audioExtractManager.pcmToMp3(context, this@apply, mp3Uri)
Log.i(TAG, "MediaCodecScreen -> after ${Thread.currentThread()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "開發者",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
val intent = Intent(Settings.ACTION_APPLICATION_DEVELOPMENT_SETTINGS)
context.startActivity(intent)
},
color = Color.White
)
Text(
text = "發送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
PCM to ADTS AAC encoding
AAC comes in two container formats: ADIF and ADTS.
ADTS is a raw AAC stream made up of frames; on some platforms (iOS, for example) it may not play back correctly.
Reference: ADTS AAC
The 7-byte header
The header is 7 bytes, i.e. 7 * 8 = 56 bits, split into two parts of 28 bits each:
adts_fixed_header();
adts_variable_header();
Let's walk through it bit by bit.
adts_fixed_header()
syncword: the first 12 bits are all 1s and mark the start of an ADTS header
ID: 1 bit, MPEG version: 0 for MPEG-4, 1 for MPEG-2
Layer: 2 bits, always '00'
protection_absent: 1 bit, set to 1 if there is no CRC and 0 if there is a CRC
It is usually 1, meaning no CRC (note: 0 means a CRC is present)
profile: 2 bits, which AAC profile is in use, e.g. 01 = Low Complexity (LC), i.e. AAC LC
The profile value equals the Audio Object Type minus 1:
profile = MPEG-4 Audio Object Type - 1 (AAC LC has Audio Object Type 2, so the field holds 1, binary 01)
sampling_frequency_index: 4 bits, index into the sample-rate table (see the reference above)
Private bit: 1 bit, just set it to 0
channel_configuration: 3 bits, index into the channel table (see the reference above)
Originality: 1 bit, 1 means the audio is original, 0 means it is a copy
Home: 1 bit, 1 means home usage, 0 otherwise
28 bits in total
adts_variable_header
Copyright ID bit: 1 bit, one bit of the centrally registered copyright identifier. The identifier is streamed LSB-first, one bit per frame, and wraps around to its first bit after the last one (a circular buffer).
Copyright ID start: 1 bit, set to 1 if this frame's copyright ID bit is the first bit of the identifier, 0 otherwise.
aac_frame_length: 13 bits, the length of the whole ADTS frame, i.e. the ADTS header plus the raw AAC data.
frame length, this value must include 7 or 9 bytes of header length:
aac_frame_length = (protection_absent == 1 ? 7 : 9) + size(AACFrame)
when protection_absent = 0, the header length is 9 bytes
when protection_absent = 1, the header length is 7 bytes
adts_buffer_fullness: 11 bits
Computed as:
max_bit_reservoir = minimum_decoder_input_size - mean_bits_per_RDB; // for CBR
// bit reservoir state/available bits (≥0 and <max_bit_reservoir); for the i-th frame.
bit_reservoir_state[i] = (int)(bit_reservoir_state[i - 1] + mean_framelength - framelength[i]);
// NCC is the number of channels.
adts_buffer_fullness = bit_reservoir_state[i] / (NCC * 32);
A value of 0x7FF indicates a variable-bit-rate stream.
number_of_raw_data_blocks_in_frame: 2 bits, meaning the ADTS frame carries number_of_raw_data_blocks_in_frame + 1 raw AAC frames.
So number_of_raw_data_blocks_in_frame == 0 means there is exactly one AAC data block in this ADTS frame.
(One raw AAC frame covers 1024 samples plus the associated data.)
In other words, it is the number of AAC frames (raw data blocks, RDBs) in the ADTS frame minus 1; for best compatibility, keep exactly one AAC frame per ADTS frame.
Again 28 bits in total.
When CRC protection is enabled there is additionally:
16 bits: CRC check (as of ISO/IEC 11172-3, subclause 2.4.3.1), if protection_absent is 0.
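To make the bit layout concrete, here is a minimal sketch that reads the fields discussed above back out of a 7-byte header (the function name is mine; it assumes no CRC, i.e. protection_absent = 1):
// Sketch: extract profile, sampling_frequency_index, channel_configuration and aac_frame_length
// from a 7-byte ADTS header, following the bit layout described above.
fun parseAdtsHeader(h: ByteArray): String {
    require(h.size >= 7 && (h[0].toInt() and 0xFF) == 0xFF && (h[1].toInt() and 0xF0) == 0xF0) { "not an ADTS header" }
    val audioObjectType = ((h[2].toInt() and 0xC0) shr 6) + 1 // profile field + 1
    val freqIdx = (h[2].toInt() and 0x3C) shr 2               // sampling_frequency_index
    val channelCfg = ((h[2].toInt() and 0x01) shl 2) or ((h[3].toInt() and 0xC0) shr 6)
    val frameLength = ((h[3].toInt() and 0x03) shl 11) or
            ((h[4].toInt() and 0xFF) shl 3) or
            ((h[5].toInt() and 0xE0) shr 5)                   // includes the 7-byte header
    return "AOT=$audioObjectType freqIdx=$freqIdx channels=$channelCfg frameLength=$frameLength"
}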
Below is an example that prepends an ADTS header to raw AAC data:
private fun addAdtsHeader(packet: ByteArray, packetLen: Int, sampleRate: Int, channels: Int) {
val profile = 2 // AAC LC
val freqIdx = when(sampleRate){
96000 -> 0
88200 -> 1
64000 -> 2
48000 -> 3
44100 -> 4
32000 -> 5
24000 -> 6
22050 -> 7
16000 -> 8
12000 -> 9
11025 -> 10
8000 -> 11
else -> 4 // default to 44100
}
/**
0000 0x00
0001 0x01
0010 0x02
0011 0x03
0100 0x04
0101 0x05
0110 0x06
0111 0x07
1000 0x08
1001 0x09
1010 0x0A
1011 0x0B
1100 0x0C
1101 0x0D
1110 0x0E
1111 0x0F
*/
val chanCfg = channels // CPE = 1, mono = 1
packet[0] = 0xFF.toByte() // byte 1: syncword high 8 bits (1111 1111)
packet[1] = 0xF9.toByte() // byte 2: syncword low 4 bits (1111), ID = 1 (MPEG-2), layer = 00, protection_absent = 1
// byte 3: profile (2 bits) | sampling_frequency_index (4 bits) | private bit (0) | channel_configuration high bit
// for AAC LC / 16 kHz / mono this is 01 | 1000 | 0 | 0 -> 0110 0000
packet[2] = ((profile - 1 shl 6) + (freqIdx shl 2) + (chanCfg shr 2)).toByte()
// byte 4: channel_configuration low 2 bits | originality | home | copyright ID bit | copyright ID start | aac_frame_length bits 12..11
// the originality/home/copyright bits are all left at 0 here
packet[3] = (((chanCfg and 3) shl 6) + (packetLen shr 11)).toByte()
packet[4] = (packetLen and 0x7FF shr 3).toByte() // byte 5: aac_frame_length bits 10..3
// byte 6: aac_frame_length bits 2..0 | adts_buffer_fullness high 5 bits (fullness is 0x7FF here, i.e. VBR)
packet[5] = ((packetLen and 7 shl 5) + 0x1F).toByte()
packet[6] = 0xFC.toByte() // byte 7: adts_buffer_fullness low 6 bits | number_of_raw_data_blocks_in_frame = 0
}
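As a quick sanity check of the helper: a 16 kHz mono AAC LC frame whose total ADTS length (payload plus 7-byte header) is 207 bytes should produce the header FF F9 60 40 19 FF FC.
// Worked example for addAdtsHeader: 16 kHz, mono, total ADTS frame length 207 bytes.
val header = ByteArray(7)
addAdtsHeader(header, 207, 16000, 1)
Log.i(TAG, header.joinToString(" ") { "%02X".format(it.toInt() and 0xFF) }) // FF F9 60 40 19 FF FC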
The encoding function is as follows:
internal suspend fun pcmToAac(context: Context, pcmUri: Uri, aacUri: Uri): Unit = suspendCancellableCoroutine { continuation ->
// Encoders available on this device: aac, 3gpp, audio/amr-wb, audio/flac
// h264
// Decoders: far too many to list
val mediaCodecList = MediaCodecList(MediaCodecList.ALL_CODECS)
val aacEncoders: List<MediaCodecInfo> = mediaCodecList.codecInfos.filter { it.isEncoder }.filter {
it.supportedTypes.contains(element = MediaFormat.MIMETYPE_AUDIO_AAC)
}
Log.i(TAG, "pcmToAac -> aacEncoders: ${aacEncoders.joinToString { it.name }}")
if (aacEncoders.isEmpty()){
// no AAC encoder available via MediaCodec
Log.i(TAG, "pcmToAac -> no AAC encoder available...") // fall back to a software encoder in that case
if (continuation.isActive){
continuation.resume(Unit)
}
return@suspendCancellableCoroutine
}
// on this device the AAC encoder turned out to be software-only
Log.i(TAG, "pcmToAac -> AAC encoder available")
val mediaFormat: MediaFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 16000, 1).apply {
setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
setInteger(MediaFormat.KEY_BIT_RATE, 128000)
}
val mediaCodec: MediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val pcmInputStream: InputStream = context.contentResolver.openInputStream(pcmUri)!!
val aacOutputStream: OutputStream = context.contentResolver.openOutputStream(aacUri)!!
val bytes = ByteArray(1024 * 8)
mediaCodec.setCallback(object : MediaCodec.Callback() {
override fun onError(
codec: MediaCodec,
e: MediaCodec.CodecException
) {
Log.e(
TAG,
"onError name: ${codec.name}, thread: ${Thread.currentThread()}, error: ${e.message}",
e
)
}
override fun onInputBufferAvailable(
codec: MediaCodec,
index: Int
) {
Log.i(
TAG,
"onInputBufferAvailable -> name: ${mediaCodec.name}, index: $index, thread: ${Thread.currentThread()}"
)
val inputBuffer: ByteBuffer = codec.getInputBuffer(index) ?: return
val size: Int = pcmInputStream.read(bytes, 0, minOf(bytes.size, inputBuffer.limit())) // never read more than either the temp buffer or the codec input buffer can hold
if (size > 0) {
inputBuffer.put(bytes, 0, size)
codec.queueInputBuffer(index, 0, size, System.nanoTime() / 1000, 0)
} else {
codec.queueInputBuffer(
index,
0,
0,
System.nanoTime() / 1000,
MediaCodec.BUFFER_FLAG_END_OF_STREAM
)
}
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
Log.i(
TAG,
"onOutputBufferAvailable -> name: ${codec.name}, index: $index, info: ${info.size}, thread: ${Thread.currentThread()}"
)
val outputBuffer: ByteBuffer = codec.getOutputBuffer(index) ?: return
if ((info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// the codec-config buffer (AudioSpecificConfig) is not an audio frame, so don't wrap it in an ADTS header
codec.releaseOutputBuffer(index, false)
return
}
if (info.size > 0) {
val aacData = ByteArray(info.size + 7)
addAdtsHeader(aacData, aacData.size, 16000, 1)
outputBuffer.get(aacData, 7, info.size)
aacOutputStream.write(aacData)
}
codec.releaseOutputBuffer(index, false)
if ((info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { // flags is a bit mask, so test the bit rather than compare with ==
Log.i(TAG, "onOutputBufferAvailable -> encoding finished...") // todo: stop and release the codec here as well
pcmInputStream.close()
aacOutputStream.close()
if (continuation.isActive) {
Log.i(TAG, "pcmToAac -> encoding finished, resume before...")
continuation.resume(Unit)
Log.i(TAG, "pcmToAac -> encoding finished, resume after...")
}
}
}
override fun onOutputFormatChanged(
codec: MediaCodec,
format: MediaFormat
) {
Log.i(
TAG,
"onOutputFormatChanged -> name: ${codec.name}, format: ${
format.getString(MediaFormat.KEY_MIME)
}"
)
}
})
Log.i(TAG, "pcmToAac -> before start...")
mediaCodec.start()
Log.i(TAG, "pcmToAac -> after start...")
}
@Suppress("SameParameterValue")
private fun addAdtsHeader(packet: ByteArray, packetLen: Int, sampleRate: Int, channels: Int) {
val profile = 2 // AAC LC
val freqIdx = when(sampleRate){
96000 -> 0
88200 -> 1
64000 -> 2
48000 -> 3
44100 -> 4
32000 -> 5
24000 -> 6
22050 -> 7
16000 -> 8
12000 -> 9
11025 -> 10
8000 -> 11
else -> 4 // default to 44100
}
/**
0000 0x00
0001 0x01
0010 0x02
0011 0x03
0100 0x04
0101 0x05
0110 0x06
0111 0x07
1000 0x08
1001 0x09
1010 0x0A
1011 0x0B
1100 0x0C
1101 0x0D
1110 0x0E
1111 0x0F
*/
val chanCfg = channels // CPE = 1, mono = 1
packet[0] = 0xFF.toByte() // byte 1: syncword high 8 bits (1111 1111)
packet[1] = 0xF9.toByte() // byte 2: syncword low 4 bits (1111), ID = 1 (MPEG-2), layer = 00, protection_absent = 1
// byte 3: profile (2 bits) | sampling_frequency_index (4 bits) | private bit (0) | channel_configuration high bit
// for AAC LC / 16 kHz / mono this is 01 | 1000 | 0 | 0 -> 0110 0000
packet[2] = ((profile - 1 shl 6) + (freqIdx shl 2) + (chanCfg shr 2)).toByte()
// byte 4: channel_configuration low 2 bits | originality | home | copyright ID bit | copyright ID start | aac_frame_length bits 12..11
// the originality/home/copyright bits are all left at 0 here
packet[3] = (((chanCfg and 3) shl 6) + (packetLen shr 11)).toByte()
packet[4] = (packetLen and 0x7FF shr 3).toByte() // byte 5: aac_frame_length bits 10..3
// byte 6: aac_frame_length bits 2..0 | adts_buffer_fullness high 5 bits (fullness is 0x7FF here, i.e. VBR)
packet[5] = ((packetLen and 7 shl 5) + 0x1F).toByte()
packet[6] = 0xFC.toByte() // byte 7: adts_buffer_fullness low 6 bits | number_of_raw_data_blocks_in_frame = 0
}
Usage example
@Composable
internal fun MediaCodecScreen(
navHostController: NavHostController,
snackBarHostState: SnackbarHostState,
) {
val context: Context = LocalContext.current
val coroutineScope: CoroutineScope = rememberCoroutineScope()
val permissionLauncher = rememberLauncherForActivityResult(
contract = ActivityResultContracts.RequestMultiplePermissions()
) { map ->
coroutineScope.launch {
snackBarHostState.showSnackbar("權限獲取是否成功: ${map.values.all { it }}")
}
}
val launcher: ManagedActivityResultLauncher<Array<String>, Uri?> = rememberLauncherForActivityResult(
contract = ActivityResultContracts.OpenDocument()
) { uri: Uri? ->
uri?.apply {
Log.i(TAG, "MediaCodecScreen -> uri: $this")
val audioExtractManager = AudioExtractManager()
val contentValues: ContentValues = contentValuesOf(
MediaStore.Audio.Media.DISPLAY_NAME to "hello.aac",
MediaStore.Audio.Media.MIME_TYPE to MediaFormat.MIMETYPE_AUDIO_AAC,
MediaStore.Audio.Media.RELATIVE_PATH to Environment.DIRECTORY_MUSIC
)
val aacUri: Uri = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { // RELATIVE_PATH requires API 29, so gate on Q rather than O
context.contentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, contentValues)
} else {
FileProvider.getUriForFile(context, "${context.packageName}.provider", File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC),
"hello.aac"
))
} ?: return@apply
Log.i(TAG, "MediaCodecScreen -> aacUri: $aacUri")
coroutineScope.launch {
Log.i(TAG, "MediaCodecScreen -> before ${Thread.currentThread()}")
audioExtractManager.pcmToAac(context, this@apply, aacUri)
Log.i(TAG, "MediaCodecScreen -> after ${Thread.currentThread()}")
}
}
}
Column(
modifier = Modifier.fillMaxSize()
) {
Text(
text = "開發者",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
val intent = Intent(Settings.ACTION_APPLICATION_DEVELOPMENT_SETTINGS)
context.startActivity(intent)
},
color = Color.White
)
Text(
text = "發送信息",
Modifier
.padding(top = 10.dp)
.background(color = Color.Black, shape = RoundedCornerShape10)
.padding(all = 5.dp)
.clickable {
launcher.launch(arrayOf("*/*"))
},
color = Color.White
)
}
}
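Once it has run, you can pull the output back to the host and check that the raw ADTS stream plays as-is, with no extra format flags, since every frame carries its own header (the path assumes MediaStore kept the display name hello.aac under Music):
adb pull /sdcard/Music/hello.aac .
ffplay hello.aac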
Exploring why Android MediaPlayer can report the duration of a raw ADTS AAC stream
Usage:
val aacUri: Uri = Uri.Builder().scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.packageName).path(R.raw.audio1.toString()).build() // android.resource URIs need the package name as the authority
val mediaPlayer: MediaPlayer = MediaPlayer.create(context, aacUri)
Log.i(TAG, "MusicScreen -> duration: ${mediaPlayer.duration}")
mediaPlayer.release()
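As a cross-check, MediaMetadataRetriever should report a duration for the same raw ADTS resource; a small sketch (audio1 is the same raw resource as above, and .aac files are normally stored uncompressed in the APK, so the file-descriptor overload works):
// Sketch: read the duration of the ADTS AAC resource with MediaMetadataRetriever instead of MediaPlayer.
val retriever = MediaMetadataRetriever()
context.resources.openRawResourceFd(R.raw.audio1).use { afd ->
    retriever.setDataSource(afd.fileDescriptor, afd.startOffset, afd.length)
}
val durationMs = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
Log.i(TAG, "MediaMetadataRetriever duration: $durationMs ms")
retriever.release()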
getDuration() is a native method, shown below:
frameworks/base/media/jni/android_media_MediaPlayer.cpp
static jint
android_media_MediaPlayer_getDuration(JNIEnv *env, jobject thiz)
{
sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
if (mp == NULL ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
return 0;
}
int msec;
process_media_player_call( env, thiz, mp->getDuration(&msec), NULL, NULL );
ALOGV("getDuration: %d (msec)", msec);
return (jint) msec;
}
You can see it calls the native MediaPlayer's getDuration method.
frameworks/av/media/libmedia/mediaplayer.cpp
status_t MediaPlayer::getDuration_l(int *msec)
{
ALOGV("getDuration_l");
bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED |
MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE));
if (mPlayer != 0 && isValidState) {
int durationMs;
status_t ret = mPlayer->getDuration(&durationMs);
if (ret != OK) {
// Do not enter error state just because no duration was available.
durationMs = -1;
ret = OK;
}
if (msec) {
*msec = durationMs;
}
return ret;
}
ALOGE("Attempt to call getDuration in wrong state: mPlayer=%p, mCurrentState=%u",
mPlayer.get(), mCurrentState);
return INVALID_OPERATION;
}
status_t MediaPlayer::getDuration(int *msec)
{
Mutex::Autolock _l(mLock);
return getDuration_l(msec);
}
status_t MediaPlayer::attachNewPlayer(const sp<IMediaPlayer>& player)
{
status_t err = UNKNOWN_ERROR;
sp<IMediaPlayer> p;
{ // scope for the lock
Mutex::Autolock _l(mLock);
if ( !( (mCurrentState & MEDIA_PLAYER_IDLE) ||
(mCurrentState == MEDIA_PLAYER_STATE_ERROR ) ) ) {
ALOGE("attachNewPlayer called in state %d", mCurrentState);
return INVALID_OPERATION;
}
clear_l();
p = mPlayer;
mPlayer = player;
if (player != 0) {
mCurrentState = MEDIA_PLAYER_INITIALIZED;
err = NO_ERROR;
} else {
ALOGE("Unable to create media player");
}
}
if (p != 0) {
p->disconnect();
}
return err;
}
status_t MediaPlayer::setDataSource(
const sp<IMediaHTTPService> &httpService,
const char *url, const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
status_t err = BAD_VALUE;
if (url != NULL) {
const sp<IMediaPlayerService> service(getMediaPlayerService()); // 1
if (service != 0) {
sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource)); // 2
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(httpService, url, headers))) {
player.clear();
}
err = attachNewPlayer(player);
}
}
return err;
}
Here mPlayer is an IMediaPlayer, so this calls IMediaPlayer's getDuration; mPlayer itself is set in attachNewPlayer.
The code at mark 1:
frameworks/av/media/libmedia/IMediaDeathNotifier.cpp
// establish binder interface to MediaPlayerService
/*static*/const sp<IMediaPlayerService>
IMediaDeathNotifier::getMediaPlayerService()
{
ALOGV("getMediaPlayerService");
Mutex::Autolock _l(sServiceLock);
if (sMediaPlayerService == 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->waitForService(String16("media.player"));
if (binder == nullptr) {
return nullptr;
}
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
binder->linkToDeath(sDeathNotifier);
sMediaPlayerService = interface_cast<IMediaPlayerService>(binder);
}
ALOGE_IF(sMediaPlayerService == 0, "no media player service!?");
return sMediaPlayerService;
}
You can see it obtains the media.player service, and at mark 2 it calls create to build the player. We will not look at service registration here; explore it yourself if you are interested.
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId, const AttributionSourceState& attributionSource)
{
int32_t connId = android_atomic_inc(&mNextConnId);
// TODO b/182392769: use attribution source util
AttributionSourceState verifiedAttributionSource = attributionSource;
verifiedAttributionSource.pid = VALUE_OR_FATAL(
legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
verifiedAttributionSource.uid = VALUE_OR_FATAL(
legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
sp<Client> c = new Client(
this, verifiedAttributionSource, connId, client, audioSessionId);
ALOGV("Create new client(%d) from %s, ", connId,
verifiedAttributionSource.toString().c_str());
wp<Client> w = c;
{
Mutex::Autolock lock(mLock);
mClients.add(w);
}
return c;
}
Client is an inner class of MediaPlayerService and extends BnMediaPlayer:
frameworks/av/media/libmediaplayerservice/MediaPlayerService.h
class Client : public BnMediaPlayer
So calling getDuration on IMediaPlayer actually ends up in the getDuration method of MediaPlayerService's inner class Client.
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
status_t MediaPlayerService::Client::getDuration(int *msec)
{
ALOGV("getDuration");
sp<MediaPlayerBase> p = getPlayer(); // 1: returns mPlayer
if (p == 0) return UNKNOWN_ERROR;
status_t ret = p->getDuration(msec);
if (ret == NO_ERROR) {
ALOGV("[%d] getDuration = %d", mConnId, *msec);
} else {
ALOGE("getDuration returned %d", ret);
}
return ret;
}
status_t MediaPlayerService::Client::setDataSource_post(
const sp<MediaPlayerBase>& p,
status_t status)
{
ALOGV(" setDataSource");
if (status != OK) {
ALOGE(" error: %d", status);
return status;
}
// Set the re-transmission endpoint if one was chosen.
if (mRetransmitEndpointValid) {
status = p->setRetransmitEndpoint(&mRetransmitEndpoint);
if (status != NO_ERROR) {
ALOGE("setRetransmitEndpoint error: %d", status);
}
}
if (status == OK) {
Mutex::Autolock lock(mLock);
mPlayer = p;
}
return status;
}
status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64_t length)
{
ALOGV("setDataSource fd=%d (%s), offset=%lld, length=%lld",
fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);
struct stat sb;
int ret = fstat(fd, &sb);
if (ret != 0) {
ALOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));
return UNKNOWN_ERROR;
}
ALOGV("st_dev = %llu", static_cast<unsigned long long>(sb.st_dev));
ALOGV("st_mode = %u", sb.st_mode);
ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", static_cast<unsigned long long>(sb.st_size));
if (offset >= sb.st_size) {
ALOGE("offset error");
return UNKNOWN_ERROR;
}
if (offset + length > sb.st_size) {
length = sb.st_size - offset;
ALOGV("calculated length = %lld", (long long)length);
}
player_type playerType = MediaPlayerFactory::getPlayerType(this,
fd,
offset,
length);
sp<MediaPlayerBase> p = setDataSource_pre(playerType); // see below for why the player type resolves to NuPlayer
if (p == NULL) {
return NO_INIT;
}
// now set data source
return mStatus = setDataSource_post(p, p->setDataSource(fd, offset, length));
}
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
player_type playerType)
{
ALOGV("player type = %d", playerType);
// create the right type of player
sp<MediaPlayerBase> p = createPlayer(playerType);
if (p == NULL) {
return p;
}
std::vector<DeathNotifier> deathNotifiers;
// Listen to death of media.extractor service
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.extractor"));
if (binder == NULL) {
ALOGE("extractor service not available");
return NULL;
}
deathNotifiers.emplace_back(
binder, [l = wp<MediaPlayerBase>(p)]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("media.extractor died. Sending death notification.");
listener->sendEvent(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIAEXTRACTOR_PROCESS_DEATH);
} else {
ALOGW("media.extractor died without a death handler.");
}
});
{
using ::android::hidl::base::V1_0::IBase;
// Listen to death of OMX service
{
sp<IBase> base = ::android::hardware::media::omx::V1_0::
IOmx::getService();
if (base == nullptr) {
ALOGD("OMX service is not available");
} else {
deathNotifiers.emplace_back(
base, [l = wp<MediaPlayerBase>(p)]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("OMX service died. "
"Sending death notification.");
listener->sendEvent(
MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIACODEC_PROCESS_DEATH);
} else {
ALOGW("OMX service died without a death handler.");
}
});
}
}
// Listen to death of Codec2 services
{
for (std::shared_ptr<Codec2Client> const& client :
Codec2Client::CreateFromAllServices()) {
sp<IBase> hidlBase = client->getHidlBase();
::ndk::SpAIBinder aidlBase = client->getAidlBase();
auto onBinderDied = [l = wp<MediaPlayerBase>(p),
name = std::string(client->getServiceName())]() {
sp<MediaPlayerBase> listener = l.promote();
if (listener) {
ALOGI("Codec2 service \"%s\" died. "
"Sending death notification.",
name.c_str());
listener->sendEvent(
MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
MEDIACODEC_PROCESS_DEATH);
} else {
ALOGW("Codec2 service \"%s\" died "
"without a death handler.",
name.c_str());
}
};
if (hidlBase) {
deathNotifiers.emplace_back(hidlBase, onBinderDied);
} else if (aidlBase.get() != nullptr) {
deathNotifiers.emplace_back(aidlBase, onBinderDied);
}
}
}
}
Mutex::Autolock lock(mLock);
mDeathNotifiers.clear();
mDeathNotifiers.swap(deathNotifiers);
mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId, mAttributionSource,
mAudioAttributes, mAudioDeviceUpdatedListener);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
return p;
}
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType)
{
// determine if we have the right player type
sp<MediaPlayerBase> p = getPlayer();
if ((p != NULL) && (p->playerType() != playerType)) {
ALOGV("delete player");
p.clear();
}
if (p == NULL) {
p = MediaPlayerFactory::createPlayer(playerType, mListener,
VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid)));
}
if (p != NULL) {
p->setUID(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid)));
}
return p;
}
The code at mark 1 returns mPlayer, a member of MediaPlayerService's inner class Client:
sp<MediaPlayerBase> mPlayer;
Next, let's see how mPlayer gets initialized.
MediaPlayer invokes Client's member functions over IPC, which then go through setDataSource, setDataSource_pre, and setDataSource_post.
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(
player_type playerType,
const sp<MediaPlayerBase::Listener> &listener,
pid_t pid) {
sp<MediaPlayerBase> p;
IFactory* factory;
status_t init_result;
Mutex::Autolock lock_(&sLock);
if (sFactoryMap.indexOfKey(playerType) < 0) {
ALOGE("Failed to create player object of type %d, no registered"
" factory", playerType);
return p;
}
factory = sFactoryMap.valueFor(playerType);
CHECK(NULL != factory);
p = factory->createPlayer(pid);
if (p == NULL) {
ALOGE("Failed to create player object of type %d, create failed",
playerType);
return p;
}
init_result = p->initCheck();
if (init_result == NO_ERROR) {
p->setNotifyCallback(listener);
} else {
ALOGE("Failed to create player object of type %d, initCheck failed"
" (res = %d)", playerType, init_result);
p.clear();
}
return p;
}
The playerType selects the registered factory, which then creates the player.
Why the player type resolves to NuPlayer
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
player_type MediaPlayerFactory::getPlayerType(const sp<IMediaPlayer>& client,
int fd,
int64_t offset,
int64_t length) {
GET_PLAYER_TYPE_IMPL(client, fd, offset, length);
}
#define GET_PLAYER_TYPE_IMPL(a...) \
Mutex::Autolock lock_(&sLock); \
\
player_type ret = STAGEFRIGHT_PLAYER; \
float bestScore = 0.0; \
\
for (size_t i = 0; i < sFactoryMap.size(); ++i) { \
\
IFactory* v = sFactoryMap.valueAt(i); \
float thisScore; \
CHECK(v != NULL); \
thisScore = v->scoreFactory(a, bestScore); \
if (thisScore > bestScore) { \
ret = sFactoryMap.keyAt(i); \
bestScore = thisScore; \
} \
} \
\
if (0.0 == bestScore) { \
ret = getDefaultPlayerType(); \
} \
\
return ret;
After macro expansion this becomes:
thisScore = v->scoreFactory(client, fd, offset, length, bestScore); // note: the variadic a has been substituted
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.h
/*
**
** Copyright 2012, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#ifndef ANDROID_MEDIAPLAYERFACTORY_H
#define ANDROID_MEDIAPLAYERFACTORY_H
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/foundation/ABase.h>
namespace android {
class MediaPlayerFactory {
public:
class IFactory {
public:
virtual ~IFactory() { }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* /*url*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
int /*fd*/,
int64_t /*offset*/,
int64_t /*length*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<IStreamSource> &/*source*/,
float /*curScore*/) { return 0.0; }
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<DataSource> &/*source*/,
float /*curScore*/) { return 0.0; }
virtual sp<MediaPlayerBase> createPlayer(pid_t pid) = 0;
};
static status_t registerFactory(IFactory* factory,
player_type type);
static void unregisterFactory(player_type type);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const char* url);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
int fd,
int64_t offset,
int64_t length);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const sp<IStreamSource> &source);
static player_type getPlayerType(const sp<IMediaPlayer>& client,
const sp<DataSource> &source);
static sp<MediaPlayerBase> createPlayer(player_type playerType,
const sp<MediaPlayerBase::Listener> &listener,
pid_t pid);
static void registerBuiltinFactories();
private:
typedef KeyedVector<player_type, IFactory*> tFactoryMap;
MediaPlayerFactory() { }
static status_t registerFactory_l(IFactory* factory,
player_type type);
static Mutex sLock;
static tFactoryMap sFactoryMap;
static bool sInitComplete;
DISALLOW_EVIL_CONSTRUCTORS(MediaPlayerFactory);
};
} // namespace android
#endif // ANDROID_MEDIAPLAYERFACTORY_H
You can see the default implementations return 0.0.
frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
static player_type getDefaultPlayerType() {
return NU_PLAYER;
}
class NuPlayerFactory : public MediaPlayerFactory::IFactory {
public:
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* url,
float curScore) {
static const float kOurScore = 0.8;
if (kOurScore <= curScore)
return 0.0;
if (!strncasecmp("http://", url, 7)
|| !strncasecmp("https://", url, 8)
|| !strncasecmp("file://", url, 7)) {
size_t len = strlen(url);
if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
return kOurScore;
}
if (strstr(url,"m3u8")) {
return kOurScore;
}
if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) {
return kOurScore;
}
}
if (!strncasecmp("rtsp://", url, 7)) {
return kOurScore;
}
return 0.0;
}
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<IStreamSource>& /*source*/,
float /*curScore*/) {
return 1.0;
}
virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const sp<DataSource>& /*source*/,
float /*curScore*/) {
// Only NuPlayer supports setting a DataSource source directly.
return 1.0;
}
virtual sp<MediaPlayerBase> createPlayer(pid_t pid) {
ALOGV(" create NuPlayer");
return new NuPlayerDriver(pid);
}
};
So the default is NU_PLAYER.
From this it follows:
frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
status_t MediaPlayerService::Client::getDuration(int *msec)
{
ALOGV("getDuration");
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
status_t ret = p->getDuration(msec);
if (ret == NO_ERROR) {
ALOGV("[%d] getDuration = %d", mConnId, *msec);
} else {
ALOGE("getDuration returned %d", ret);
}
return ret;
}
getPlayer() returns the NuPlayerDriver, so p->getDuration(msec) calls NuPlayerDriver's getDuration:
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
NuPlayerDriver::NuPlayerDriver(pid_t pid)
: mState(STATE_IDLE),
mIsAsyncPrepare(false),
mAsyncResult(UNKNOWN_ERROR),
mSetSurfaceInProgress(false),
mDurationUs(-1), // defaults to -1
mPositionUs(-1),
mSeekInProgress(false),
mPlayingTimeUs(0),
mRebufferingTimeUs(0),
mRebufferingEvents(0),
mRebufferingAtExit(false),
mLooper(new ALooper),
mMediaClock(new MediaClock),
mPlayer(new NuPlayer(pid, mMediaClock)),
mPlayerFlags(0),
mCachedPlayerIId(PLAYER_PIID_INVALID),
mMetricsItem(NULL),
mClientUid(-1),
mAtEOS(false),
mLooping(false),
mAutoLoop(false) {
ALOGD("NuPlayerDriver(%p) created, clientPid(%d)", this, pid);
mLooper->setName("NuPlayerDriver Looper");
mMediaClock->init();
// set up an analytics record
mMetricsItem = mediametrics::Item::create(kKeyPlayer);
mLooper->start(
false, /* runOnCallingThread */
true, /* canCallJava */
PRIORITY_AUDIO);
mLooper->registerHandler(mPlayer); // the ALooper/AHandler mechanism
mPlayer->init(this);
}
status_t NuPlayerDriver::getDuration(int *msec) {
Mutex::Autolock autoLock(mLock);
if (mDurationUs < 0) {
return UNKNOWN_ERROR;
}
*msec = (mDurationUs + 500LL) / 1000;
return OK;
}
void NuPlayerDriver::notifyDuration(int64_t durationUs) {
Mutex::Autolock autoLock(mLock);
mDurationUs = durationUs;
}
frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
void NuPlayer::schedulePollDuration() {
sp<AMessage> msg = new AMessage(kWhatPollDuration, this);
msg->setInt32("generation", mPollDurationGeneration);
msg->post();
}
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
// ...
case kWhatPollDuration:
{
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
if (generation != mPollDurationGeneration) {
// stale
break;
}
int64_t durationUs;
if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { // 1
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
driver->notifyDuration(durationUs); // 1
}
}
msg->post(1000000LL); // poll again in a second.
break;
}
}
}
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
sp<GenericSource> source =
new GenericSource(notify, mUIDValid, mUID, mMediaClock);
ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
fd, (long long)offset, (long long)length, source.get());
status_t err = source->setDataSource(fd, offset, length);
if (err != OK) {
ALOGE("Failed to set data source!");
source = NULL;
}
msg->setObject("source", source);
msg->post();
mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
}
You can see it calls back into NuPlayerDriver's notifyDuration shown above.
mSource here is a GenericSource:
frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
Mutex::Autolock _l(mLock);
*durationUs = mDurationUs;
return OK;
}
status_t NuPlayer::GenericSource::initFromDataSource() {
sp<IMediaExtractor> extractor;
sp<DataSource> dataSource;
{
Mutex::Autolock _l_d(mDisconnectLock);
dataSource = mDataSource;
}
CHECK(dataSource != NULL);
mLock.unlock();
// This might take long time if data source is not reliable.
extractor = MediaExtractorFactory::Create(dataSource, NULL);
if (extractor == NULL) {
ALOGE("initFromDataSource, cannot create extractor!");
mLock.lock();
return UNKNOWN_ERROR;
}
sp<MetaData> fileMeta = extractor->getMetaData();
size_t numtracks = extractor->countTracks();
if (numtracks == 0) {
ALOGE("initFromDataSource, source has no track!");
mLock.lock();
return UNKNOWN_ERROR;
}
mLock.lock();
mFileMeta = fileMeta;
if (mFileMeta != NULL) {
int64_t duration;
if (mFileMeta->findInt64(kKeyDuration, &duration)) {
mDurationUs = duration;
}
}
int32_t totalBitrate = 0;
mMimes.clear();
for (size_t i = 0; i < numtracks; ++i) {
sp<IMediaSource> track = extractor->getTrack(i);
if (track == NULL) {
continue;
}
sp<MetaData> meta = extractor->getTrackMetaData(i);
if (meta == NULL) {
ALOGE("no metadata for track %zu", i);
return UNKNOWN_ERROR;
}
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
ALOGV("initFromDataSource track[%zu]: %s", i, mime);
// Do the string compare immediately with "mime",
// we can't assume "mime" would stay valid after another
// extractor operation, some extractors might modify meta
// during getTrack() and make it invalid.
if (!strncasecmp(mime, "audio/", 6)) {
if (mAudioTrack.mSource == NULL) {
mAudioTrack.mIndex = i;
mAudioTrack.mSource = track;
mAudioTrack.mPackets =
new AnotherPacketSource(mAudioTrack.mSource->getFormat());
if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
mAudioIsVorbis = true;
} else {
mAudioIsVorbis = false;
}
mMimes.add(String8(mime));
}
} else if (!strncasecmp(mime, "video/", 6)) {
if (mVideoTrack.mSource == NULL) {
mVideoTrack.mIndex = i;
mVideoTrack.mSource = track;
mVideoTrack.mPackets =
new AnotherPacketSource(mVideoTrack.mSource->getFormat());
// video always at the beginning
mMimes.insertAt(String8(mime), 0);
}
}
mSources.push(track);
int64_t durationUs;
if (meta->findInt64(kKeyDuration, &durationUs)) {
if (durationUs > mDurationUs) {
mDurationUs = durationUs;
}
}
int32_t bitrate;
if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
totalBitrate += bitrate;
} else {
totalBitrate = -1;
}
}
ALOGV("initFromDataSource mSources.size(): %zu mIsSecure: %d mime[0]: %s", mSources.size(),
mIsSecure, (mMimes.isEmpty() ? "NONE" : mMimes[0].c_str()));
if (mSources.size() == 0) {
ALOGE("b/23705695");
return UNKNOWN_ERROR;
}
// Modular DRM: The return value doesn't affect source initialization.
(void)checkDrmInfo();
mBitrate = totalBitrate;
return OK;
}
The mDurationUs member is filled in from sp<MetaData> fileMeta = extractor->getMetaData(); via MetaData's findInt64 lookup of kKeyDuration.
I'm about to collapse from writing this.
How have we still not reached the source files below?
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/util/AacAdtsExtractor.cpp
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/AacBqToPcmCbRenderer.cpp
zsh@zsh:~/android_asop/frameworks$ vim ./wilhelm/src/android/AudioPlayer_to_android.cpp
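The write-up stops before the ADTS extractor itself, but the idea it implements can be sketched in a few lines: each ADTS frame decodes to 1024 PCM samples, so walking the stream with aac_frame_length and counting frames yields the duration that eventually lands in kKeyDuration. This is an illustration of the approach, not the AOSP code:
// Sketch: estimate the duration of a raw ADTS AAC stream by counting frames.
// aac_frame_length sits in bytes 3..5 of each header; every frame carries 1024 samples.
fun adtsDurationUs(data: ByteArray, sampleRate: Int): Long {
    var offset = 0
    var frames = 0L
    while (offset + 7 <= data.size) {
        if ((data[offset].toInt() and 0xFF) != 0xFF || (data[offset + 1].toInt() and 0xF0) != 0xF0) break
        val frameLength = ((data[offset + 3].toInt() and 0x03) shl 11) or
                ((data[offset + 4].toInt() and 0xFF) shl 3) or
                ((data[offset + 5].toInt() and 0xE0) shr 5)
        if (frameLength <= 0) break
        frames++
        offset += frameLength
    }
    return frames * 1024L * 1_000_000L / sampleRate
}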
