解析M2TS成h264视频流与AAC音频流然后使用MediaCodec进行播放,然而视频卡顿严重,MediaCodec给出的日志是错误的视频帧,我将接收到的TS包保存进ts文件,然后利用一些播放软件播放却没有卡顿问题,下面是我的代码,希望有相关处理经验的人帮忙看看问题出在哪,可以解决的可有偿
UDP接收数据线程
```kotlin
//缓存接收到的TS包
private val tsPacketQueue: LinkedBlockingQueue<ByteArray> = LinkedBlockingQueue()
//缓存处理好的H264数据
private var h264Queue: LinkedBlockingQueue<MediaFrame> = LinkedBlockingQueue()
//缓存处理好的AAC音频数据
private var aacQueue: LinkedBlockingQueue<MediaFrame> = LinkedBlockingQueue()
//udp线程,接受RTP数据包并解析成一个个Ts包
private val rtpThread = Thread {
udpSocket = DatagramSocket(20011)
var lastSeqNo: Int? = null
try {
udpSocket!!.receiveBufferSize = 2 * 1024 * 1024
val packet = DatagramPacket(ByteArray(1328), 1328)
//会话成功建立 开启udp监听
while (true) {
udpSocket!!.receive(packet)
var startIndex = 12
var endIndex = 12 + 188
while (endIndex <= packet.data.size) {
val tsPacket = ByteArray(TS_PACKET_LENGTH)
System.arraycopy(packet.data, startIndex, tsPacket, 0, tsPacket.size)
if (tsPacket[0] == TS_PACKET_SYNC) {
tsPacketQueue.put(tsPacket)
} else {
Log.d(TAG, "error ts packet:[${tsPacket.toHexString()}]")
}
startIndex += 188
endIndex += 188
}
}
} catch (e: InterruptedException) {
Log.d(TAG, "RTPThread Interrupted")
} catch (e: Exception) {
e.printStackTrace()
} finally {
Log.d(TAG, "UDP Closed")
udpSocket?.close()
}
}
解析Mpeg2Ts线程
```kotlin
//解析ts包线程
private val tsParseThread = Thread {
//暂存h264视频帧
val h264PesStream = ByteArrayOutputStream(H264_STREAM_SIZE)
//暂存aac音频帧
val aacPesStream = ByteArrayOutputStream(AAC_STREAM_SIZE)
try {
while (true) {
val tsPacket = tsPacketQueue.take()
//ts包的数据头
val tsHeader = ByteArray(4)
System.arraycopy(tsPacket, 0, tsHeader, 0, tsHeader.size)
//ts包的数据体
val tsPayload = ByteArray(TS_PACKET_LENGTH - 4)
System.arraycopy(tsPacket, 4, tsPayload, 0, tsPayload.size)
//是否为一帧的第一包
val payloadIndicator = (tsHeader[1].toInt() ushr 6) and 0x01
//获取Ts的pid 用于区分节目类型
val pid = ((tsHeader[1].toInt() and 0x1F) shl 8) or (tsHeader[2].toInt() and 0xFF)
//是否有适配域标识
val adapterIndicator = (tsHeader[3].toInt() ushr 4) and 0x03
//获取连续性计数器
val continuityCounter = tsHeader[3].toInt() and 0x0F
if (pid == H264_PID) {
parseVideoTs(
TsPacket(
continuityCounter, payloadIndicator, adapterIndicator, tsPayload
), h264PesStream
)
}
if (pid == ACC_PID) {
//音频流包
parseAudioTs(
TsPacket(
continuityCounter, payloadIndicator, adapterIndicator, tsPayload
), aacPesStream
)
}
}
} catch (e: InterruptedException) {
Log.d(TAG, "TSParseThread Interrupted")
} catch (e: Exception) {
e.printStackTrace()
}
}
//解析音频Ts包
private fun parseAudioTs(
tsPacket: TsPacket, aacPesStream: ByteArrayOutputStream
) {
//若不是第一包且pes包为空,丢弃包
if (tsPacket.payloadIndicator != 1 && aacPesStream.size() == 0) {
return
}
//若是第一包,将之前的pes包数据发出并清空
if (tsPacket.payloadIndicator == 1 && aacPesStream.size() > 0) {
// 一帧的开头 将上一帧写入展示层
val pesPacket = aacPesStream.toByteArray()
val aacFrame = parsePES(pesPacket)
if (aacFrame != null && receive) {
aacQueue.put(aacFrame)
}
//重置pes包
aacPesStream.reset()
}
//若有适配域,则去掉适配域
if (tsPacket.adapterIndicator == 1) {
aacPesStream.write(tsPacket.tsPayload)
} else if (tsPacket.adapterIndicator == 3) {
val adapterSize = tsPacket.tsPayload[0].toInt() and 0xFF
val payload = ByteArray(tsPacket.tsPayload.size - adapterSize - 1)
System.arraycopy(tsPacket.tsPayload, adapterSize + 1, payload, 0, payload.size)
aacPesStream.write(payload)
}
}
//解析视频TS包
private fun parseVideoTs(
tsPacket: TsPacket, pesPktStream: ByteArrayOutputStream
) {
//若不是第一包且pes包为空,丢弃包
if (tsPacket.payloadIndicator != 1 && pesPktStream.size() == 0) {
Log.d(TAG, "丢弃不完整的包")
return
}
//若是第一包,将之前的pes包数据发出并清空
if (tsPacket.payloadIndicator == 1 && pesPktStream.size() > 0) {
// 一帧的开头 将上一帧写入展示层
val pesPacket = pesPktStream.toByteArray()
val h264Frame = parsePES(pesPacket)
if (h264Frame != null && receive) {
h264Queue.put(h264Frame)
} else {
Log.d(TAG, "包有问题,丢掉")
}
//重置pes包
pesPktStream.reset()
}
//若有适配域,则去掉适配域
if (tsPacket.adapterIndicator == 1) {
pesPktStream.write(tsPacket.tsPayload)
} else if (tsPacket.adapterIndicator == 3) {
val adapterSize = tsPacket.tsPayload[0].toInt() and 0xFF
val payload = ByteArray(tsPacket.tsPayload.size - adapterSize - 1)
System.arraycopy(tsPacket.tsPayload, adapterSize + 1, payload, 0, payload.size)
pesPktStream.write(payload)
}
}
//解析PES包
private fun parsePES(pesPacket: ByteArray): MediaFrame? {
// 解析pes包 得到PTS
val startCode = ByteArray(3)
System.arraycopy(pesPacket, 0, startCode, 0, startCode.size)
val stream = ByteArray(1)
System.arraycopy(pesPacket, 3, stream, 0, stream.size)
val pesHeader = ByteArray(3)
System.arraycopy(pesPacket, 6, pesHeader, 0, pesHeader.size)
//检查PES头部 若不匹配抛弃
val headerTag = (pesHeader[0].toInt() ushr 6) and 0x03
if (headerTag != 2) {
return null
}
val ptsTag = (pesHeader[1].toInt() ushr 7) and 0x01
if (ptsTag != 1) {
return null
}
val headerDataSize = pesHeader[2].toInt()
val headerData = ByteArray(5)
System.arraycopy(pesPacket, 9, headerData, 0, 5)
val pts = parsePTS(headerData)
val streamBytes = ByteArray(pesPacket.size - 9 - headerDataSize)
System.arraycopy(
pesPacket, 9 + headerDataSize, streamBytes, 0, pesPacket.size - 9 - headerDataSize
)
return MediaFrame(streamBytes, pts)
}
//通过pts字节码计算出pts时间
private fun parsePTS(ptsBytes: ByteArray): Long {
val ptsValue =
((ptsBytes[0].toLong() and 0x0E) shl 29) or ((ptsBytes[1].toLong() and 0xFF) shl 22) or ((ptsBytes[2].toLong() and 0xFE) shl 14) or ((ptsBytes[3].toLong() and 0xFF) shl 7) or ((ptsBytes[4].toLong() and 0xFE) ushr 1)
return ((ptsValue * 100) / 9)
}
MediaCodec解码播放线程
private fun initVideoMediaCodec() {
try {
//创建解码器 H264
mediaCodec = MediaCodec.createDecoderByType("video/avc")
Log.d(TAG, "H264 Decoder:${mediaCodec.codecInfo.name}")
//创建配置
val mediaFormat = createVideoFormat("video/avc", 1920, 1080)
//配置绑定mediaFormat和surface
mediaCodec.configure(mediaFormat, surface, null, 0)
} catch (e: IOException) {
e.printStackTrace()
//创建解码失败
Log.e(TAG, "Create Decoder Failure")
}
}
/**
* 解码播放
*/
fun decodePlay() {
mediaCodec.start()
//渲染H.264数据包
decodeThread.start()
}
private val decodeThread = Thread {
try {
var startFrameTime = 0L
var lastFrameTime = 0L
while (true) {
val h264frame = h264Queue.take()
val h264Packet = h264frame.bytes
val arrPts = h264frame.pts
if (startFrameTime == 0L) startFrameTime = arrPts
val pts = arrPts - startFrameTime
if (pts < lastFrameTime) continue
lastFrameTime = pts
// 查询10000毫秒后,如果dSP芯片的buffer全部被占用,返回-1;存在则大于0
val inIndex = mediaCodec.dequeueInputBuffer(10000)
if (inIndex >= 0) {
//根据返回的index拿到可以用的buffer
val byteBuffer = mediaCodec.getInputBuffer(inIndex)
//清空缓存
byteBuffer!!.clear()
//开始为buffer填充数据
byteBuffer.put(h264Packet)
//填充数据后通知mediacodec查询inIndex索引的这个buffer,
mediaCodec.queueInputBuffer(inIndex, 0, h264Packet.size, pts, 0)
}
val info = MediaCodec.BufferInfo()
//mediaCodec 查询 "mediaCodec的输出方队列"得到索引
var outIndex = mediaCodec.dequeueOutputBuffer(info, 10000)
while (outIndex >= 0) {
//如果surface绑定了,则直接输入到surface渲染并释放
mediaCodec.releaseOutputBuffer(outIndex, true)
outIndex = mediaCodec.dequeueOutputBuffer(info, 0)
}
}
} catch (e: InterruptedException) {
Log.d(TAG, "H264 Render Thread Interrupted")
} catch (e: Exception) {
e.printStackTrace()
}
}