I'm trying to get an mp4 file. I shoot video using the camera2 api and can save this as an avc file using MediaCodec. But I do not understand how I can redo this code, for encoding into an mp4 file using MediaMuxer. Sorry for my English, this is translated through a translator
private class EncoderCallback : MediaCodec.Callback() {
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index2: Int,
info: MediaCodec.BufferInfo
) {
outPutByteBuffer = mCodec!!.getOutputBuffer(index2)
val outDate = ByteArray(info.size)
outPutByteBuffer!![outDate]
try {
Log.i("EncoderCallBack", " outDate.length : " + outDate.size)
outputStream!!.write(outDate, 0, outDate.size)
} catch (e: IOException) {
e.printStackTrace()
}
mCodec!!.releaseOutputBuffer(index2, false)
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.i("EncoderCallBack", "Error: $e")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.i("EncoderCallBack", "encoder output format changed: $format")
}
}
after initializing MediaCodec, I record the video:
var texture: SurfaceTexture = textureViewOver
texture.setDefaultBufferSize(320, 240)
surface = Surface(texture)
builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
builder.addTarget(surface)
builder.addTarget(mEncoderSurface!!)
mCameraDevice.createCaptureSession(
mutableListOf(surface, mEncoderSurface),
object : CameraCaptureSession.StateCallback() {...
the muxer code is missing:
override fun onOutputBufferAvailable(
codec: MediaCodec,
index2: Int,
info: MediaCodec.BufferInfo
) {
outPutByteBuffer = mCodec!!.getOutputBuffer(index2)
mediaMuxer?.writeSampleData(trackIndex, outPutByteBuffer, info)
mCodec!!.releaseOutputBuffer(index2, false)
}
Related
I've been implementing a video encoder which takes raw RGB frame data and encodes/muxes it into a H264 video.
Initially I was using a sync implementation with a while loop based on examples found in https://bigflake.com/mediacodec/, which worked fine.
To improve performance and readability I wanted to switch over to an asynchronous implementation, however I ran into an issue:
calling signalEndOfInputStream often does not set the MediaCodec.BUFFER_FLAG_END_OF_STREAM flag on MediaCodec.BufferInfo
I'm not sure when I should be sending that signal (ideally it would be in the finalize function, however when I tried that I never received the BUFFER_FLAG_END_OF_STREAM flag at all.)
The encoder API looks as follows:
package com.app.encoder
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.os.Environment
import android.util.Log
import java.io.File
import java.io.IOException
import java.nio.ByteBuffer
import java.util.*
class VideoEncoder(private val width: Int, private val height: Int, private val frameRate: Int, bitRate: Int, private val fileName: String) : MediaCodec.Callback() {
private val format = MediaFormat.createVideoFormat(MIME_TYPE, width, height)
private var encoder = MediaCodec.createEncoderByType(MIME_TYPE)
private var surface: InputSurface
private lateinit var muxer: MediaMuxer
private var trackIndex: Int = -1
private var muxerStarted = false
private val sync = Object()
private var encoderDone = false
private val pendingBuffers: Queue<Pair<Int, MediaCodec.BufferInfo>> = LinkedList()
companion object {
const val MIME_TYPE = "video/avc"
const val IFRAME_INTERVAL = 10
const val TAG = "VideoEncoder"
}
init {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL)
encoder.setCallback(this)
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
surface = InputSurface(encoder.createInputSurface())
encoder.start()
}
/**
* Prepares the media muxer
*/
fun init() {
val path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
val file = File(path, fileName)
try {
muxer = MediaMuxer(file.path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
} catch (ioe: IOException) {
throw RuntimeException("Unable to create MediaMuxer", ioe)
}
}
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
return // Unused
}
/**
* Starts the MediaMuxer and processes the queue (if any)
*/
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.d(TAG, "onOutputFormatChanged")
trackIndex = muxer.addTrack(format)
muxer.start()
muxerStarted = true
Log.d(TAG, "MediaMuxer started")
val queueIterator = pendingBuffers.iterator()
while (queueIterator.hasNext()) {
val p = queueIterator.next()
mux(p.first, p.second)
queueIterator.remove()
}
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, info: MediaCodec.BufferInfo) {
mux(index, info)
}
/**
* Pushes encoded data into the muxer, queue's it if the muxer was not yet started
*/
private fun mux(index: Int, info: MediaCodec.BufferInfo) {
if (!muxerStarted) {
pendingBuffers.add(Pair(index, info))
return
}
if (info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
encoder.releaseOutputBuffer(index, false)
return
}
val outputBuffer = encoder.getOutputBuffer(index)!!
if (info.size != 0) {
muxer.writeSampleData(trackIndex, outputBuffer, info)
}
encoder.releaseOutputBuffer(index, false)
// This flag is often not set after signalEndOfInputStream(), causing a timeout in finalize()
if ((info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
synchronized(sync) {
encoderDone = true
sync.notifyAll()
}
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
// TODO
Log.d(TAG, "onError")
}
/**
* Pushes a frame into the encoder using a GLES20 texture
*/
fun addFrame(frameIndex: Int, data: ByteArray, endOfStream: Boolean) {
if (endOfStream) {
encoder.signalEndOfInputStream()
}
surface.makeCurrent()
surface.generateSurfaceFrame(width, height, ByteBuffer.wrap(data))
surface.setPresentationTime(frameIndex, frameRate)
surface.swapBuffers()
surface.releaseEGLContext()
}
/**
* Awaits for the encoder to finish
*/
fun finalize() {
// encoder.signalEndOfInputStream() <- I would prefer to send the signal here, but that does not work at all
Log.d(TAG, "Finalizing")
val waitUntil = System.currentTimeMillis() + 10000
var timedOut = false
synchronized(sync) {
while (!encoderDone) {
try {
sync.wait(1000)
} catch (_: InterruptedException) {
}
if (System.currentTimeMillis() > waitUntil) {
timedOut = true
break
}
}
}
Log.d(TAG, "Finalized")
release()
if (timedOut) {
throw RuntimeException("Timeout waiting for encoder to complete")
}
}
/**
* Releases any related objects
*/
private fun release() {
encoder.stop()
encoder.release()
surface.release()
if (muxerStarted) {
muxer.stop()
}
muxer.release()
}
}
I instantiate the encoder, call init(), addFrame() all the images and finally wait for the encoder to finish using finalize()
In the above implementation, I have a 50/50 chance that the BUFFER_FLAG_END_OF_STREAM flag is set, so I'm not sure what I'm doing wrong here
I am decoding an mp3 file, first I convert the mp3 file into a chunks of byteArray of size 1000 and put it in a circularArray and then pass it to mediaCodec callback for decoding (decode one byteArray at a time), I follow this link. It is working fine for Samsung devices, but if I use other than Samsung devices (Vivo, pixel 3a) it crashes at the mediaCodec.getInputBuffer(index) in the callback of onInputBufferAvailable by giving the exception IllegalStateException. My code is as follows:
var decoder: MediaCodec = MediaCodec.createDecoderByType("audio/mpeg")
decoder.configure(format, null, null, 0)
decoder.setCallback(object : MediaCodec.Callback() {
override fun onInputBufferAvailable(mediaCodec: MediaCodec, i: Int) {
while (true) {
if (circularArray!!.size() > 0) {
val data: ByteArray = circularArray.popFirst()
val info = MediaCodec.BufferInfo()
val buffer = mediaCodec.getInputBuffer(i)
buffer!!.put(data, 0, data.size)
mediaCodec.queueInputBuffer(i, 0, data.size, 0, 0)
break
}
}
}
override fun onOutputBufferAvailable(mediaCodec: MediaCodec, i: Int, info: MediaCodec.BufferInfo) {
//DECODING PACKET ENDED
val outBuffer = mediaCodec.getOutputBuffer(i)
val chunk = ByteArray(info.size)
outBuffer!![chunk] // Read the buffer all at once
outBuffer!!.clear()
Log.d(TAG, "onOutputBufferAvailable: ${info.size}")
audioTrack!!.write(chunk, info.offset, info.offset + info.size)
mediaCodec.releaseOutputBuffer(i, false)
}
override fun onError(mediaCodec: MediaCodec, e: MediaCodec.CodecException) {}
override fun onOutputFormatChanged(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {}
})
decoder!!.start()
I converted my file like this
val tempBuf = ByteArray(1000)
var byteRead: Int
try {
val bufferedInputStream = BufferedInputStream(FileInputStream(mp3File))
while (bufferedInputStream.read(tempBuf).also { byteRead = it } != -1) {
circularArray.addLast(tempBuf.copyOf())
}
bufferedInputStream.close()
Thread(aacDecoderAndPlayRunnable).start()
} catch (e: java.lang.Exception) {
Log.d(TAG, "fileToInputStream: ${e.message}")
e.printStackTrace()
null
}
The exception where the app crashes is
Even if I try to get the format form mediaCodec in the callback, it gives an exception and crashes anyway. I also checked supportedTypes from the codec it supports audio/mpeg.
First of all, the MediaCodec works with a queue of input buffers. And you can read more about it in the docs.
The second parameter of the onInputBufferAvailable callback is the index of the buffer. When calling getInputBuffer() you must pass this index instead of 0:
val buffer = mediaCodec.getInputBuffer(i)
Second, consider using the MediaExtractor instead of reading the file yourself. It supplies you will presentation timestamps and flags to pass into queueInputBuffer().
Third, you need to remove the while (true) loop. You can only queue one buffer per callback.
Firstly, I know using OpenGL ES is more optimized, yet here not a choice.
So, when user would be able to save frames with original size in a H.264 live-stream, there are two scenarios, which would have better performance-wise?
Using MediaCodec in asynchronous mode, get YUV image and show that image on an ImageView. (Does it have overhead compared to second option??)
Using MediaCodec in synchronous mode, set TextureView's surface as MediaCodec input surface and whenever user wants to get screenshot, use textureView.getBitmap()
SurfaceView which cannot retrieve the (frame) bitmap after render because it's an output element so failing, no argument.
Code for option 1:
val frame = ...//ByteArray from server
mediaCodec.setCallback(object : MediaCodec.Callback() {
override fun onInputBufferAvailable(
_codec: MediaCodec,
index: Int
) {
try {
val buffer = _codec.getInputBuffer(index)
buffer?.put(frame)
mediaCodec.queueInputBuffer(
index,
0,
data.size,
0,
0
)
} catch (e: Exception) {
try {
_codec.flush()
} catch (e: Exception) {
}
}
}
override fun onOutputBufferAvailable(
_codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
try {
val info = MediaCodec.BufferInfo()
val outputIndex = index
val image: Image? = _codec.getOutputImage(outputIndex)
if (image == null) {
return
}
val rect = image.cropRect
val yuvImage = YuvImage(
YUV_420_888toNV21(image),
NV21,
rect.width(),
rect.height(),
null
)
val stream = ByteArrayOutputStream()
yuvImage.compressToJpeg(
Rect(0, 0, rect.width(), rect.height()),
100,
stream
)
frameBitmap =
BitmapFactory.decodeByteArray(
stream.toByteArray(),
0,
stream.size()
)
imageView.setImageBitmap(frameBitmap)
_codec.stop()
stream.close()
image.close()
if (outputIndex >= 0) {
_codec.releaseOutputBuffer(outputIndex, false)
}
} catch (e: Exception) {
}
}
override fun onError(
_codec: MediaCodec,
e: MediaCodec.CodecException
) {
}
override fun onOutputFormatChanged(
_codec: MediaCodec,
format: MediaFormat
) {
}
})
try {
mediaCodec.start()
} catch (e: Exception) {
mediaCodec.flush()
}
Code for option 2:
val frame = ...//ByteArray from server
try {
val index = mediaCodec.dequeueInputBuffer(-1)
if (index >= 0) {
val buffer = mediaCodec.getInputBuffer(index)
buffer?.put(frame)
mediaCodec.queueInputBuffer(index, 0, data.size, 0, 0)
val info = MediaCodec.BufferInfo()
val outputIndex = mediaCodec.dequeueOutputBuffer(info, 0)
if (outputIndex >= 0) {
mediaCodec.releaseOutputBuffer(outputIndex, true)
)
lastRenderTime = System.currentTimeMillis()
}
} else {
}
} catch (e: Exception) {
//mediaCodec.flush()
}
Using Mp4Parser
when I try to add a watermark on video download from pixels it throws an error as below
2021-11-24 10:43:12.879 5748-5959/com.example.fastsaveapp E/Mp4Composer:
**This device cannot codec with that setting. Check width, height, bitrate, and video format.**
**android.media.MediaCodec$CodecException: Error 0xfffffc0e**
at android.media.MediaCodec.native_configure(Native Method)
at android.media.MediaCodec.configure(MediaCodec.java:1882)
at com.example.fastsaveapp.mp4compose.composer.VideoComposer.setUp(VideoComposer.java:78)
at com.example.fastsaveapp.mp4compose.composer.Mp4ComposerEngine.compose(Mp4ComposerEngine.java:198)
**at com.example.fastsaveapp.mp4compose.composer.Mp4Composer$2.run(Mp4Composer.java:319)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607)**
at java.lang.Thread.run(Thread.java:761)
but video downloaded from youtube or from any Instagram video works like charm or from media gallery,
what can be done to add a watermark or encode video with such a high bitrate video like download from Pexels or so..
Mp4Composer(videoPath!!, getDestinationPath())
.filter(GlWatermarkFilter(bitmap, getRepostWatermarkPosition()))
.listener(object : Mp4Composer.Listener {
override fun onProgress(progress: Double) {
mainScoopLauncher {
loadStateEnable(true)
val progressValue = (progress * 100).toString()
val isUnder10Percentage = progressValue.take(2).contains(".")
val under10PercentageValue = "${progress * 100}".take(1) + "%"
val upto10PercentageValue = progressValue.take(2) + "%"
val loadValue =
if (isUnder10Percentage) under10PercentageValue else upto10PercentageValue
binding.txtPercentage.text = loadValue
}
}
override fun onCurrentWrittenVideoTime(timeUs: Long) {
mainScoopLauncher { loadStateEnable(true) }
}
override fun onCompleted() {
mainScoopLauncher {
loadStateEnable(false)
toast("Repost Video Complate")
}
}
override fun onCanceled() {
logger("cancelled")
mainScoopLauncher {
loadStateEnable(false)
toast("Repost Process Cancel..")
}
}
override fun onFailed(exception: Exception?) {
mainScoopLauncher {
loadStateEnable(false)
toast("Repost Process Failed..")
}
}
}).start()
I am building a streaming app. I am facing a problem, here is code
I want to live stream camera feed to the server and I hope I will get ByteBuffer in onOutputBufferAvailable(). I am getting output buffer but I am never getting MediaCodec.BUFFER_FLAG_END_OF_STREAM when I call stopVideoCapture()
Here are code segments
Creating Media Codec
private val recorderStreamSurface by lazy {
val format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, width, height)
val frameRate = 30 // 30 fps
var recorderStreamSurface: Surface? = null
// Set some required properties. The media codec may fail if these aren't defined.
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000) // 6Mbps
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate)
format.setInteger(MediaFormat.KEY_CAPTURE_RATE, frameRate)
format.setInteger(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 1000000 / frameRate)
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1) // 1 seconds between I-frames
videoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE)
// Create a MediaCodec encoder and configure it. Get a Surface we can use for recording into.
try {
videoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
recorderStreamSurface = videoEncoder.createInputSurface()
videoEncoder.setCallback(object : MediaCodec.Callback() {
override fun onError(codec: MediaCodec, exception: MediaCodec.CodecException) {
Log.d(TAG, "==onError $codec $exception")
serverChannel.onError(exception)
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.d(TAG, "video encoder: output format changed")
}
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
Log.d(TAG, "video encoder: returned input buffer: $index")
val frameData: ByteArray
frameData = queue.take().array()
val inputData = codec.getInputBuffer(index)
inputData!!.clear()
inputData.put(frameData)
codec.queueInputBuffer(index, 0, frameData.size, 0, 0)
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, info: MediaCodec.BufferInfo) {
Log.d(TAG, "video encoder: returned output buffer: $index flag : ${info.flags}")
Log.d(TAG, "video encoder: returned buffer of size " + info.size)
if ((info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.i(TAG,"serverChannel.onCompleted()1")
}
videoEncoder.releaseOutputBuffer(index, false)
}
})
videoEncoder.start()
} catch (e: IOException) {
videoEncoder.stop()
videoEncoder.release()
serverChannel.onError(e)
}
recorderStreamSurface
}
local variables
lateinit var videoEncoder: MediaCodec
val queue: ArrayBlockingQueue<ByteBuffer> = ArrayBlockingQueue<ByteBuffer>(10)
val targets by lazy { listOf(viewFinder.holder.surface, recorderStreamSurface!!) }
private const val VIDEO_MIME_TYPE = "video/avc"
val cameraId = "1"
val fps = 30
val width = 1080
val height = 1920
Record Request
private val recordRequest: CaptureRequest by lazy {
// Capture request holds references to target surfaces
session.device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD).apply {
// Add the preview and recording surface targets
for (target: Surface in targets) {
addTarget(target)
}
// Sets user requested FPS for all targets
set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
}.build()
}
and finally start and stop recording
private fun startVideoCapture() {
// Prevents screen rotation during the video recording
requireActivity().requestedOrientation =
ActivityInfo.SCREEN_ORIENTATION_LOCKED
session.setRepeatingRequest(previewRequest, null, cameraHandler)
// Start recording repeating requests, which will stop the ongoing preview
// repeating requests without having to explicitly call `session.stopRepeating`
session.setRepeatingRequest(recordRequest, null, cameraHandler)
recordingStartMillis = System.currentTimeMillis()
Log.d(TAG, "Recording started")
}
private fun stopVideoCapture() {
// Unlocks screen rotation after recording finished
requireActivity().requestedOrientation =
ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
videoEncoder.stop()
videoEncoder.release()
Log.d(TAG, "Recording stopped")
session.setRepeatingRequest(previewRequest, null, cameraHandler)
}
you must pass as parameter the flag BUFFER_FLAG_END_OF_STREAM with the last data to encode.
codec.queueInputBuffer(index, 0, frameData.size, 0, BUFFER_FLAG_END_OF_STREAM)