I'm developing an Android app that takes screenshots and sends the captured data over the network in an infinite loop. After some time the system kills my app without any exception. I ran an experiment where everything was cut out of the app except the screen shooter, and the problem was still there. The Android profiler does not show any issues, but I can see very large AbstractList$Itr and byte[] entries, as in the attached image.
If I increase the RAM, the app lives longer. I haven't been able to find the leak for two weeks...
Full source code of the ScreenShooter class:
import android.content.Context
import android.content.Intent
import android.graphics.Bitmap
import android.graphics.PixelFormat
import android.graphics.Point
import android.hardware.display.VirtualDisplay
import android.media.ImageReader
import android.media.projection.MediaProjection
import android.media.projection.MediaProjectionManager
import android.os.Handler
import android.os.HandlerThread
import android.os.Process
import android.view.Display
import android.view.WindowManager
import my_module.service.network.Networking
import java.io.ByteArrayOutputStream
import java.util.concurrent.locks.ReentrantLock
class ScreenShooter(private val network: Networking, private val context: Context): ImageReader.OnImageAvailableListener {
private val handlerThread: HandlerThread
private val handler: Handler
private var imageReader: ImageReader? = null
private var virtualDisplay: VirtualDisplay? = null
private var projection: MediaProjection? = null
private val mediaProjectionManager: MediaProjectionManager
private var latestBitmap: Bitmap? = null
private val byteStream = ByteArrayOutputStream()
private val mut: ReentrantLock = ReentrantLock()
private var width: Int
private var height: Int
private val TAG = ScreenShooter::class.java.simpleName
private val DELAY = 1000L
init {
handlerThread = HandlerThread(
javaClass.simpleName,
Process.THREAD_PRIORITY_BACKGROUND
).apply { start() }
handler = Handler(handlerThread.looper)
val windowManager = (context.getSystemService(Context.WINDOW_SERVICE) as WindowManager)
val display: Display = windowManager.defaultDisplay
val size = Point()
display.getRealSize(size)
var width = size.x
var height = size.y
while (width * height > 2 shl 19) {
width = (width * 0.9).toInt()
height = (height * 0.9).toInt()
}
this.width = width
this.height = height
mediaProjectionManager = context.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
}
override fun onImageAvailable(reader: ImageReader?) {
mut.lock()
val image = imageReader?.acquireLatestImage() ?: return
var buffer = image.planes[0].buffer
buffer.rewind()
latestBitmap?.copyPixelsFromBuffer(buffer)
latestBitmap?.compress(Bitmap.CompressFormat.JPEG, 80, byteStream)
network.sendScreen(byteStream.toByteArray())
buffer.clear()
buffer = null
image.close()
byteStream.flush()
byteStream.reset()
byteStream.close()
mut.unlock()
Thread.sleep(DELAY)
}
fun startCapture(resultCode: Int, resultData: Intent) {
latestBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
imageReader = if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N_MR1) {
ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 2)
} else {
ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 1)
}
imageReader?.setOnImageAvailableListener(this, handler)
projection = mediaProjectionManager.getMediaProjection(resultCode, resultData)
virtualDisplay = projection!!.createVirtualDisplay(
"shooter",
width,
height,
context.resources.displayMetrics.densityDpi,
VIRT_DISPLAY_FLAGS,
imageReader?.surface,
null,
handler
)
projection?.registerCallback(projectionCallback, handler)
}
fun stopCapture() {
mut.lock()
imageReader?.setOnImageAvailableListener(null, null)
imageReader?.acquireLatestImage()?.close()
imageReader = null
projection?.unregisterCallback(projectionCallback)
projection?.stop()
virtualDisplay?.release()
imageReader?.close()
latestBitmap?.recycle()
mut.unlock()
}
private val projectionCallback = object : MediaProjection.Callback() {
override fun onStop() {
virtualDisplay?.release()
}
}
}
The methods startCapture() and stopCapture() are called from my background service on the main thread. network.sendScreen(byteStream.toByteArray()) just pushes the byte array (the screenshot) onto the OkHttp WebSocket queue:
fun sendScreen(bytes: ByteArray) {
val timestamp = System.currentTimeMillis()
val mss = Base64
.encodeToString(bytes, Base64.DEFAULT)
.replace("\n", "")
val message = """{ "data": {"timestamp":"$timestamp", "image":"$mss"} }"""
.trimMargin()
ws?.send(message.trimIndent())
}
I also often get messages like "Background concurrent copying GC freed ...", but I got those with only the network part running (without the screen shooter) and everything was OK. I see no errors in logcat and no leaks in the profiler.
I tried recreating the ScreenShooter object in my service, but the app started to crash even more often:
val t = HandlerThread(javaClass.simpleName).apply { start() }
Handler(t.looper).post {
while (true) {
Thread.sleep(10 * 1000L)
Log.d(TAG, "recreating the shooter")
Handler(mainLooper).post {
shooter!!.stopCapture()
shooter = null
shooter = ScreenShooter(network!!, applicationContext)
shooter!!.startCapture(resultCode, resultData!!)
}
}
}
I assumed that this approach would detach the previously created object from the GC roots and free all of its memory.
I have no energy left to keep searching for this leak. Thank you in advance.
Related
My program draws an oval when you touch the screen and moves the oval's coordinates to follow the held finger. When the finger is released, the oval should disappear.
Right now, when I touch the screen I get my oval, and when I move my finger the oval stays next to it, but when I release my finger I get this error:
E/Surface: freeAllBuffers: 1 buffers were freed while being dequeued!
E/AndroidRuntime: FATAL EXCEPTION: Thread-2
Process: com.example.lab10, PID: 24198
java.lang.IllegalStateException: Surface has already been released.
at android.view.Surface.checkNotReleasedLocked(Surface.java:774)
at android.view.Surface.unlockCanvasAndPost(Surface.java:473)
at android.view.SurfaceView$1.unlockCanvasAndPost(SurfaceView.java:1629)
at com.example.lab10.TaskSix$OvalsView.surfaceCreated$lambda$0(TaskSix.kt:136)
at com.example.lab10.TaskSix$OvalsView.$r8$lambda$fxYnyciEdLkjDqbt7r8jlbaJ-60(Unknown Source:0)
at com.example.lab10.TaskSix$OvalsView$$ExternalSyntheticLambda0.run(Unknown Source:2)
at java.lang.Thread.run(Thread.java:1012)
package com.example.lab10
import android.annotation.SuppressLint
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.content.Context
import android.graphics.Canvas
import android.graphics.Color
import android.graphics.Paint
import android.util.DisplayMetrics
import android.util.Log
import android.view.MotionEvent
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.View
import android.view.WindowManager
import androidx.constraintlayout.widget.ConstraintLayout
class TaskSix : AppCompatActivity() {
@SuppressLint("ClickableViewAccessibility")
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_task_six)
title = "Task#6"
val layout = findViewById<ConstraintLayout>(R.id.layout)
val ovalsView = OvalsView(this)
layout.setOnTouchListener(View.OnTouchListener { view, motionEvent ->
when (motionEvent.getActionMasked()) {
MotionEvent.ACTION_DOWN, MotionEvent.ACTION_POINTER_DOWN -> {
Log.d("qqq", "down")
val ID: Int = motionEvent.getPointerId(motionEvent.getActionIndex())
Log.d("qqq", "element with id: $ID")
layout.addView(ovalsView)
}
MotionEvent.ACTION_MOVE -> {
Log.d("qqq", "move")
var idx = 0
while (idx < motionEvent.pointerCount) {
val ID: Int = motionEvent.getPointerId(idx) // get the unique id for each touch
idx++
ovalsView._top = motionEvent.y
ovalsView._left = motionEvent.x
}
}
MotionEvent.ACTION_UP, MotionEvent.ACTION_POINTER_UP -> {
Log.d("qqq", "up")
layout.removeView(ovalsView)
}
else -> Log.d("qqq", "unhandled")
}
return@OnTouchListener true
})
}
private class OvalsView(context: Context) : SurfaceView(context), SurfaceHolder.Callback {
private val mSurfaceHolder: SurfaceHolder
private val mPainter = Paint()
private var mDrawingThread: Thread? = null
private val mDisplay = DisplayMetrics()
private var mDisplayWidth: Int
private var mDisplayHeight: Int
private var mRotation = 0f
var _top: Float = 0f
var _left: Float = 0f
init {
val wm = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
wm.defaultDisplay.getMetrics(mDisplay)
mDisplayWidth = mDisplay.widthPixels
mDisplayHeight = mDisplay.heightPixels
mPainter.isAntiAlias = true
mPainter.color = Color.RED
mSurfaceHolder = holder
mSurfaceHolder.addCallback(this)
}
private fun animateOvals(): Boolean {
mRotation += 1
return true
}
private fun drawWheel(canvas: Canvas) {
canvas.drawColor(Color.WHITE)
// canvas.rotate(mRotation, mDisplayWidth / 2f, mDisplayHeight / 2f)
//drawOval(float left, float top, float right, float bottom, @NonNull Paint paint)
// canvas.drawOval(mDisplayWidth/2f - 100f/2f - 100f, mDisplayHeight/2 + 100f/2f,
// mDisplayWidth/2f + 100f/2f + 100f, mDisplayHeight/2f - 100/2f, mPainter)
if (_top > 0f && _left > 0f)
canvas.drawOval(_left, _top, _left + 200f, _top + 350f, mPainter)
}
override fun onSizeChanged(w: Int, h: Int, oldw: Int, oldh: Int) {
mDisplayWidth = w
mDisplayHeight = h
super.onSizeChanged(w, h, oldw, oldh)
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
if (mDrawingThread != null) {
mDrawingThread!!.interrupt()
}
}
override fun surfaceCreated(holder: SurfaceHolder) {
mDrawingThread = Thread(Runnable {
var MAX_FRAME_TIME = 1000/60
var frameStartTime = System.nanoTime();
var frameTime: Long = 0
var canvas: Canvas? = null
while (!Thread.currentThread().isInterrupted && animateOvals()) {
canvas = mSurfaceHolder.lockCanvas()
if (canvas != null) {
drawWheel(canvas)
mSurfaceHolder.unlockCanvasAndPost(canvas)
}
frameTime = (System.nanoTime() - frameStartTime) / 1000000
if (frameTime < MAX_FRAME_TIME) // faster than the max fps - limit the FPS
{
try {
Thread.sleep(MAX_FRAME_TIME - frameTime)
} catch (e: InterruptedException) {
// ignore
}
}
}
})
mDrawingThread!!.start()
}
}
}
I don't have time to dig into the Android source and find out exactly what's happening, but your crash is being generated by that Runnable interacting with the Surface's Canvas (creating the "Surface has been released" exception).
When you lift your finger, you do this:
MotionEvent.ACTION_UP, MotionEvent.ACTION_POINTER_UP -> {
layout.removeView(ovalsView)
}
And in the docs for removeView:
Note: do not invoke this method from View.draw(android.graphics.Canvas), View.onDraw(android.graphics.Canvas), dispatchDraw(android.graphics.Canvas) or any related method.
Which implies to me that calling removeView will cause things to happen to the View which mean they can no longer be drawn to, which is why you can't do it from inside one of the draw calls where you're actively drawing to their Canvas.
Since your drawing thread is constantly drawing to the Surface on a tick, you have a potential race condition where the Surface is released before the thread is interrupted. You do call interrupt() in surfaceDestroyed, but releasing happens before the Surface is actually destroyed; it just releases a reference and makes the Surface invalid.
So your drawing loop is probably still attempting to draw to that released, invalid Surface. There are lots of ways to get around this, but you might want to look at Surface#isValid (you can access a SurfaceView's underlying Surface with getHolder()). And since you have multiple threads here, you might want to implement some kind of locking so the UI thread can't invalidate the Surface via removeView while the drawing thread is running the Canvas-touching code, which could otherwise cause rare crashes.
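As a rough illustration, the guarded drawing loop could look like this. It's only a sketch using the field names from the question (mSurfaceHolder, animateOvals, drawWheel), and note there is still a small window between the isValid check and lockCanvas, which is why the extra locking mentioned above matters:
// Inside the Runnable started in surfaceCreated:
while (!Thread.currentThread().isInterrupted && animateOvals()) {
    // Bail out once the Surface has been released,
    // e.g. by removeView() running on the UI thread.
    if (!mSurfaceHolder.surface.isValid) break
    val canvas = mSurfaceHolder.lockCanvas() ?: continue
    try {
        drawWheel(canvas)
    } finally {
        mSurfaceHolder.unlockCanvasAndPost(canvas)
    }
}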
I am building an Android app in Kotlin and I am really new to this.
My app gets data from a sensor that sends 250 samples per second (250 Hz), saves them in a local file, and at the same time sends them to a server via HTTP requests.
There is also the possibility to watch these data in the app on a graph; I chose MPAndroidChart to plot the data in a line chart.
I already have everything working 100% except for the live chart: it works, but the rendering is very slow. It can build up 3 or 4 minutes of delay from the start of plotting, and I need it as close to real time as possible.
Basically my updateGraph method is triggered by a few boolean values. When I click the button to show the graph, the boolean changes to true and the method that receives data from the sensor starts collecting it into an array.
The updateGraph method is called in the onResume method of the LiveGraph activity; it gets the array and adds the data to the Y axis to plot them, and it actually does, but as said with a big delay. I am sure the data are right because I can see them properly on the web server.
This is my main thread:
fun renderChartThread() {
val thread = Thread(Runnable {
runOnUiThread {
renderLineChartOnline()
}
})
thread.start()
}
This is the renderLineChartOnline method:
fun renderLineChartOnline(){
isRendering = true
var mChart: LineChart = graph
yArray.add(Entry(0.toFloat(), 1.20!!.toFloat()))
set1 = LineDataSet(yArray, "Saved Session")
set1.setDrawCircles(false);
set1.setDrawValues(false);
set1.setLineWidth(2f)
val data = LineData(set1)
mChart.setData(data)
mChart.getAxisRight().setEnabled(false);
val xAxis = mChart.xAxis
xAxis.position = XAxis.XAxisPosition.BOTTOM_INSIDE
xAxis.setDrawGridLines(false)
xAxis.granularity = 1f
xAxis.textSize = 8f
xAxis.valueFormatter = IndexAxisValueFormatter(xLabel)
}
This is the onResume method
override fun onResume() {
super.onResume()
if (MainActivity.isLiveView == true) {
var mChart: LineChart = graph
//Getting the data from the device activity
dataPoints = SocketActivity.liveBRTDataPoint
updateLiveDataset(dataPoints, mChart)
}
mHandler.postDelayed(mTimer2, 1000)
}
And as last this is my updateGraph method:
fun updateLiveDataset(var1: ArrayList<Float>, mChart: LineChart) {
var i = 0
mTimer2 = object : Runnable {
override fun run() {
i++
yArray.add(Entry(i.toFloat(), var1[i]!!.toFloat()))
// limit the number of visible entries
mChart.setVisibleXRangeMaximum(750f)
mChart.setVisibleXRangeMinimum(750f)
set1 = LineDataSet(yArray, "Live View")
set1.setDrawCircles(false)
set1.setDrawValues(false)
set1.setLineWidth(2f)
mChart.getAxisRight().setEnabled(false);
data = LineData(set1)
mChart.data = data
mChart.setAutoScaleMinMaxEnabled(true);
mChart.axisLeft.removeAllLimitLines()
mChart.axisLeft.resetAxisMaximum()
mChart.axisLeft.resetAxisMinimum()
mChart.notifyDataSetChanged();
mChart.moveViewToX(var1.size.toFloat())
mChart.invalidate()
mChart.notifyDataSetChanged()
mHandler.postDelayed(mTimer2, 4)
}
}
}
Does anyone have any suggestion on how to speed up this process?
OK, I found a solution that plots real-time data and I would like to post it here; maybe it can be helpful to someone else. But it introduced another problem: I am missing data in the plotting process, of course, because the variable that reads the real-time array is not synchronized with the function that receives the data from the device.
This is my updated code:
import android.content.pm.ActivityInfo
import android.content.res.Configuration
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import android.view.WindowManager
import android.widget.TextView
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.github.mikephil.charting.charts.LineChart
import com.github.mikephil.charting.components.XAxis
import com.github.mikephil.charting.data.Entry
import com.github.mikephil.charting.data.LineData
import com.github.mikephil.charting.data.LineDataSet
import com.github.mikephil.charting.formatter.IndexAxisValueFormatter
import kotlinx.android.synthetic.main.activity_live_graph.*
import okhttp3.Response
import java.io.File
import java.io.InputStream
class LiveGraph : AppCompatActivity() {
companion object{
var isRendering:Boolean = false
var isEcg:Boolean = false
var isBrt:Boolean = false
}
lateinit var chart: LineChart
var dataPoints = ArrayList<Float>()
var pre_filter = ArrayList<Float>()
var yArray = ArrayList<Entry>()
lateinit var set1: LineDataSet
var xLabel = ArrayList<String>()
var data = LineData()
private val mHandler: Handler = Handler()
private var mTimer2: Runnable? = null
lateinit var file: String
var thread: Thread? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_live_graph)
// lock the current device orientation
val currentOrientation = this.resources.configuration.orientation
if (currentOrientation == Configuration.ORIENTATION_PORTRAIT) {
this.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
} else {
this.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
}
// Keep screen awake
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
chart = graph
run(chart)
}
fun renderChartOnline(data: ArrayList<Float>, chart: LineChart) {
var i = 0
if (data.size>0){
for (d in data) {
i +=1
yArray.add(Entry(i.toFloat(), d.toFloat()))
var sec = i/250.toFloat()
val mainHandler = Handler(Looper.getMainLooper())
if (sec > 60){
var min = sec/60.toFloat()
xLabel.add(min.toString()+"min")
} else{
xLabel.add(sec.toString()+"sec")
}
}
}else{
yArray.add(Entry(0.toFloat(), 1.20!!.toFloat()))
}
set1 = LineDataSet(yArray, "Saved Session")
set1.setDrawCircles(false);
set1.setDrawValues(false);
set1.setLineWidth(2f)
val data = LineData(set1)
chart.setData(data)
chart.getAxisRight().setEnabled(false);
val xAxis = chart.xAxis
xAxis.position = XAxis.XAxisPosition.BOTTOM_INSIDE
xAxis.setDrawGridLines(false)
xAxis.granularity = 1f // only intervals of 1 day
xAxis.textSize = 8f
xAxis.valueFormatter = IndexAxisValueFormatter(xLabel)
chart.invalidate()
}
fun run(chart: LineChart){
runOnlineGraph()
val mainHandler = Handler(Looper.getMainLooper())
}
fun runOnlineGraph(){
isRendering = true
feedMultiple()
}
private fun feedMultiple() {
if (thread != null) thread!!.interrupt()
val runnable = Runnable { addEntry() }
thread = Thread(Runnable {
while (true) {
runOnUiThread(runnable)
try {
Thread.sleep(4)
} catch (e: InterruptedException) {
e.printStackTrace()
}
}
})
thread!!.start()
}
override fun onResume() {
super.onResume()
dataPoints = SocketActivity.liveECGDataPoint
renderChartOnline(dataPoints, chart)
}
private fun addEntry() {
/***
 * With this I get the data saved in the Socket process,
 * but it gives me the problem of missing received data;
 * if I use an array the delay then increases too much,
 * so I'm still looking for a solution to this point
 */
dataPoints = SocketActivity.liveDataForGraph
val data = chart.data
if (data != null) {
var set = data.getDataSetByIndex(0)
// set.addEntry(...); // can be called as well
if (set1 == null) {
data.addDataSet(set)
}
for (i in dataPoints){
data.addEntry(
Entry(
set.entryCount.toFloat(),
i
), 0
)
data.notifyDataChanged()
// let the chart know it's data has changed
chart.notifyDataSetChanged()
// move to the latest entry
chart.moveViewToX(data.entryCount.toFloat())
// limit the number of visible entries
chart.setVisibleXRangeMaximum(750f)
chart.setVisibleXRangeMinimum(750f)
chart.getAxisRight().setEnabled(false);
// move to the latest entry
chart.moveViewToX(data.entryCount.toFloat())
chart.setAutoScaleMinMaxEnabled(true);
chart.axisLeft.removeAllLimitLines()
chart.axisLeft.resetAxisMaximum()
chart.axisLeft.resetAxisMinimum()
chart.notifyDataSetChanged(); // let the chart know it's data changed
chart.invalidate()
}
}
}
override fun onDestroy() {
super.onDestroy()
mHandler.removeCallbacks(mTimer2);
isRendering = false
}
}
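One way to avoid the missing-data problem might be to hand samples over through a thread-safe queue instead of repeatedly reading a shared ArrayList. This is only a sketch under the assumption that the socket code can be changed to push into such a queue (the liveSamples field here is hypothetical, not part of my current code):
import java.util.concurrent.ConcurrentLinkedQueue

// Hypothetical shared buffer: the socket thread offers each sample,
// and the chart side drains whatever arrived since the last tick,
// so samples are neither skipped nor read twice.
val liveSamples = ConcurrentLinkedQueue<Float>()

// Producer side (inside the socket receive loop):
// liveSamples.offer(sample)

// Consumer side, used in addEntry() instead of
// dataPoints = SocketActivity.liveDataForGraph:
fun drainSamples(): List<Float> {
    val drained = ArrayList<Float>()
    while (true) {
        val sample = liveSamples.poll() ?: break
        drained.add(sample)
    }
    return drained
}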
I've updated my Gradle dependencies for CameraX (Kotlin) and all of a sudden I get an error that I don't know how to fix.
This is my camera fragment
package com.khumomashapa.notes.fragments
import android.annotation.SuppressLint
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.content.IntentFilter
import android.content.res.Configuration
import android.graphics.Color
import android.graphics.drawable.ColorDrawable
import android.hardware.display.DisplayManager
import android.media.MediaScannerConnection
import android.net.Uri
import android.os.Build
import android.os.Bundle
import android.util.DisplayMetrics
import android.util.Log
import android.view.KeyEvent
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.MimeTypeMap
import android.widget.ImageButton
import androidx.camera.core.AspectRatio
import androidx.camera.core.Camera
import androidx.camera.core.CameraInfoUnavailableException
import androidx.camera.core.CameraSelector
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCapture.Metadata
import androidx.camera.core.ImageCaptureException
import androidx.camera.core.ImageProxy
import androidx.camera.core.Preview
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.constraintlayout.widget.ConstraintLayout
import androidx.core.content.ContextCompat
import androidx.core.net.toFile
import androidx.core.view.setPadding
import androidx.fragment.app.Fragment
import androidx.lifecycle.lifecycleScope
import androidx.localbroadcastmanager.content.LocalBroadcastManager
import androidx.navigation.Navigation
import com.bumptech.glide.Glide
import com.bumptech.glide.request.RequestOptions
import com.khumomashapa.notes.R
import com.khumomashapa.notes.activities.CameraActivity
import com.khumomashapa.notes.activities.KEY_EVENT_ACTION
import com.khumomashapa.notes.activities.KEY_EVENT_EXTRA
import com.khumomashapa.notes.utils.ANIMATION_FAST_MILLIS
import com.khumomashapa.notes.utils.ANIMATION_SLOW_MILLIS
import com.khumomashapa.notes.utils.simulateClick
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import java.io.File
import java.nio.ByteBuffer
import java.text.SimpleDateFormat
import java.util.ArrayDeque
import java.util.Locale
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import kotlin.collections.ArrayList
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
/** Helper type alias used for analysis use case callbacks */
typealias LumaListener = (luma: Double) -> Unit
/**
* Main fragment for this app. Implements all camera operations including:
* - Viewfinder
* - Photo taking
* - Image analysis
*/
class CameraFragment : Fragment() {
private lateinit var container: ConstraintLayout
private lateinit var viewFinder: PreviewView
private lateinit var outputDirectory: File
private lateinit var broadcastManager: LocalBroadcastManager
private var displayId: Int = -1
private var lensFacing: Int = CameraSelector.LENS_FACING_BACK
private var preview: Preview? = null
private var imageCapture: ImageCapture? = null
private var imageAnalyzer: ImageAnalysis? = null
private var camera: Camera? = null
private var cameraProvider: ProcessCameraProvider? = null
private val displayManager by lazy {
requireContext().getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
}
/** Blocking camera operations are performed using this executor */
private lateinit var cameraExecutor: ExecutorService
/** Volume down button receiver used to trigger shutter */
private val volumeDownReceiver = object : BroadcastReceiver() {
override fun onReceive(context: Context, intent: Intent) {
when (intent.getIntExtra(KEY_EVENT_EXTRA, KeyEvent.KEYCODE_UNKNOWN)) {
// When the volume down button is pressed, simulate a shutter button click
KeyEvent.KEYCODE_VOLUME_DOWN -> {
val shutter = container
.findViewById<ImageButton>(R.id.camera_capture_button)
shutter.simulateClick()
}
}
}
}
/**
* We need a display listener for orientation changes that do not trigger a configuration
* change, for example if we choose to override config change in manifest or for 180-degree
* orientation changes.
*/
private val displayListener = object : DisplayManager.DisplayListener {
override fun onDisplayAdded(displayId: Int) = Unit
override fun onDisplayRemoved(displayId: Int) = Unit
override fun onDisplayChanged(displayId: Int) = view?.let { view ->
if (displayId == this@CameraFragment.displayId) {
Log.d(TAG, "Rotation changed: ${view.display.rotation}")
imageCapture?.targetRotation = view.display.rotation
imageAnalyzer?.targetRotation = view.display.rotation
}
} ?: Unit
}
override fun onResume() {
super.onResume()
// Make sure that all permissions are still present, since the
// user could have removed them while the app was in paused state.
if (!PermissionsFragment.hasPermissions(requireContext())) {
Navigation.findNavController(requireActivity(), R.id.fragment_container).navigate(
CameraFragmentDirections.actionCameraToPermissions()
)
}
}
override fun onDestroyView() {
super.onDestroyView()
// Shut down our background executor
cameraExecutor.shutdown()
// Unregister the broadcast receivers and listeners
broadcastManager.unregisterReceiver(volumeDownReceiver)
displayManager.unregisterDisplayListener(displayListener)
}
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?): View? =
inflater.inflate(R.layout.fragment_camera, container, false)
private fun setGalleryThumbnail(uri: Uri) {
// Reference of the view that holds the gallery thumbnail
val thumbnail = container.findViewById<ImageButton>(R.id.photo_view_button)
// Run the operations in the view's thread
thumbnail.post {
// Remove thumbnail padding
thumbnail.setPadding(resources.getDimension(R.dimen.stroke_small).toInt())
// Load thumbnail into circular button using Glide
Glide.with(thumbnail)
.load(uri)
.apply(RequestOptions.circleCropTransform())
.into(thumbnail)
}
}
@SuppressLint("MissingPermission")
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
container = view as ConstraintLayout
viewFinder = container.findViewById(R.id.view_finder)
// Initialize our background executor
cameraExecutor = Executors.newSingleThreadExecutor()
broadcastManager = LocalBroadcastManager.getInstance(view.context)
// Set up the intent filter that will receive events from our main activity
val filter = IntentFilter().apply { addAction(KEY_EVENT_ACTION) }
broadcastManager.registerReceiver(volumeDownReceiver, filter)
// Every time the orientation of device changes, update rotation for use cases
displayManager.registerDisplayListener(displayListener, null)
// Determine the output directory
outputDirectory = CameraActivity.getOutputDirectory(requireContext())
// Wait for the views to be properly laid out
viewFinder.post {
// Keep track of the display in which this view is attached
displayId = viewFinder.display.displayId
// Build UI controls
updateCameraUi()
// Set up the camera and its use cases
setUpCamera()
}
}
/**
* Inflate camera controls and update the UI manually upon config changes to avoid removing
* and re-adding the view finder from the view hierarchy; this provides a seamless rotation
* transition on devices that support it.
*
* NOTE: The flag is supported starting in Android 8 but there still is a small flash on the
* screen for devices that run Android 9 or below.
*/
override fun onConfigurationChanged(newConfig: Configuration) {
super.onConfigurationChanged(newConfig)
// Redraw the camera UI controls
updateCameraUi()
// Enable or disable switching between cameras
updateCameraSwitchButton()
}
/** Initialize CameraX, and prepare to bind the camera use cases */
private fun setUpCamera() {
val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext())
cameraProviderFuture.addListener(Runnable {
// CameraProvider
cameraProvider = cameraProviderFuture.get()
// Select lensFacing depending on the available cameras
lensFacing = when {
hasBackCamera() -> CameraSelector.LENS_FACING_BACK
hasFrontCamera() -> CameraSelector.LENS_FACING_FRONT
else -> throw IllegalStateException("Back and front camera are unavailable")
}
// Enable or disable switching between cameras
updateCameraSwitchButton()
// Build and bind the camera use cases
bindCameraUseCases()
}, ContextCompat.getMainExecutor(requireContext()))
}
/** Declare and bind preview, capture and analysis use cases */
private fun bindCameraUseCases() {
// Get screen metrics used to setup camera for full screen resolution
val metrics = DisplayMetrics().also { viewFinder.display.getRealMetrics(it) }
Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
Log.d(TAG, "Preview aspect ratio: $screenAspectRatio")
val rotation = viewFinder.display.rotation
// CameraProvider
val cameraProvider = cameraProvider
?: throw IllegalStateException("Camera initialization failed.")
// CameraSelector
val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
// Preview
preview = Preview.Builder()
// We request aspect ratio but no resolution
.setTargetAspectRatio(screenAspectRatio)
// Set initial target rotation
.setTargetRotation(rotation)
.build()
// ImageCapture
imageCapture = ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
// We request aspect ratio but no resolution to match preview config, but letting
// CameraX optimize for whatever specific resolution best fits our use cases
.setTargetAspectRatio(screenAspectRatio)
// Set initial target rotation, we will have to call this again if rotation changes
// during the lifecycle of this use case
.setTargetRotation(rotation)
.build()
// ImageAnalysis
imageAnalyzer = ImageAnalysis.Builder()
// We request aspect ratio but no resolution
.setTargetAspectRatio(screenAspectRatio)
// Set initial target rotation, we will have to call this again if rotation changes
// during the lifecycle of this use case
.setTargetRotation(rotation)
.build()
// The analyzer can then be assigned to the instance
.also {
it.setAnalyzer(cameraExecutor, LuminosityAnalyzer { luma ->
// Values returned from our analyzer are passed to the attached listener
// We log image analysis results here - you should do something useful
// instead!
Log.d(TAG, "Average luminosity: $luma")
})
}
// Must unbind the use-cases before rebinding them
cameraProvider.unbindAll()
try {
// A variable number of use-cases can be passed here -
// camera provides access to CameraControl & CameraInfo
camera = cameraProvider.bindToLifecycle(
this, cameraSelector, preview, imageCapture, imageAnalyzer)
// Attach the viewfinder's surface provider to preview use case
preview?.setSurfaceProvider(viewFinder.createSurfaceProvider())
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}
/**
* [androidx.camera.core.ImageAnalysisConfig] requires enum value of
* [androidx.camera.core.AspectRatio]. Currently it has values of 4:3 & 16:9.
*
* Detecting the most suitable ratio for dimensions provided in @params by counting absolute
* of preview ratio to one of the provided values.
*
* @param width - preview width
* @param height - preview height
* @return suitable aspect ratio
*/
private fun aspectRatio(width: Int, height: Int): Int {
val previewRatio = max(width, height).toDouble() / min(width, height)
if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
return AspectRatio.RATIO_4_3
}
return AspectRatio.RATIO_16_9
}
/** Method used to re-draw the camera UI controls, called every time configuration changes. */
private fun updateCameraUi() {
// Remove previous UI if any
container.findViewById<ConstraintLayout>(R.id.camera_ui_container)?.let {
container.removeView(it)
}
// Inflate a new view containing all UI for controlling the camera
val controls = View.inflate(requireContext(), R.layout.camera_ui_container, container)
// In the background, load latest photo taken (if any) for gallery thumbnail
lifecycleScope.launch(Dispatchers.IO) {
outputDirectory.listFiles { file ->
EXTENSION_WHITELIST.contains(file.extension.toUpperCase(Locale.ROOT))
}?.max()?.let {
setGalleryThumbnail(Uri.fromFile(it))
}
}
// Listener for button used to capture photo
controls.findViewById<ImageButton>(R.id.camera_capture_button).setOnClickListener {
// Get a stable reference of the modifiable image capture use case
imageCapture?.let { imageCapture ->
// Create output file to hold the image
val photoFile = createFile(outputDirectory, FILENAME, PHOTO_EXTENSION)
// Setup image capture metadata
val metadata = Metadata().apply {
// Mirror image when using the front camera
isReversedHorizontal = lensFacing == CameraSelector.LENS_FACING_FRONT
}
// Create output options object which contains file + metadata
val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile)
.setMetadata(metadata)
.build()
// Setup image capture listener which is triggered after photo has been taken
imageCapture.takePicture(
outputOptions, cameraExecutor, object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Log.e(TAG, "Photo capture failed: ${exc.message}", exc)
}
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
val savedUri = output.savedUri ?: Uri.fromFile(photoFile)
Log.d(TAG, "Photo capture succeeded: $savedUri")
// We can only change the foreground Drawable using API level 23+ API
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// Update the gallery thumbnail with latest picture taken
setGalleryThumbnail(savedUri)
}
// Implicit broadcasts will be ignored for devices running API level >= 24
// so if you only target API level 24+ you can remove this statement
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
requireActivity().sendBroadcast(
Intent(android.hardware.Camera.ACTION_NEW_PICTURE, savedUri)
)
}
// If the folder selected is an external media directory, this is
// unnecessary but otherwise other apps will not be able to access our
// images unless we scan them using [MediaScannerConnection]
val mimeType = MimeTypeMap.getSingleton()
.getMimeTypeFromExtension(savedUri.toFile().extension)
MediaScannerConnection.scanFile(
context,
arrayOf(savedUri.toFile().absolutePath),
arrayOf(mimeType)
) { _, uri ->
Log.d(TAG, "Image capture scanned into media store: $uri")
}
}
})
// We can only change the foreground Drawable using API level 23+ API
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// Display flash animation to indicate that photo was captured
container.postDelayed({
container.foreground = ColorDrawable(Color.WHITE)
container.postDelayed(
{ container.foreground = null }, ANIMATION_FAST_MILLIS)
}, ANIMATION_SLOW_MILLIS)
}
}
}
// Setup for button used to switch cameras
controls.findViewById<ImageButton>(R.id.camera_switch_button).let {
// Disable the button until the camera is set up
it.isEnabled = false
// Listener for button used to switch cameras. Only called if the button is enabled
it.setOnClickListener {
lensFacing = if (CameraSelector.LENS_FACING_FRONT == lensFacing) {
CameraSelector.LENS_FACING_BACK
} else {
CameraSelector.LENS_FACING_FRONT
}
// Re-bind use cases to update selected camera
bindCameraUseCases()
}
}
// Listener for button used to view the most recent photo
controls.findViewById<ImageButton>(R.id.photo_view_button).setOnClickListener {
// Only navigate when the gallery has photos
if (true == outputDirectory.listFiles()?.isNotEmpty()) {
Navigation.findNavController(
requireActivity(), R.id.fragment_container
).navigate(CameraFragmentDirections
.actionCameraToGallery(outputDirectory.absolutePath))
}
}
}
/** Enables or disables the button to switch cameras depending on the available cameras */
private fun updateCameraSwitchButton() {
val switchCamerasButton = container.findViewById<ImageButton>(R.id.camera_switch_button)
try {
switchCamerasButton.isEnabled = hasBackCamera() && hasFrontCamera()
} catch (exception: CameraInfoUnavailableException) {
switchCamerasButton.isEnabled = false
}
}
/** Returns true if the device has an available back camera. False otherwise */
private fun hasBackCamera(): Boolean {
return cameraProvider?.hasCamera(CameraSelector.DEFAULT_BACK_CAMERA) ?: false
}
/** Returns true if the device has an available front camera. False otherwise */
private fun hasFrontCamera(): Boolean {
return cameraProvider?.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA) ?: false
}
/**
* Our custom image analysis class.
*
* <p>All we need to do is override the function `analyze` with our desired operations. Here,
* we compute the average luminosity of the image by looking at the Y plane of the YUV frame.
*/
private class LuminosityAnalyzer(listener: LumaListener? = null) : ImageAnalysis.Analyzer {
private val frameRateWindow = 8
private val frameTimestamps = ArrayDeque<Long>(5)
private val listeners = ArrayList<LumaListener>().apply { listener?.let { add(it) } }
private var lastAnalyzedTimestamp = 0L
var framesPerSecond: Double = -1.0
private set
/**
* Used to add listeners that will be called with each luma computed
*/
fun onFrameAnalyzed(listener: LumaListener) = listeners.add(listener)
/**
* Helper extension function used to extract a byte array from an image plane buffer
*/
private fun ByteBuffer.toByteArray(): ByteArray {
rewind() // Rewind the buffer to zero
val data = ByteArray(remaining())
get(data) // Copy the buffer into a byte array
return data // Return the byte array
}
/**
* Analyzes an image to produce a result.
*
* <p>The caller is responsible for ensuring this analysis method can be executed quickly
* enough to prevent stalls in the image acquisition pipeline. Otherwise, newly available
* images will not be acquired and analyzed.
*
* <p>The image passed to this method becomes invalid after this method returns. The caller
* should not store external references to this image, as these references will become
* invalid.
*
* @param image image being analyzed VERY IMPORTANT: Analyzer method implementation must
* call image.close() on received images when finished using them. Otherwise, new images
* may not be received or the camera may stall, depending on back pressure setting.
*
*/
override fun analyze(image: ImageProxy) {
// If there are no listeners attached, we don't need to perform analysis
if (listeners.isEmpty()) {
image.close()
return
}
// Keep track of frames analyzed
val currentTime = System.currentTimeMillis()
frameTimestamps.push(currentTime)
// Compute the FPS using a moving average
while (frameTimestamps.size >= frameRateWindow) frameTimestamps.removeLast()
val timestampFirst = frameTimestamps.peekFirst() ?: currentTime
val timestampLast = frameTimestamps.peekLast() ?: currentTime
framesPerSecond = 1.0 / ((timestampFirst - timestampLast) /
frameTimestamps.size.coerceAtLeast(1).toDouble()) * 1000.0
// Analysis could take an arbitrarily long amount of time
// Since we are running in a different thread, it won't stall other use cases
lastAnalyzedTimestamp = frameTimestamps.first
// Since format in ImageAnalysis is YUV, image.planes[0] contains the luminance plane
val buffer = image.planes[0].buffer
// Extract image data from callback object
val data = buffer.toByteArray()
// Convert the data into an array of pixel values ranging 0-255
val pixels = data.map { it.toInt() and 0xFF }
// Compute average luminance for the image
val luma = pixels.average()
// Call all listeners with new value
listeners.forEach { it(luma) }
image.close()
}
}
companion object {
private const val TAG = "CameraXBasic"
private const val FILENAME = "yyyy-MM-dd-HH-mm-ss-SSS"
private const val PHOTO_EXTENSION = ".jpg"
private const val RATIO_4_3_VALUE = 4.0 / 3.0
private const val RATIO_16_9_VALUE = 16.0 / 9.0
/** Helper function used to create a timestamped file */
private fun createFile(baseFolder: File, format: String, extension: String) =
File(baseFolder, SimpleDateFormat(format, Locale.US)
.format(System.currentTimeMillis()) + extension)
}
}
After I updated Gradle I got an "Unresolved reference: createSurfaceProvider" error. I've read the documentation about this, but it keeps giving me the same error and I don't know why. Does anyone know how to fix this problem?
Since Camera-View 1.0.0-alpha16, createSurfaceProvider() has been renamed to getSurfaceProvider()
Use:
preview?.setSurfaceProvider(viewFinder.surfaceProvider)
For viewBinding and/or dataBinding:
it.setSurfaceProvider(binding.preview.surfaceProvider)
// CameraX
def camerax_version = '1.0.0-rc01'
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation 'androidx.camera:camera-view:1.0.0-alpha20'
Did you update both camera-lifecycle and camera-view in build.gradle?
I had to do both to get past it.
The issue arose because older code examples that use createSurfaceProvider needed to change to surfaceProvider
AND
I had to make sure in build.gradle(app)
androidx.camera:camera-lifecycle:1.0.0-beta09
androidx.camera:camera-camera2:1.0.0-beta09
androidx.camera:camera-view:1.0.0-alpha16
I had a mismatch where alpha16 was not paired with beta09 and it did not work as expected.
Instead of viewFinder.createSurfaceProvider(), use viewFinder.surfaceProvider.
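Put together, the change in the fragment is a single line (shown here against the viewFinder PreviewView field the question already defines):
// Before (camera-view 1.0.0-alpha15 and earlier):
preview?.setSurfaceProvider(viewFinder.createSurfaceProvider())
// After (camera-view 1.0.0-alpha16 and later):
preview?.setSurfaceProvider(viewFinder.surfaceProvider)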
I would like to implement ARCore with Twilio's video calls. The documentation says this is possible, but I could not figure out how to do it. Can anyone tell me what I'm doing wrong?
This is my activity:
class MixActivity : AppCompatActivity() {
private lateinit var mArFragment: ArFragment
private lateinit var mVideoView: ArSceneView
private var mScreenVideoTrack: LocalVideoTrack? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_mix)
mArFragment = ar_fragment as ArFragment
mVideoView = mArFragment.arSceneView
mScreenVideoTrack = LocalVideoTrack.create(this, true,
ViewCapturer(mVideoView)
)
}
}
This is the layout:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent">
<fragment
android:id="#+id/ar_fragment"
android:name="com.google.ar.sceneform.ux.ArFragment"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</RelativeLayout>
And the VideoCapturer:
internal class ViewCapturer(private val view: View) : VideoCapturer, PixelCopy.OnPixelCopyFinishedListener {
private val handler = Handler(Looper.getMainLooper())
private var videoCapturerListener: VideoCapturer.Listener? = null
private val started = AtomicBoolean(false)
private lateinit var mViewBitmap: Bitmap
private val viewCapturer = object : Runnable {
override fun run() {
val dropFrame = view.width == 0 || view.height == 0
// Only capture the view if the dimensions have been established
if (!dropFrame) {
// Draw view into bitmap backed canvas
val measuredWidth = View.MeasureSpec.makeMeasureSpec(
view.width,
View.MeasureSpec.EXACTLY
)
val measuredHeight = View.MeasureSpec.makeMeasureSpec(
view.height,
View.MeasureSpec.EXACTLY
)
view.measure(measuredWidth, measuredHeight)
view.layout(0, 0, view.measuredWidth, view.measuredHeight)
mViewBitmap = Bitmap.createBitmap(
view.width, view.height,
Bitmap.Config.ARGB_8888
)
val viewCanvas = Canvas(mViewBitmap)
view.draw(viewCanvas)
// Extract the frame from the bitmap
val bytes = mViewBitmap.byteCount
val buffer = ByteBuffer.allocate(bytes)
mViewBitmap.copyPixelsToBuffer(buffer)
val array = buffer.array()
val captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())
// Create video frame
val dimensions = VideoDimensions(view.width, view.height)
val videoFrame = VideoFrame(
array,
dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs
)
// Notify the listener
if (started.get()) {
videoCapturerListener!!.onFrameCaptured(videoFrame)
}
}
// Schedule the next capture
if (started.get()) {
handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS.toLong())
}
}
}
/**
* Returns the list of supported formats for this view capturer. Currently, only supports
* capturing to RGBA_8888 bitmaps.
*
* @return list of supported formats.
*/
override fun getSupportedFormats(): List<VideoFormat> {
val videoFormats = ArrayList<VideoFormat>()
val videoDimensions = VideoDimensions(view.width, view.height)
val videoFormat = VideoFormat(videoDimensions, 30, VideoPixelFormat.RGBA_8888)
videoFormats.add(videoFormat)
return videoFormats
}
/**
* Returns true because we are capturing screen content.
*/
override fun isScreencast(): Boolean {
return true
}
/**
* This will be invoked when it is time to start capturing frames.
*
* @param videoFormat the video format of the frames to be captured.
* @param listener capturer listener.
*/
override fun startCapture(videoFormat: VideoFormat, listener: VideoCapturer.Listener) {
// Store the capturer listener
this.videoCapturerListener = listener
this.started.set(true)
// Notify capturer API that the capturer has started
val capturerStarted = handler.postDelayed(
viewCapturer,
VIEW_CAPTURER_FRAMERATE_MS.toLong()
)
this.videoCapturerListener!!.onCapturerStarted(capturerStarted)
}
/**
* Stop capturing frames. Note that the SDK cannot receive frames once this has been invoked.
*/
override fun stopCapture() {
this.started.set(false)
handler.removeCallbacks(viewCapturer)
}
override fun onPixelCopyFinished(i: Int) {
// Extract the frame from the bitmap
val bytes = mViewBitmap.getByteCount()
val buffer = ByteBuffer.allocate(bytes)
mViewBitmap.copyPixelsToBuffer(buffer)
val array = buffer.array()
val captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())
// Create video frame
val dimensions = VideoDimensions(view.width, view.height)
val videoFrame = VideoFrame(
array,
dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs
)
// Notify the listener
if (started.get()) {
videoCapturerListener?.onFrameCaptured(videoFrame)
}
if (started.get()) {
handler.postDelayed(viewCapturer, VIEW_CAPTURER_FRAMERATE_MS.toLong())
}
}
companion object {
private val VIEW_CAPTURER_FRAMERATE_MS = 100
}
}
The ARCore part works but the Twilio part does not work.
I referred to another post that talked about it but it was incomplete:
Streaming CustomView ARcore with Twilio video
I'm working on a WebRTC based app for Android using the native implementation (org.webrtc:google-webrtc:1.0.24064), and I need to send a series of bitmaps along with the camera stream.
From what I understood, I can derive from org.webrtc.VideoCapturer and do my rendering in a separate thread, and send video frames to the observer; however it expects them to be YUV420 and I'm not sure I'm doing the correct conversion.
This is what I currently have: CustomCapturer.java
Are there any examples I can look at for doing this kind of thing? Thanks.
One approach is to upload the bitmap into an OpenGL texture and let WebRTC's YuvConverter turn the resulting texture buffer into an I420 buffer:
YuvConverter yuvConverter = new YuvConverter();
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
TextureBufferImpl buffer = new TextureBufferImpl(bitmap.getWidth(), bitmap.getHeight(), VideoFrame.TextureBuffer.Type.RGB, textures[0], new Matrix(), textureHelper.getHandler(), yuvConverter, null);
VideoFrame.I420Buffer i420Buf = yuvConverter.convert(buffer);
VideoFrame convertedFrame = new VideoFrame(i420Buf, 180, videoFrame.getTimestampNs());
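Note two assumptions this snippet makes that it does not show: GLUtils.texImage2D only works on a thread with a current EGL context (for example the SurfaceTextureHelper's handler thread), and the generated texture should eventually be released with GLES20.glDeleteTextures to avoid leaking it.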
I've tried rendering it manually with GL as in Yang's answer, but that ended up with some tearing and framerate issues when dealing with a stream of images.
Instead, I've found that the SurfaceTextureHelper class helps simplify things quite a bit, as you can also use regular canvas drawing to render the bitmap into a VideoFrame. I'm guessing it still uses GL under the hood, as the performance was otherwise comparable. Here's an example VideoCapturer that takes in arbitrary bitmaps and outputs the captured frames to its observer:
import android.content.Context
import android.graphics.Bitmap
import android.graphics.Matrix
import android.graphics.Paint
import android.os.Build
import android.view.Surface
import org.webrtc.CapturerObserver
import org.webrtc.SurfaceTextureHelper
import org.webrtc.VideoCapturer
/**
* A [VideoCapturer] that can be manually driven by passing in [Bitmap].
*
* Once [startCapture] is called, call [pushBitmap] to render images as video frames.
*/
open class BitmapFrameCapturer : VideoCapturer {
private var surfaceTextureHelper: SurfaceTextureHelper? = null
private var capturerObserver: CapturerObserver? = null
private var disposed = false
private var rotation = 0
private var width = 0
private var height = 0
private val stateLock = Any()
private var surface: Surface? = null
override fun initialize(
surfaceTextureHelper: SurfaceTextureHelper,
context: Context,
observer: CapturerObserver,
) {
synchronized(stateLock) {
this.surfaceTextureHelper = surfaceTextureHelper
this.capturerObserver = observer
surface = Surface(surfaceTextureHelper.surfaceTexture)
}
}
private fun checkNotDisposed() {
check(!disposed) { "Capturer is disposed." }
}
override fun startCapture(width: Int, height: Int, framerate: Int) {
synchronized(stateLock) {
checkNotDisposed()
checkNotNull(surfaceTextureHelper) { "BitmapFrameCapturer must be initialized before calling startCapture." }
capturerObserver?.onCapturerStarted(true)
surfaceTextureHelper?.startListening { frame -> capturerObserver?.onFrameCaptured(frame) }
}
}
override fun stopCapture() {
synchronized(stateLock) {
surfaceTextureHelper?.stopListening()
capturerObserver?.onCapturerStopped()
}
}
override fun changeCaptureFormat(width: Int, height: Int, framerate: Int) {
// Do nothing.
// These attributes are driven by the bitmaps fed in.
}
override fun dispose() {
synchronized(stateLock) {
if (disposed) {
return
}
stopCapture()
surface?.release()
disposed = true
}
}
override fun isScreencast(): Boolean = false
fun pushBitmap(bitmap: Bitmap, rotationDegrees: Int) {
synchronized(stateLock) {
if (disposed) {
return
}
checkNotNull(surfaceTextureHelper)
checkNotNull(surface)
if (this.rotation != rotationDegrees) {
surfaceTextureHelper?.setFrameRotation(rotationDegrees)
this.rotation = rotationDegrees
}
if (this.width != bitmap.width || this.height != bitmap.height) {
surfaceTextureHelper?.setTextureSize(bitmap.width, bitmap.height)
this.width = bitmap.width
this.height = bitmap.height
}
surfaceTextureHelper?.handler?.post {
val canvas = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
surface?.lockHardwareCanvas()
} else {
surface?.lockCanvas(null)
}
if (canvas != null) {
canvas.drawBitmap(bitmap, Matrix(), Paint())
surface?.unlockCanvasAndPost(canvas)
}
}
}
}
}
https://github.com/livekit/client-sdk-android/blob/c1e207c30fce9499a534e13c63a59f26215f0af4/livekit-android-sdk/src/main/java/io/livekit/android/room/track/video/BitmapFrameCapturer.kt