I've successfully captured images with CameraX into JPEG files. The problem is that the files are big: on an Android-powered walkie-talkie (Android 11), the result is typically 4 to 6 MB, so I'd like to resize it.
Here's my code:
/**
 * Captures a JPEG with CameraX into MediaStore (DCIM/TESTAPP), then downscales
 * it so its longer side is 1024 px and writes the smaller JPEG back over the
 * SAME MediaStore entry.
 */
fun takePhoto() {
    val FILENAME_FORMAT = "ddMMyyyy_HHmmss"
    // Pending MediaStore record describing the output file.
    val capturedContentValues = ContentValues()
    capturedContentValues.put(
        MediaStore.MediaColumns.DISPLAY_NAME,
        "CARAKA_" + SimpleDateFormat(FILENAME_FORMAT, Locale.US).format(System.currentTimeMillis())
    )
    capturedContentValues.put(MediaStore.MediaColumns.RELATIVE_PATH, Environment.DIRECTORY_DCIM + "/TESTAPP")
    capturedContentValues.put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
    val outputOptions = ImageCapture.OutputFileOptions.Builder(
        context.contentResolver,
        MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
        capturedContentValues
    ).build()
    imageCapture.takePicture(
        outputOptions,
        ContextCompat.getMainExecutor(context),
        object : ImageCapture.OnImageSavedCallback {
            override fun onError(exc: ImageCaptureException) {
                Toast.makeText(context, "Photo capture failed: ${exc.message}", Toast.LENGTH_SHORT).show()
            }

            override fun onImageSaved(output: ImageCapture.OutputFileResults) {
                // BUG FIX: the original decoded a private COPY of the image
                // (made by getFile() into context.filesDir) and wrote the
                // resized bitmap back to that copy, so the MediaStore file
                // kept its full 4-6 MB size. Overwrite the saved URI itself.
                val savedUri = output.savedUri ?: return
                val theFile = getFile(context, savedUri) ?: return
                val capturedBitmap = BitmapFactory.decodeFile(theFile.absolutePath) ?: return
                val resizedBitmap = getResizedBitmap(capturedBitmap, 1024)
                // "wt" = write + truncate; use {} guarantees the stream is
                // closed even if compress() throws (the original leaked it).
                context.contentResolver.openOutputStream(savedUri, "wt")?.use { out ->
                    resizedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, out)
                }
            }
        })
}
/**
 * Returns [image] scaled so that its longer side equals [maxSize],
 * preserving the original aspect ratio (bilinear filtering enabled).
 */
fun getResizedBitmap(image: Bitmap, maxSize: Int): Bitmap {
    val aspect = image.width.toFloat() / image.height.toFloat()
    // Landscape (aspect > 1): clamp width; portrait/square: clamp height.
    val (targetWidth, targetHeight) = if (aspect > 1) {
        maxSize to (maxSize / aspect).toInt()
    } else {
        (maxSize * aspect).toInt() to maxSize
    }
    return Bitmap.createScaledBitmap(image, targetWidth, targetHeight, true)
}
// Copies the content behind [uri] into a private file under context.filesDir
// (named after the content's DISPLAY_NAME) and returns that file.
// NOTE(review): on a copy failure this still returns the (possibly empty/missing)
// destination file rather than null — callers should be aware.
@Throws(IOException::class) // fixed: '#Throws' is not valid Kotlin; annotations use '@'
fun getFile(context: Context, uri: Uri): File? {
    val destinationFilename =
        File(context.filesDir.path + File.separatorChar + queryName(context, uri))
    try {
        context.contentResolver.openInputStream(uri).use { ins ->
            createFileFromStream(
                ins!!,
                destinationFilename
            )
        }
    } catch (ex: Exception) {
        // ex.message can legitimately be null; the old 'ex.message!!' made the
        // error path itself crash. Pass the throwable so the trace is logged too.
        Log.e("Save File", ex.message ?: "Failed to copy $uri", ex)
    }
    return destinationFilename
}
// Streams all bytes from [ins] into [destination]. Errors are logged, not
// rethrown (best-effort, matching the original contract).
fun createFileFromStream(ins: InputStream, destination: File?) {
    try {
        FileOutputStream(destination).use { os ->
            // copyTo() replaces the hand-rolled buffer loop. The old loop's
            // 'read(...) > 0' condition was also subtly wrong: the EOF
            // sentinel is -1, so '!= -1' is the correct test.
            ins.copyTo(os, bufferSize = 4096)
            os.flush()
        }
    } catch (ex: Exception) {
        // ex.message may be null — avoid the '!!' crash in the error path.
        Log.e("Save File", ex.message ?: "Failed to write ${destination?.absolutePath}", ex)
    }
}
// Resolves the user-visible file name (OpenableColumns.DISPLAY_NAME) of a
// content URI. Throws IllegalStateException if the provider returns no cursor,
// IllegalArgumentException if the column is missing — previously a failed
// query produced an NPE and an exception leaked the cursor.
private fun queryName(context: Context, uri: Uri): String {
    val cursor = checkNotNull(context.contentResolver.query(uri, null, null, null, null)) {
        "contentResolver.query returned null for $uri"
    }
    // use {} guarantees the cursor is closed on every path.
    cursor.use {
        val nameIndex = it.getColumnIndexOrThrow(OpenableColumns.DISPLAY_NAME)
        it.moveToFirst()
        return it.getString(nameIndex)
    }
}
The saved JPEGs are still 4 to 6 MB — not reduced to hundreds of KB. What's wrong here?
Instead of resizing it afterwards, another thing you can try is taking a smaller picture. You can set the target resolution to be lower: https://developer.android.com/reference/androidx/camera/core/ImageCapture.Builder#setTargetResolution(android.util.Size)
Related
Why can't I get the pictureLatitude and pictureDateTime values? They are all null,
even though I can read the outputFileResults and byteArrayInputStream values.
Log.d(TAG, "filePath: $filePath") print=>filePath:
/external/images/media/1000000030 . But at my real mobile phone, the
photo's path is storage/emulated/0/Pictures/CameraX-Image.
====================================================================
==========================================
/**
 * Captures a photo into MediaStore (Pictures/CameraX-Image on API 29+) and then
 * reads its EXIF metadata (GPS, datetime) from the saved image.
 */
private fun takePhoto() {
    val imageCapture2 = imageCapture1 ?: return
    // Time-stamped display name for the MediaStore entry.
    val name =
        SimpleDateFormat(FILENAME_FORMAT, Locale.TAIWAN).format(System.currentTimeMillis())
    val contentValues = ContentValues().apply {
        put(MediaStore.MediaColumns.DISPLAY_NAME, name)
        put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            put(MediaStore.Images.Media.RELATIVE_PATH, "Pictures/CameraX-Image")
        }
    }
    val outputFileOptions = ImageCapture.OutputFileOptions.Builder(
        context?.contentResolver!!,
        MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
        contentValues
    ).build()
    imageCapture2.takePicture(
        outputFileOptions,
        ContextCompat.getMainExecutor(requireContext()),
        object : ImageCapture.OnImageSavedCallback {
            @RequiresApi(Build.VERSION_CODES.Q) // fixed: '#RequiresApi' -> '@RequiresApi'
            override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
                val msg = "Photo capture succeeded: ${outputFileResults.savedUri}"
                Toast.makeText(requireContext(), msg, Toast.LENGTH_SHORT).show()
                Log.d(TAG, "onImageSaved: $msg")
                val savedUri = outputFileResults.savedUri ?: return
                Log.d(TAG, "filePath: ${savedUri.path}")
                // BUG FIX: the original built ExifInterface from
                // savedUri.path.byteInputStream(), i.e. it parsed the PATH
                // STRING ("/external/images/media/…") as if it were JPEG
                // bytes — so every EXIF attribute came back null. Open the
                // actual image content stream instead.
                // setRequireOriginal() is needed on Q+ to read unredacted
                // location tags (requires ACCESS_MEDIA_LOCATION permission).
                val originalUri = MediaStore.setRequireOriginal(savedUri)
                Log.d(TAG, "originalUri: $originalUri")
                requireContext().contentResolver.openInputStream(originalUri)?.use { stream ->
                    val exif = ExifInterface(stream)
                    val pictureLatitude = exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE)
                    val pictureDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME)
                    val latLong = exif.latLong
                    val altitude = exif.getAltitude(0.0)
                    Log.d(TAG, "pictureLatitude: $pictureLatitude")
                    Log.d(TAG, "pictureDateTime: $pictureDateTime")
                    Log.d(TAG, "latLong: $latLong")
                    Log.d(TAG, "altitude: $altitude")
                    println(pictureLatitude)
                }
            }

            override fun onError(exception: ImageCaptureException) {
                Log.e(TAG, "Photo capture failed: ${exception.message}", exception)
            }
        })
}
I want to take one photo every 0.1 seconds, so that I get 10 pictures per second.
I implemented takePhoto() following the CameraX reference on the Android developer site.
It works, but not as expected:
takePhoto() is called every 0.1 seconds, but the callback listener takes longer, presumably because of creating the bitmap and saving the file.
So I want to implement a fast burst (continuous shooting) camera mode.
However, it is hard to find references.
Please help me.
[Call takePhoto()]
// Burst-mode trigger: spawns a worker thread that calls takePhoto() every
// shootMills ms until the time limit or photo-count limit is reached.
binding.imageCaptureButton.setOnClickListener {
    val thread = Thread(Runnable {
        var overtime = (System.currentTimeMillis() - pressedTime).toInt()
        while (!isCaptureDone) {
            try {
                Log.d(TAG, "Thread: picNum = $picNum")
                Log.d(TAG, "Thread: isCaptureDone = $isCaptureDone")
                takePhoto()
                Thread.sleep(shootMills) // 0.1 seconds
                overtime = (System.currentTimeMillis() - pressedTime).toInt()
                if (overtime >= limitTime || picNum >= maxPicNum) {
                    pressedTime = 0
                    isCaptureDone = true
                    // BUG FIX: "{maxPicNum}" was missing the '$', so the
                    // literal text "{maxPicNum}" was shown, not the value.
                    val msg = if (overtime >= limitTime) "over ${limitTime / 1000} seconds" else "Take picture over ${maxPicNum}"
                    Log.d(TAG, "onCreate: $msg")
                    runOnUiThread { alertDialog(msg) }
                    return@Runnable
                }
            } catch (e: InterruptedException) {
                Log.e(TAG, "Thread Error : ${e.printStackTrace()}")
            }
        }
    })
    // BUG FIX: the thread was constructed but never started, so the burst
    // loop never ran at all.
    thread.start()
}
[takePhoto()]
// Captures one photo per invocation, via one of two save paths:
//  - "v 1" (firstSol && !saveInGallery): OnImageCapturedCallback — converts the
//    ImageProxy to a Bitmap and writes TWO files (full-size "__1" and an
//    inSampleSize=2 downsampled "__3"), hence picNum += 2.
//  - "Solve 2": OnImageSavedCallback — CameraX writes directly to MediaStore
//    (saveInGallery) or to the custom directory.
// NOTE(review): this is called in a loop from a worker thread while the
// callbacks run on the main executor, so picNum/isCaptureDone are touched from
// two threads — confirm they are volatile/atomic.
private fun takePhoto() {
// Get a stable reference of the modifiable image capture use case
val imageCapture = cameraManager.getImageCapture() ?: return
// Create time stamped name and MediaStore entry.
val name = SimpleDateFormat(FILENAME_FORMAT, Locale.US)
.format(System.currentTimeMillis())
val contentValues = ContentValues().apply {
put(MediaStore.MediaColumns.DISPLAY_NAME, name)
put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
// RELATIVE_PATH is honored on API 29+; '> P' admits exactly API 29 and later.
if(Build.VERSION.SDK_INT > Build.VERSION_CODES.P) {
put(MediaStore.Images.Media.RELATIVE_PATH, "Pictures/CameraX-Image")
}
}
// Create output options object which contains file + metadata
val outputOptions = if(saveInGallery) ImageCapture.OutputFileOptions
.Builder(contentResolver,
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
contentValues)
.build()
else {
// Non-gallery path: ensure the target directory exists, then write there.
val dir = File(filePath)
if(!dir.exists()) dir.mkdirs()
val fileData = File(filePath, name)
ImageCapture.OutputFileOptions.Builder(fileData).build()
}
//////////// v 1 ////////////////
if(firstSol && !saveInGallery) {
imageCapture.takePicture(ContextCompat.getMainExecutor(this), object :
ImageCapture.OnImageCapturedCallback() {
override fun onCaptureSuccess(image: ImageProxy) {
// Convert the raw frame and persist it twice (full + downsampled).
val bitmap = imageProxyToBitmap(image)
val file1 = convertBitmapToFile(bitmap, name + "__1") // 80.7 KB
val options = BitmapFactory.Options()
options.inSampleSize = 2
val bitmap3 = BitmapFactory.decodeFile(file1.absolutePath, options)
val file3 = convertBitmapToFile(bitmap3, name + "__3") // 7 KB
Log.d(TAG, "onCaptureSuccess: picNum = $picNum")
Log.d(TAG, "onCaptureSuccess: isCaptureDone = $isCaptureDone")
picNum += 2
image.close()
if (isCaptureDone) {
Log.d(TAG, "onCaptureSuccess: isCaptureDone")
cameraManager.stopCamera()
Log.d(TAG, "onCaptureSuccess: stopCamera")
return
}
// NOTE(review): super.onCaptureSuccess() runs AFTER image.close() and is
// skipped entirely when isCaptureDone — confirm this ordering is intended.
super.onCaptureSuccess(image)
}
override fun onError(exception: ImageCaptureException) {
super.onError(exception)
Log.d(TAG, "onError: get Bitmap error: " + exception.message)
}
})
} else {
//////////// Solve 2 ////////////////
// // Set up image capture listener, which is triggered after photo has
// // been taken
imageCapture.takePicture(
outputOptions,
ContextCompat.getMainExecutor(this),
object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Log.e(TAG, "Photo capture failed: ${exc.message}", exc)
}
override fun
onImageSaved(output: ImageCapture.OutputFileResults) {
// This path writes one file per capture.
picNum += 1
if (isCaptureDone) {
Log.d(TAG, "onCaptureSuccess: isCaptureDone")
cameraManager.stopCamera()
Log.d(TAG, "onCaptureSuccess: stopCamera")
return
}
}
}
)
}
}
I am trying to save a video, created from a custom application, to a specific folder inside the DCIM folder, side by side with the original camera folder.
/** Launches the system camera app to record a video; the result is delivered to [takeVideo]. */
private fun recordVideo() {
    takeVideo.launch(Intent(MediaStore.ACTION_VIDEO_CAPTURE))
}
// Receives the result of the system video-capture intent. On success, resolves
// the recording's file path once (the original called getRealPathFromURI()
// twice, issuing two identical content-resolver queries), logs it, and hands
// it to saveMediaFile().
private val takeVideo = registerForActivityResult(
    ActivityResultContracts.StartActivityForResult()
) { result ->
    if (result.resultCode == Activity.RESULT_OK) {
        val recordedPath = result.data?.data?.let { uri -> getRealPathFromURI(uri) }
        Log.i("VIDEO_RECORD_TAG ", "Video is available at $recordedPath")
        saveMediaFile(recordedPath, "MyVideoName")
    }
}
/**
 * Copies the video at [filePath] into Movies/<app name>/ under the name
 * [fileName], using the IS_PENDING flag so the entry is published atomically.
 * No-op when [filePath] is null or the MediaStore insert fails.
 */
private fun saveMediaFile(filePath: String?, fileName: String) {
    filePath ?: return
    // Describe the destination row; IS_PENDING=1 hides it while we write.
    val values = ContentValues().apply {
        put(MediaStore.Video.Media.DISPLAY_NAME, fileName)
        val mExtension = MimeTypeMap.getFileExtensionFromUrl(filePath)
        put(MediaStore.Video.Media.MIME_TYPE, MimeTypeMap.getSingleton().getMimeTypeFromExtension(mExtension))
        put(MediaStore.Video.Media.RELATIVE_PATH, Environment.DIRECTORY_MOVIES + "/${getString(R.string.app_name)}/")
        put(MediaStore.Video.Media.IS_PENDING, 1)
    }
    // Early return on failed insert — the original fell through and passed a
    // nullable Uri to contentResolver.update().
    val fileUri = contentResolver.insert(
        MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY),
        values
    ) ?: return
    contentResolver.openFileDescriptor(fileUri, "w")?.use { descriptor ->
        FileOutputStream(descriptor.fileDescriptor).use { out ->
            FileInputStream(File(filePath)).use { inputStream ->
                // copyTo() replaces the hand-rolled 8 KB buffer loop.
                inputStream.copyTo(out, bufferSize = 8192)
            }
        }
    }
    // Publish the entry now that the bytes are written.
    values.clear()
    values.put(MediaStore.Video.Media.IS_PENDING, 0)
    contentResolver.update(fileUri, values, null, null)
    // NOTE(review): the duplicate seen in the Camera folder is the ORIGINAL
    // recording that ACTION_VIDEO_CAPTURE saved; this function only copies it.
    // To keep only the custom folder, delete the source entry afterwards
    // (e.g. contentResolver.delete(sourceUri, null, null)) — confirm the app
    // has permission to delete media it did not create.
}
// Resolves a content URI to a filesystem path via the DATA column.
// Returns null when the query yields no cursor, "" on exception (preserved
// from the original contract).
// NOTE(review): MediaStore.Images.Media.DATA is deprecated since API 29 and
// may be empty under scoped storage — prefer working with the Uri directly.
private fun getRealPathFromURI(contentUri: Uri): String? {
    return try {
        val proj = arrayOf(MediaStore.Images.Media.DATA)
        // use {} closes the cursor on every path (the original needed a
        // manual try/finally and nullable-chained every access).
        contentResolver.query(contentUri, proj, null, null, null)?.use { cursor ->
            val columnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA)
            cursor.moveToFirst()
            cursor.getString(columnIndex)
        }
    } catch (e: Exception) {
        Log.e("TAG", "getRealPathFromURI Exception : $e")
        ""
    }
}
Using this code, the folder is created and the video is stored there, but the Camera folder still contains the same video under its original name.
I would like to have only the custom folder with the stored videos.
Any suggestions?
Thanks
I use MediaCodec.inputSurface and MediaMuxer for recording my Android view. Everything is good on most devices, but not on Huawei. For some reason it produces a video which cannot be played on these devices. But for some unknown reason, 1 out of 10 times it generates a good video. Here's a link to the broken video and to the normal video. It is also odd that both videos can be played on my Mac laptop.
Our users reported the issue from multiple Huawei models and I can confirm it from my phone: HUAWEI P8 Lite 2017, Android 7.0. It also happens on new phones with any Android version.
Here's code of how I manage the recording:
/**
 * Encodes a recording of an Android view into an MP4 file via MediaCodec and
 * MediaMuxer.
 *
 * Stages:
 * 1. Draw canvas to bitmap
 * 2. Take bitmap pixels and convert them to YUV
 * 3. Write bitmap pixels as a frame to MediaCodec (through the input [surface])
 * 4. Take mediaCodec output and write it to mediaMuxer to receive the file
 *
 * Supports a synchronous drain path ([writeToMuxerSyncMode]) and an
 * asynchronous one (MediaCodec.Callback), selected via [initialize]'s
 * videoAsyncEncoder flag. An optional audio track is muxed via [audioEncoder].
 */
class VideoEncoder(
val width: Int,
val height: Int,
val frameRate: Int,
val file: File,
val durationUs: Long,
val handler: Handler,
val videoRecordingFinished: () -> Unit,
val onError: (MediaCodec.CodecException) -> Unit
) : KoinComponent {
var mediaMuxer: MediaMuxer? = null
var videoCodec: MediaCodec? = null
var videoTrackIndex = 0
// Input surface created by the codec; the view is rendered into it.
var surface: Surface? = null
val videoBufferInfo by lazy { MediaCodec.BufferInfo() }
var writingVideoFinished = false
// Frame counter used to derive presentationTimeUs in the async path.
private var currentFrame = 0
var audioEncoder: AudioEncoder? = null
// Delegates to the audio encoder; reads as true when there is no audio track.
var writingAudioFinished: Boolean
get() = audioEncoder?.writingAudioFinished ?: true
set(value) {
audioEncoder?.writingAudioFinished = value
}
var videoFormatInited: Boolean = false
// True once both tracks (video, and audio if present) have been added to the muxer.
val allFormatsInited: Boolean
get() = videoFormatInited && (audioEncoder?.audioFormatInited != false)
// Output buffers delivered by the async callback before the muxer started;
// drained by checkVideoOutputAvailable() once all formats are known.
private val pendingVEncoderInfos = LinkedList<MediaCodec.BufferInfo>()
private val pendingVEncoderIndices = LinkedList<Int>()
val logger: KLogger by inject {
parametersOf("video-encoder")
}
// Builds the MediaFormat (bitrate, frame rate, 1 s I-frame interval, color format).
private fun createVideoFormat(mimeType: String, desiredColorFormat: Int): MediaFormat {
val mediaFormat =
MediaFormat.createVideoFormat(mimeType, width, height)
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, ENCODING_VIDEO_BITRATE)
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, this.frameRate)
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, desiredColorFormat)
return mediaFormat
}
// Picks an encoder for the first mime type in POSSIBLE_MIME_TYPES that supports
// surface input; assigns [videoCodec] and throws IllegalStateException when no
// suitable codec/color format is available.
private fun findCorrectVideoFormat(): MediaFormat {
val mimeType = POSSIBLE_MIME_TYPES[0]
val desiredColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
val mediaFormat = createVideoFormat(mimeType, desiredColorFormat)
val encoderForFormat =
MediaCodecList(MediaCodecList.REGULAR_CODECS).findEncoderForFormat(mediaFormat)
if (encoderForFormat == null) {
// Fallback: let the framework pick any encoder for the mime type.
logger.info { "encoderForFormatIsNull!!! width = $width, height = $height" }
videoCodec = MediaCodec.createEncoderByType(mimeType)
} else {
videoCodec = MediaCodec.createByCodecName(encoderForFormat)
}
val codecInfo = videoCodec!!.codecInfo
if (codecInfo.isEncoder && codecInfo.supportedTypes.contains(mimeType) &&
codecInfo.getCapabilitiesForType(mimeType).colorFormats
.contains(desiredColorFormat)
) {
} else {
throw IllegalStateException("MediaCodec is wrong = ${codecInfo}")
}
val errorMessage = checkIsColorFormatSupported(mediaFormat, desiredColorFormat, mimeType)
if (errorMessage != null)
throw IllegalStateException(errorMessage)
return mediaFormat
}
// Returns null when [desiredColorFormat] is supported by the selected codec,
// otherwise an error message listing all supported color formats.
fun checkIsColorFormatSupported(
mediaFormat: MediaFormat,
desiredColorFormat: Int,
mimeType: String
): String? {
var colorFormats = videoCodec!!.codecInfo.getCapabilitiesForType(mimeType).colorFormats
var colorFormatSize = colorFormats.size
var counterColorFormat = 0
val colorFormatCorrect: Boolean
// Linear scan for the desired color format.
while (true) {
if (counterColorFormat >= colorFormatSize) {
colorFormatCorrect = false
break
}
if (colorFormats[counterColorFormat] == desiredColorFormat) {
colorFormatCorrect = true
break
}
++counterColorFormat
}
if (!colorFormatCorrect) {
// Build a diagnostic message enumerating every supported format.
var message = "NO COLOR FORMAT COMPATIBLE\\n$mediaFormat"
colorFormats = videoCodec!!.codecInfo.getCapabilitiesForType(mimeType).colorFormats
colorFormatSize = colorFormats.size
counterColorFormat = 0
while (counterColorFormat < colorFormatSize) {
val sb = StringBuilder()
sb.append(message)
sb.append("\\n")
sb.append(colorFormats[counterColorFormat])
message = sb.toString()
logger.debug { message }
++counterColorFormat
}
return message
}
return null
}
// Logs the selected codec's identity and the output dimensions (debug only).
private fun printVideoCodecInfo() {
logger.debug {
val json = JSONObject()
json.put("codec_name", videoCodec!!.name)
json.put("codec_info_name", videoCodec!!.codecInfo.name)
json.put("codec_supported_types", videoCodec!!.codecInfo.supportedTypes)
json.put("output_width", width)
json.put("output_height", height)
json.toString()
}
}
// Configures the codec (optionally with the async callback), creates the input
// surface, and opens the MP4 muxer. Must be called before startVideoCodec().
#Throws(Exception::class)
fun initialize(videoAsyncEncoder: Boolean) {
val filePath = file.canonicalPath
val mediaFormat = findCorrectVideoFormat()
printVideoCodecInfo()
if (videoAsyncEncoder) {
videoCodec!!.setCallback(object : MediaCodec.Callback() {
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Input arrives via the surface, not via input buffers.
}
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
// Queue output until the muxer has all track formats, then drain.
pendingVEncoderIndices.add(index)
pendingVEncoderInfos.add(info)
if (allFormatsInited)
checkVideoOutputAvailable()
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
writingVideoFinished = true
e.printDebug()
onError.invoke(e)
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
onVideoFormatChanged(format)
}
}, handler)
}
videoCodec!!.configure(
mediaFormat, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE
)
surface = videoCodec!!.createInputSurface()
mediaMuxer = MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
}
// Creates and starts the audio encoder sharing this instance's muxer.
fun initAudio(
path: String,
startTimeUs: Long,
volume: Int,
audioRecordingFinished: () -> Unit
) {
audioEncoder = AudioEncoder(
mediaMuxer!!,
handler,
durationUs,
::checkFormatsInited,
audioRecordingFinished
)
audioEncoder!!.initAudio(path, startTimeUs, volume)
audioEncoder!!.startAudioCodec()
}
fun canWriteAudio() {
audioEncoder?.canWriteAudio()
}
fun getCurrentAudioTime() = audioEncoder?.getCurrentAudioTime()
// Registers the encoder's negotiated output format as the muxer's video track.
private fun onVideoFormatChanged(format: MediaFormat) {
videoTrackIndex =
mediaMuxer!!.addTrack(format)
videoFormatInited = true
checkFormatsInited()
}
// Starts the muxer once every track format is known, then drains pending output.
fun checkFormatsInited() {
if (allFormatsInited) {
mediaMuxer!!.start()
checkVideoOutputAvailable()
}
}
// Synchronous drain loop: dequeues encoded buffers and writes them to the muxer.
// Returns true if at least one sample was written. presentationTimeUs is derived
// from [currentFrame] when it is supplied (>= 0).
#Throws(IllegalStateException::class)
fun writeToMuxerSyncMode(currentFrame: Int = -1): Boolean {
var success = false
while (videoCodec != null && mediaMuxer != null) {
val outputBufferIndex = videoCodec!!.dequeueOutputBuffer(videoBufferInfo, 0L)
logger.info {
"writeToMuxer, outputBufferIndex = ${outputBufferIndex}, bufferFlag = ${videoBufferInfo.flags}," +
" presentationTime = ${((currentFrame * 1000000L) / frameRate)}," +
" bufferInfo.size ${videoBufferInfo.size}, bufferInfo.offset ${videoBufferInfo.offset}"
}
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
onVideoFormatChanged(videoCodec!!.outputFormat)
} else {
// Negative index (TRY_AGAIN_LATER etc.): nothing to drain right now.
if (outputBufferIndex < 0) {
return success
}
success = true
val bufferInfo = videoBufferInfo
if (bufferInfo.offset >= 0 && bufferInfo.size > 0) {
val outputBuffer = videoCodec!!.getOutputBuffer(outputBufferIndex)!!
outputBuffer.position(this.videoBufferInfo.offset)
outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
if (currentFrame != -1) {
// NOTE(review): flags are compared with '==' against
// BUFFER_FLAG_CODEC_CONFIG; a buffer carrying CODEC_CONFIG
// combined with another flag bit would not match — confirm a
// bitwise 'and' test was intended.
if (videoBufferInfo.flags == MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
success = false
else
bufferInfo.presentationTimeUs = (currentFrame * 1000000L) / frameRate
}
mediaMuxer!!.writeSampleData(
videoTrackIndex,
outputBuffer,
this.videoBufferInfo
)
}
videoCodec!!.releaseOutputBuffer(outputBufferIndex, false)
if (bufferInfo.flags.and(MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
return success
}
}
}
return success
}
private fun onVideoWritingFinished() {
writingVideoFinished = true
videoRecordingFinished.invoke()
}
// Drains the queued async output buffers in FIFO order.
private fun checkVideoOutputAvailable() {
while (pendingVEncoderIndices.size > 0 &&
pendingVEncoderInfos.size > 0 && videoCodec != null
) {
val index = pendingVEncoderIndices.removeFirst()
val info = pendingVEncoderInfos.removeFirst()
onVideoOutputAvailable(videoCodec!!, index, info)
}
}
// Writes one async output buffer to the muxer, stamping its presentation time
// from [currentFrame]; EOS buffers finish the video track instead.
private fun onVideoOutputAvailable(codec: MediaCodec, index: Int, info: MediaCodec.BufferInfo) {
if (videoCodec == null)
return
if (info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
codec.releaseOutputBuffer(index, false)
onVideoWritingFinished()
} else {
val outputBuffer = codec.getOutputBuffer(index)!!
outputBuffer.position(info.offset)
outputBuffer.limit(info.offset + info.size)
info.presentationTimeUs = (currentFrame * 1000000L) / frameRate
// NOTE(review): '!=' equality check, not a bitmask test — and the codec
// config buffer is still written to the muxer below with a stamped
// timestamp; confirm config buffers should not be skipped entirely.
if (info.flags != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
currentFrame++
}
logger.info {
"videoOutputAvailable time ${info.presentationTimeUs}, flags ${info.flags}," +
" size ${info.size}, offset ${info.offset}"
}
mediaMuxer!!.writeSampleData(
videoTrackIndex,
outputBuffer, info
)
codec.releaseOutputBuffer(index, false)
}
}
fun startVideoCodec() {
videoCodec?.start()
}
// Tears everything down: audio first, then codec (stop+release), then muxer.
// Safe to call repeatedly; IllegalStateException from stop() is swallowed.
fun stop() {
audioEncoder?.stop()
pendingVEncoderInfos.clear()
pendingVEncoderIndices.clear()
surface?.release()
surface = null
if (videoCodec != null) {
try {
videoCodec?.stop()
} catch (e: IllegalStateException) {
} finally {
videoCodec?.release()
videoCodec = null
}
}
if (mediaMuxer != null) {
try {
mediaMuxer?.release()
} catch (e: IllegalStateException) {
logger.error(e)
} finally {
mediaMuxer = null
}
}
}
// Signals end-of-stream on the input surface; in sync mode the "finished"
// callback is invoked immediately (async mode waits for the EOS output buffer).
fun sendEndOfStreamSurface() {
videoCodec?.signalEndOfInputStream()
if (!ThreadRecord.VIDEO_CODEC_ASYNC) {
onVideoWritingFinished()
}
}
companion object {
const val ENCODING_VIDEO_BITRATE = 12000000
// Tried in order; only the first entry is currently used by findCorrectVideoFormat().
val POSSIBLE_MIME_TYPES = arrayOf("video/avc", "video/hevc", "video/x-vnd.on2.vp8")
}
}
I have created an application which uses Firebase push notifications to receive commands and perform tasks.
My app doesn't have any visible activity, just a service that continuously works in the background.
I have implemented screenshot functionality using the MediaProjection API.
When I get the Screenshot command, the app launches ScreenProjectionActivity, takes a screenshot, and finishes. But when it receives the Screenshot command again, ScreenProjectionActivity doesn't launch again. I don't know what I am doing wrong, or where.
Here is how I am launching it from the service:
// Launch the screenshot activity from a service context: FLAG_ACTIVITY_NEW_TASK
// is mandatory when starting an activity from a non-activity context;
// CLEAR_TASK empties the target task first.
context.startActivity(
Intent(this, ScreenProjectionActivity::class.java)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
)
ScreenProjectionActivity.kt
// Invisible helper activity that asks for MediaProjection permission, grabs one
// screenshot via ImageReader, saves it as a JPEG, and finishes itself.
class ScreenProjectionActivity : Activity()
{
lateinit var context: Context
// Handler bound to a dedicated background looper thread; ImageReader
// callbacks and the virtual display are serviced on it.
private var mHandler: Handler? = null
#RequiresApi(Build.VERSION_CODES.KITKAT_WATCH)
override fun onCreate(savedInstanceState: Bundle?)
{
super.onCreate(savedInstanceState)
// Minimal (empty) content view — the activity is effectively invisible.
val tv = TextView(this)
tv.text = ""
setContentView(tv)
context = this
log("onCreate")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
{
// Ask the user for screen-capture permission; result arrives in onActivityResult.
val mgr = getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
startActivityForResult(mgr.createScreenCaptureIntent(), 7575)
// start capture handling thread
object : Thread() {
override fun run() {
Looper.prepare()
mHandler = Handler()
Looper.loop()
}
}.start()
}
}
#RequiresApi(Build.VERSION_CODES.LOLLIPOP)
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (requestCode == 7575 && resultCode == RESULT_OK)
{
log("if taking screen")
//TakeScreenShot(applicationContext, Handler(Looper.getMainLooper()), resultCode, data).start()
takeScreenShot(resultCode, data)
}
// NOTE(review): onBackPressed() is invoked unconditionally here (even on
// success) to dismiss the activity — confirm this is the intended way to
// close it rather than finish().
super.onBackPressed()
}
// Creates the MediaProjection + virtual display mirroring the default display
// into an ImageReader, and saves the first frame it delivers.
#RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private fun takeScreenShot(resultCode: Int, data: Intent?)
{
log("takeScreenshot")
// NOTE(review): blocks the main thread for 1 s before capturing — presumably
// to let the permission dialog disappear from the frame; confirm.
SystemClock.sleep(1000)
var flagScreenShot = true
val metrics = DisplayMetrics()
val windowManager = getSystemService(Context.WINDOW_SERVICE) as WindowManager
val mgr = getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
windowManager.defaultDisplay.getMetrics(metrics)
val mMediaProjection = mgr.getMediaProjection(resultCode, data!!)
val imgReader: ImageReader = ImageReader.newInstance(
metrics.widthPixels,
metrics.heightPixels,
PixelFormat.RGBA_8888,
1
)
val onImageAvailableListener =
OnImageAvailableListener {
log("onImageAvailableListener")
val image: Image? = it?.acquireLatestImage()
// flagScreenShot guards against processing more than one frame.
if (image != null && flagScreenShot)
{
flagScreenShot = false
mMediaProjection?.stop()
log("mMediaProjection Stopped!")
imgReader.setOnImageAvailableListener(null, null)
// Reconstruct a Bitmap from the RGBA plane, accounting for the
// row padding the buffer may carry.
val mWidth = image.width
val mHeight = image.height
val planes = image.planes
val buffer = planes[0].buffer
val pixelStride = planes[0].pixelStride
val rowStride = planes[0].rowStride
val rowPadding = rowStride - pixelStride * mWidth
val bitmap = Bitmap.createBitmap(
mWidth + rowPadding / pixelStride,
mHeight,
Bitmap.Config.ARGB_8888
)
bitmap.copyPixelsFromBuffer(buffer)
saveImage(bitmap)
}
log("image close")
image?.close()
}
mMediaProjection?.createVirtualDisplay(
"ScreenCapture",
metrics.widthPixels,
metrics.heightPixels,
metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
imgReader.surface,
null,
mHandler
)
imgReader.setOnImageAvailableListener(onImageAvailableListener, mHandler)
}
// Writes the bitmap as a JPEG to <external storage>/saved_images and finishes
// the activity on success.
// NOTE(review): Environment.getExternalStorageDirectory() is deprecated and
// this raw path needs legacy storage permissions — confirm on API 29+.
private fun saveImage(finalBitmap: Bitmap) {
val root: String = Environment.getExternalStorageDirectory().toString()
val myDir = File("$root/saved_images")
myDir.mkdirs()
val timeStamp: String = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.ENGLISH).format(Date())
val fname = "Shutta_$timeStamp.jpg"
val file = File(myDir, fname)
if (file.exists()) file.delete()
try
{
val out = FileOutputStream(file)
finalBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
out.flush()
out.close()
log("Image Saved.")
// Only finish on success; on failure the activity stays alive.
finish()
} catch (e: Exception) {
log("Image Saved Exception: $e")
}
}
// Base64-encodes a bitmap as JPEG; not called from within this class as shown.
private fun encodeImage(bm: Bitmap): String {
val baos = ByteArrayOutputStream()
bm.compress(Bitmap.CompressFormat.JPEG, 100, baos)
val b = baos.toByteArray()
return Base64.encodeToString(b, Base64.DEFAULT)
}
override fun onDestroy() {
super.onDestroy()
}
}
Please help me out here. Thanks.
I solved it myself. What I did is start the activity with these flags:
// Working variant: FLAG_ACTIVITY_CLEAR_TOP is added so any lingering instance
// of the activity is removed, letting it relaunch on every subsequent
// screenshot command; NEW_TASK remains required from a service context.
applicationContext.startActivity(
Intent(this, ScreenProjectionActivity::class.java)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
)
and in Manifest:
<!-- Translucent, excluded from recents so the capture activity is invisible.
     Fixed: '#android:style/...' is not a valid resource reference; Android
     resource references in manifests use the '@' prefix. -->
<activity
    android:name=".ScreenProjectionActivity"
    android:excludeFromRecents="true"
    android:theme="@android:style/Theme.Translucent.NoTitleBar.Fullscreen" />