I have created an application that uses Firebase push notifications to receive commands and perform the requested tasks.
My app doesn't have any visible activity, only a service that runs continuously in the background.
I have implemented screenshot functionality using the MediaProjection API.
When the Screenshot command arrives, the app launches ScreenProjectionActivity, takes the screenshot, and finishes. But when the Screenshot command arrives again, ScreenProjectionActivity doesn't launch. I don't know what I am doing wrong, or where.
Here is how I am launching it from the service:
context.startActivity(
Intent(this, ScreenProjectionActivity::class.java)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
)
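For context, the FCM side is roughly like this (a simplified sketch; the "command"/"screenshot" key and value and the service class name are placeholders, not my exact payload):
class CommandMessagingService : FirebaseMessagingService() {
    override fun onMessageReceived(message: RemoteMessage) {
        // dispatch based on the command carried in the data payload
        when (message.data["command"]) {
            "screenshot" -> startActivity(
                Intent(this, ScreenProjectionActivity::class.java)
                    .addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
                    .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
            )
        }
    }
}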
ScreenProjectionActivity.kt
class ScreenProjectionActivity : Activity()
{
lateinit var context: Context
private var mHandler: Handler? = null
@RequiresApi(Build.VERSION_CODES.KITKAT_WATCH)
override fun onCreate(savedInstanceState: Bundle?)
{
super.onCreate(savedInstanceState)
val tv = TextView(this)
tv.text = ""
setContentView(tv)
context = this
log("onCreate")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
{
val mgr = getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
startActivityForResult(mgr.createScreenCaptureIntent(), 7575)
// start capture handling thread
object : Thread() {
override fun run() {
Looper.prepare()
mHandler = Handler()
Looper.loop()
}
}.start()
}
}
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (requestCode == 7575 && resultCode == RESULT_OK)
{
log("if taking screen")
//TakeScreenShot(applicationContext, Handler(Looper.getMainLooper()), resultCode, data).start()
takeScreenShot(resultCode, data)
}
super.onBackPressed()
}
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private fun takeScreenShot(resultCode: Int, data: Intent?)
{
log("takeScreenshot")
SystemClock.sleep(1000)
var flagScreenShot = true
val metrics = DisplayMetrics()
val windowManager = getSystemService(Context.WINDOW_SERVICE) as WindowManager
val mgr = getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
windowManager.defaultDisplay.getMetrics(metrics)
val mMediaProjection = mgr.getMediaProjection(resultCode, data!!)
val imgReader: ImageReader = ImageReader.newInstance(
metrics.widthPixels,
metrics.heightPixels,
PixelFormat.RGBA_8888,
1
)
val onImageAvailableListener =
OnImageAvailableListener {
log("onImageAvailableListener")
val image: Image? = it?.acquireLatestImage()
if (image != null && flagScreenShot)
{
flagScreenShot = false
mMediaProjection?.stop()
log("mMediaProjection Stopped!")
imgReader.setOnImageAvailableListener(null, null)
val mWidth = image.width
val mHeight = image.height
val planes = image.planes
val buffer = planes[0].buffer
val pixelStride = planes[0].pixelStride
val rowStride = planes[0].rowStride
val rowPadding = rowStride - pixelStride * mWidth
val bitmap = Bitmap.createBitmap(
mWidth + rowPadding / pixelStride,
mHeight,
Bitmap.Config.ARGB_8888
)
bitmap.copyPixelsFromBuffer(buffer)
saveImage(bitmap)
}
log("image close")
image?.close()
}
mMediaProjection?.createVirtualDisplay(
"ScreenCapture",
metrics.widthPixels,
metrics.heightPixels,
metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
imgReader.surface,
null,
mHandler
)
imgReader.setOnImageAvailableListener(onImageAvailableListener, mHandler)
}
private fun saveImage(finalBitmap: Bitmap) {
val root: String = Environment.getExternalStorageDirectory().toString()
val myDir = File("$root/saved_images")
myDir.mkdirs()
val timeStamp: String = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.ENGLISH).format(Date())
val fname = "Shutta_$timeStamp.jpg"
val file = File(myDir, fname)
if (file.exists()) file.delete()
try
{
val out = FileOutputStream(file)
finalBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
out.flush()
out.close()
log("Image Saved.")
} catch (e: Exception) {
log("Image Saved Exception: $e")
} finally {
finish()
}
}
private fun encodeImage(bm: Bitmap): String {
val baos = ByteArrayOutputStream()
bm.compress(Bitmap.CompressFormat.JPEG, 100, baos)
val b = baos.toByteArray()
return Base64.encodeToString(b, Base64.DEFAULT)
}
override fun onDestroy() {
super.onDestroy()
}
}
Please help me out here. Thanks.
I solved it myself. What I did was start the activity with these flags:
applicationContext.startActivity(
Intent(this, ScreenProjectionActivity::class.java)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
)
and in the manifest:
<activity
android:name=".ScreenProjectionActivity"
android:excludeFromRecents="true"
android:theme="#android:style/Theme.Translucent.NoTitleBar.Fullscreen" />
This is my code for the class file:
class LiveVideoFragment : BaseFragment(), IVLCVout.Callback {
private var libvlc: LibVLC? = null
private lateinit var holder: SurfaceHolder
private lateinit var mMediaPlayer: MediaPlayer
private var mFilePath = "rtsp://192.168.0.1:554/livestream/1"
private var mVideoWidth = 0
private var mVideoHeight = 0
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
return inflater.inflate(R.layout.fragment_rove_r3_live_video, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
showLiveVideoWhenR3Connected()
}
override fun onPause() {
super.onPause()
Log.d("CALLBACKK", "onPause")
Handler(Looper.getMainLooper()).postDelayed({ releasePlayer() }, 200)
}
private fun showLiveVideoWhenR3Connected() {
handleRtsp()
createPlayer(mFilePath)
}
override fun onNewLayout(
vlcVout: IVLCVout?,
width: Int,
height: Int,
visibleWidth: Int,
visibleHeight: Int,
sarNum: Int,
sarDen: Int
) {
if (width * height == 0) return
mVideoWidth = width
mVideoHeight = height
setSize(mVideoWidth, mVideoHeight)
}
override fun onSurfacesCreated(vlcVout: IVLCVout?) {
}
override fun onSurfacesDestroyed(vlcVout: IVLCVout?) {
}
override fun onHardwareAccelerationError(vlcVout: IVLCVout?) {
Handler(Looper.getMainLooper()).postDelayed({ releasePlayer() }, 200)
Toast.makeText(
requireContext(),
R.string.error_with_hardware_acceleration,
Toast.LENGTH_LONG
)
.show()
}
fun createPlayer(path: String) {
try {
releasePlayer()
// TODO: make this more robust, and sync with audio demo
val options = ArrayList<String>()
options.add("--audio-time-stretch") // time stretching
options.add("-vvv") // verbosity
options.add("--rtsp-tcp")
libvlc = LibVLC(context, options)
holder.setKeepScreenOn(true)
// Creating media player
mMediaPlayer = MediaPlayer(libvlc)
mMediaPlayer.setEventListener(mPlayerListener)
// Setting up video output
val vout: IVLCVout = mMediaPlayer.vlcVout
vout.setVideoView(view_surface)
vout.addCallback(this)
vout.attachViews()
val m = Media(libvlc, Uri.parse(path))
val cache = 500
m.addOption(":network-caching=$cache")
m.addOption(":file-caching=$cache")
m.addOption(":live-cacheing=$cache")
m.addOption(":sout-mux-caching=$cache")
m.addOption(":codec=mediacodec,iomx,all")
mMediaPlayer.media = m
mMediaPlayer.play()
val volume: Int = mMediaPlayer.volume
mMediaPlayer.volume = 0
Log.i("TAG", "createPlayerVolume: $volume")
} catch (e: Exception) {
Log.i("TAG", "createPlayer: " + e.localizedMessage)
}
}
private fun releasePlayer() {
if (libvlc == null)
return
mMediaPlayer.stop()
val vout = mMediaPlayer.vlcVout
vout.removeCallback(this)
vout.detachViews()
libvlc?.release()
}
private fun handleRtsp() {
holder = view_surface.holder
}
private val mPlayerListener: MediaPlayer.EventListener = MyPlayerListener(this)
inner class MyPlayerListener(owner: LiveVideoFragment) : MediaPlayer.EventListener {
private val mOwner: WeakReference<LiveVideoFragment>
init {
mOwner = WeakReference(owner)
}
override fun onEvent(event: MediaPlayer.Event) {
val player = mOwner.get()
when (event.type) {
MediaPlayer.Event.EndReached -> {
Log.d("MediaPlayerEVENTERRO", "MediaPlayerEndReached")
player?.releasePlayer()
}
MediaPlayer.Event.EncounteredError -> {
player?.releasePlayer()
libvlc?.release()
mMediaPlayer.stop()
mMediaPlayer.pause()
mMediaPlayer.retain()
mMediaPlayer.isSeekable
createPlayer(mFilePath)
val m = Media(libvlc, Uri.parse(mFilePath))
val cache = 1500
// m.addOption(":network-caching=$cache")
// m.addOption(":file-caching=$cache")
// m.addOption(":live-cacheing=$cache")
// m.addOption(":sout-mux-caching=$cache")
// m.addOption(":codec=mediacodec,iomx,all")
mMediaPlayer.media = m
mMediaPlayer.play()
Log.d("MediaPlayerEVENTERROR", "Media Player Error, re-try")
}
MediaPlayer.Event.Playing, MediaPlayer.Event.Paused, MediaPlayer.Event.Stopped -> {}
else -> {}
}
}
}
private fun setSize(width: Int, height: Int) {
mVideoWidth = width
mVideoHeight = height
if (mVideoWidth * mVideoHeight <= 1) return
if (holder == null || view_surface == null) return
var w = activity?.window?.decorView?.width
var h = activity?.window?.decorView?.height
val isPortrait = resources.configuration.orientation == Configuration.ORIENTATION_PORTRAIT
if (w != null) {
if (w > h!! && isPortrait || w < h && !isPortrait) {
val i = w
w = h
h = i
}
}
val videoAR = mVideoWidth.toFloat() / mVideoHeight.toFloat()
val screenAR = h?.toFloat()?.let { w?.toFloat()?.div(it) }
if (screenAR != null) {
if (w != null && h != null) {
if (screenAR < videoAR) h = (w / videoAR).toInt() else w = (h * videoAR).toInt()
}
}
holder.setFixedSize(mVideoWidth, mVideoHeight)
val lp: ViewGroup.LayoutParams = view_surface.layoutParams
if (w != null) {
lp.width = w
}
if (h != null) {
lp.height = h
}
view_surface.layoutParams = lp
view_surface.invalidate()
}
}
This code is working fine; I am able to show live video using the RTSP player.
Sometimes, when I open the app quickly, I get MediaPlayer.Event.EncounteredError. I don't know the reason, but in that handler I try to restart and recreate the media player, and still the video does not play; the SurfaceView just shows a black background. I actually want to restart the live broadcast whenever any error occurs, such as a hardware acceleration error or MediaPlayer.Event.EncounteredError. Please help me figure out what I am doing wrong and how to restart the live view.
I am using the library below to show the live video:
implementation "de.mrmaffen:vlc-android-sdk:2.0.6"
implementation 'org.videolan.libvlc:libvlc_options:2.0.6'
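One thing I am considering (just a sketch, not verified): instead of calling stop/pause/retain on the player that just failed, tear everything down and recreate it on the main thread after a short delay. The 500 ms delay is an arbitrary choice:
// helper inside LiveVideoFragment: tear down and recreate the player after a delay
private fun restartStream() {
    releasePlayer()
    libvlc = null   // so the next createPlayer() does not reuse the released LibVLC instance
    Handler(Looper.getMainLooper()).postDelayed({
        if (isAdded) {              // only restart while the fragment is still attached
            createPlayer(mFilePath)
        }
    }, 500)
}
restartStream() would then be called from both onHardwareAccelerationError and the MediaPlayer.Event.EncounteredError branch.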
I am trying to create a video player app using Kotlin. First I get the video files using MediaStore and store them in an ArrayList, and so far it has been perfect. But when I made a folder list of the videos, I tried to find out the size of those folders and how many video files there are in them, and I failed, like this (image).
Check this image for more clarity.
This is my data class code (VideoItem.kt):
import android.net.Uri
data class VideoItem(
val id: String,
val title: String,
val duration: Long = 0,
val folderName: String,
val size: String,
val path: String,
val dateAdded: String,
val artUri: Uri
)
data class FolderItem(
val id: String,
val folderName: String,
val folderSize: Long
)
This is my MainActivity code to get all videos using MediaStore:
class MainActivity : AppCompatActivity() {
private lateinit var binding: ActivityMainBinding
companion object {
lateinit var videoList: ArrayList<VideoItem>
lateinit var folderList: ArrayList<FolderItem>
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)
folderList = ArrayList()
videoList = getAllVideos()
setFragment(VideoviewFragment())
}
private fun setFragment(fragment: Fragment) {
val transaction = supportFragmentManager.beginTransaction()
transaction.replace(R.id.FrameLayout, fragment)
transaction.disallowAddToBackStack()
transaction.commit()
}
@SuppressLint("Recycle", "Range")
private fun getAllVideos(): ArrayList<VideoItem> {
val tempList = ArrayList<VideoItem>()
val tempFolderList = ArrayList<String>()
val projection = arrayOf(
MediaStore.Video.Media.TITLE,
MediaStore.Video.Media.SIZE,
MediaStore.Video.Media._ID,
MediaStore.Video.Media.BUCKET_DISPLAY_NAME,
MediaStore.Video.Media.DATA,
MediaStore.Video.Media.DATE_ADDED,
MediaStore.Video.Media.DURATION,
MediaStore.Video.Media.BUCKET_ID
)
val cursor = this.contentResolver.query(
MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
projection,
null,
null,
MediaStore.Video.Media.DATE_ADDED + " DESC"
)
if (cursor != null)
if (cursor.moveToNext())
do {
val titleC =
cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.TITLE))
val idC = cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media._ID))
val folderNameC =
cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.BUCKET_DISPLAY_NAME))
val folderIdC =
cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.BUCKET_ID))
val sizeC = cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.SIZE))
val pathC = cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.DATA))
val dateAddedC =
cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.DATE_ADDED))
val durationC =
cursor.getString(cursor.getColumnIndex(MediaStore.Video.Media.DURATION))
.toLong()
try {
val file = File(pathC)
val artUriC = Uri.fromFile(file)
val video = VideoItem(
title = titleC,
id = idC,
folderName = folderNameC,
size = sizeC,
path = pathC,
duration = durationC,
dateAdded = dateAddedC,
artUri = artUriC
)
if (file.exists()) tempList.add(video)
//for adding Folders
if (!tempFolderList.contains(folderNameC)) {
tempFolderList.add(folderNameC)
val folderSizeC = getFileLength(pathC)
folderList.add(
FolderItem(
id = folderIdC,
folderName = folderNameC,
folderSize = folderSizeC
)
)
}
} catch (_: Exception) {
}
} while (cursor.moveToNext())
cursor?.close()
return tempList
}
private fun getFileLength(path: String?): Long {
return if (!isExistFile(path)) 0 else File(path.toString()).length()
}
private fun isExistFile(path: String?): Boolean {
val file = File(path.toString())
return file.exists()
}
}
This is my RecyclerView adapter code (FoldersAdapter.kt):
class FoldersAdapter(private val context: Context, private var foldersList: ArrayList<FolderItem>) :
RecyclerView.Adapter<FoldersAdapter.MyHolder>() {
class MyHolder(binding: FolderItemBinding) : RecyclerView.ViewHolder(binding.root) {
val folderName = binding.folderName
val noofFiles = binding.nooffiles
val folderSize = binding.foldersize
val root = binding.root
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): MyHolder {
return MyHolder(FolderItemBinding.inflate(LayoutInflater.from(context), parent, false))
}
override fun onBindViewHolder(holder: MyHolder, position: Int) {
holder.folderName.text = foldersList[position].folderName
val size: Long = foldersList[position].folderSize
holder.folderSize.text = android.text.format.Formatter.formatFileSize(context, (size))
holder.root.setOnClickListener {
val intent = Intent(context, FolderVideosActivity::class.java)
intent.putExtra("position", position)
ContextCompat.startActivity(context, intent, null)
}
}
override fun getItemCount(): Int {
return foldersList.size
}
}
That is all my code. Please check it out and suggest the best approach.
Thank you.
Use this function for the folder size:
private fun getFolderSize(f: File): Long {
var size: Long = 0
if (f.isDirectory) {
for (file in f.listFiles()!!) {
size += getFolderSize(file)
}
} else {
size = f.length()
}
return size
}
And to count the number of files, use this:
val length = File("/path/to/folder").listFiles()?.size
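If you also want the per-folder video count and total size without walking the filesystem again, one option (a sketch, assuming VideoItem.size holds the MediaStore SIZE column in bytes, as a string) is to group the already-loaded videoList:
data class FolderStats(val folderName: String, val videoCount: Int, val totalBytes: Long)

fun folderStats(videos: List<VideoItem>): List<FolderStats> =
    videos.groupBy { it.folderName }
        .map { (name, items) ->
            FolderStats(
                folderName = name,
                videoCount = items.size,                                   // number of videos in this folder
                totalBytes = items.sumOf { it.size.toLongOrNull() ?: 0L }  // sum of the SIZE values
            )
        }
In onBindViewHolder you could then show videoCount in nooffiles and Formatter.formatFileSize(context, totalBytes) in foldersize.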
I've successfully captured images with CameraX into JPEG files. The problem is that the file size is big: on an Android-powered walkie-talkie (Android 11), the result is typically 4 to 6 MB, so I'd like to resize it.
Here's my code:
fun takePhoto() {
val FILENAME_FORMAT = "ddMMyyyy_HHmmss"
val capturedContentValues = ContentValues()
capturedContentValues.put(MediaStore.MediaColumns.DISPLAY_NAME,
"CARAKA_"+SimpleDateFormat(FILENAME_FORMAT, Locale.US).format(System.currentTimeMillis()))
capturedContentValues.put(MediaStore.MediaColumns.RELATIVE_PATH, Environment.DIRECTORY_DCIM+"/TESTAPP")
capturedContentValues.put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
val outputOptions= ImageCapture.OutputFileOptions.Builder(
context.contentResolver,
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
capturedContentValues
).build()
imageCapture.takePicture(
outputOptions,
ContextCompat.getMainExecutor(context),
object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Toast.makeText(context,"Photo capture failed: ${exc.message}", Toast.LENGTH_SHORT).show()
}
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
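// getFile() copies the captured image from the MediaStore Uri into this app's filesDir, so theFile is a private copy of the photo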
val theFile = getFile(context, output.savedUri!!)
val capturedBitmap = BitmapFactory.decodeFile(theFile!!.absolutePath)
val resizedBitmap = getResizedBitmap(capturedBitmap, 1024)
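// this FileOutputStream targets that filesDir copy, so the original full-size JPEG under DCIM/TESTAPP is not overwritten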
val fout = FileOutputStream(theFile.absolutePath)
resizedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, fout)
fout.flush()
fout.close()
}
})
}
fun getResizedBitmap(image: Bitmap, maxSize: Int): Bitmap {
var width = image.width
var height = image.height
val bitmapRatio = width.toFloat() / height.toFloat()
if (bitmapRatio > 1) {
width = maxSize
height = (width / bitmapRatio).toInt()
} else {
height = maxSize
width = (height * bitmapRatio).toInt()
}
return Bitmap.createScaledBitmap(image, width, height, true)
}
@Throws(IOException::class)
fun getFile(context: Context, uri: Uri): File? {
val destinationFilename =
File(context.filesDir.path + File.separatorChar + queryName(context, uri))
try {
context.contentResolver.openInputStream(uri).use { ins ->
createFileFromStream(
ins!!,
destinationFilename
)
}
} catch (ex: java.lang.Exception) {
Log.e("Save File", ex.message!!)
ex.printStackTrace()
}
return destinationFilename
}
fun createFileFromStream(ins: InputStream, destination: File?) {
try {
FileOutputStream(destination).use { os ->
val buffer = ByteArray(4096)
var length: Int
while (ins.read(buffer).also { length = it } > 0) {
os.write(buffer, 0, length)
}
os.flush()
}
} catch (ex: java.lang.Exception) {
Log.e("Save File", ex.message!!)
ex.printStackTrace()
}
}
private fun queryName(context: Context, uri: Uri): String {
val returnCursor: Cursor = context.contentResolver.query(uri, null, null, null, null)!!
val nameIndex: Int = returnCursor.getColumnIndex(OpenableColumns.DISPLAY_NAME)
returnCursor.moveToFirst()
val name: String = returnCursor.getString(nameIndex)
returnCursor.close()
return name
}
The saved JPEGs are still 4 to 6 MB, not reduced to hundreds of KB. What's wrong here?
Instead of resizing it afterwards, another thing you can try is taking a smaller picture. You can set the target resolution to be lower when building the ImageCapture use case: https://developer.android.com/reference/androidx/camera/core/ImageCapture.Builder#setTargetResolution(android.util.Size)
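For example, something like this when building the use case (just a sketch; the 1280x720 value is only an illustration, and newer CameraX versions may prefer ResolutionSelector instead):
val imageCapture = ImageCapture.Builder()
    .setTargetResolution(Size(1280, 720))   // android.util.Size; pick whatever resolution fits your needs
    .build()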
I want to use an AccessibilityService to take a screen capture when a specific app is in use.
Running the AccessibilityService and using windowManager.addView to display the overlaid button on the screen was successful.
But how do I take a screenshot of a specific app?
class MyAccessibilityService : AccessibilityService() {
var layout: LinearLayout? = null
var flayout: View? = null
var initialX = 0f
var initialY = 0f
var initialTouchX = 0f
var initialTouchY = 0f
var lastAction: Int? = null
val flayoutParams = LayoutParams()
var componentName: ComponentName? = null;
var lastActivity: ActivityInfo? = null;
fun screenShot(view: View): Bitmap? {
val bitmap = Bitmap.createBitmap(
view.width,
view.height, Bitmap.Config.ARGB_8888
)
val canvas = Canvas(bitmap)
view.draw(canvas)
return bitmap
}
@RequiresApi(Build.VERSION_CODES.M)
@SuppressLint("ClickableViewAccessibility")
override fun onServiceConnected() {
val windowManager = getSystemService(WINDOW_SERVICE) as WindowManager
if (flayout == null) {
flayout = LayoutInflater.from(this).inflate(R.layout.button, null);
flayoutParams.apply {
y = 0
x = 0
width = LayoutParams.WRAP_CONTENT
height = LayoutParams.WRAP_CONTENT
type = LayoutParams.TYPE_ACCESSIBILITY_OVERLAY
gravity = Gravity.TOP or Gravity.LEFT
format = PixelFormat.TRANSPARENT
flags = LayoutParams.FLAG_NOT_FOCUSABLE
}
try {
windowManager.addView(flayout, flayoutParams)
} catch (ex: Exception) {
Log.e("ACCSVC", "adding view failed", ex)
}
}
flayout!!.setOnTouchListener { v, event ->
when(event.action) {
MotionEvent.ACTION_DOWN -> {
//remember the initial position.
initialX = flayoutParams.x.toFloat();
initialY = flayoutParams.y.toFloat();
//get the touch location
initialTouchX = event.rawX;
initialTouchY = event.rawY;
lastAction = event.action;
true
}
MotionEvent.ACTION_MOVE -> {
//Calculate the X and Y coordinates of the view.
flayoutParams.x = (initialX + (event.rawX - initialTouchX).toInt()).toInt()
flayoutParams.y = (initialY + (event.rawY - initialTouchY).toInt()).toInt()
//Update the layout with new X & Y coordinate
windowManager.updateViewLayout(flayout, flayoutParams)
lastAction = event.action
true
}
MotionEvent.ACTION_UP -> {
//take screen shot
}
}
true
}
}
override fun onAccessibilityEvent(event: AccessibilityEvent?) {
if (event != null) {
if (event.packageName == "com.tests.myapp") {
componentName = ComponentName(
event.packageName.toString(),
event.className.toString()
)
val activity = tryGetActivity(componentName!!);
if (activity != null){
lastActivity = activity;
}
}else if (event.eventType == AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED){
componentName = null
lastActivity = null
}
}
}
private fun tryGetActivity(componentName: ComponentName): ActivityInfo? {
return try {
packageManager.getActivityInfo(componentName, 0)
} catch (e: PackageManager.NameNotFoundException) {
null
}
}
override fun onInterrupt() {
Log.i("ACCSVC", "interrupt")
}
object ScreenMetricsCompat {
private val api: Api =
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) ApiLevel30()
else Api()
/**
* Returns screen size in pixels.
*/
@RequiresApi(Build.VERSION_CODES.M)
fun getScreenSize(context: Context): Size = api.getScreenSize(context)
@Suppress("DEPRECATION")
private open class Api {
@RequiresApi(Build.VERSION_CODES.M)
open fun getScreenSize(context: Context): Size {
val display = context.getSystemService(WindowManager::class.java).defaultDisplay
val metrics = if (display != null) {
DisplayMetrics().also { display.getRealMetrics(it) }
} else {
Resources.getSystem().displayMetrics
}
return Size(metrics.widthPixels, metrics.heightPixels)
}
}
@RequiresApi(Build.VERSION_CODES.R)
private class ApiLevel30 : Api() {
override fun getScreenSize(context: Context): Size {
val metrics: WindowMetrics = context.getSystemService(WindowManager::class.java).currentWindowMetrics
return Size(metrics.bounds.width(), metrics.bounds.height())
}
}
}
}
I want to put the screenshot logic inside MotionEvent.ACTION_UP.
I use onAccessibilityEvent to check whether the app I want is running and to save its ActivityInfo.
But I don't know how to take a screenshot using ActivityInfo.
I tried using rootInActiveWindow.window instead of ActivityInfo, but I also couldn't figure out how to screen capture an AccessibilityWindowInfo.
I want a screenshot of the app, not a screenshot of the whole screen. Other answers I've seen said to use getWindow().getDecorView().getRootView().
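The closest thing I have found so far (an untested sketch, assuming API 30+) is AccessibilityService.takeScreenshot(), which captures the whole display, and then cropping the result to the bounds of rootInActiveWindow; something like this inside MyAccessibilityService:
@RequiresApi(Build.VERSION_CODES.R)
fun captureActiveWindow(onBitmap: (Bitmap?) -> Unit) {
    // bounds of the app window that currently has input focus
    val bounds = Rect()
    rootInActiveWindow?.getBoundsInScreen(bounds)
    takeScreenshot(Display.DEFAULT_DISPLAY, mainExecutor,
        object : AccessibilityService.TakeScreenshotCallback {
            override fun onSuccess(screenshot: AccessibilityService.ScreenshotResult) {
                val full = Bitmap.wrapHardwareBuffer(screenshot.hardwareBuffer, screenshot.colorSpace)
                // hardware bitmaps are read-only, so copy before cropping; assumes the bounds lie inside the screenshot
                val software = full?.copy(Bitmap.Config.ARGB_8888, false)
                val cropped = software?.let {
                    Bitmap.createBitmap(it, bounds.left, bounds.top, bounds.width(), bounds.height())
                }
                onBitmap(cropped)
            }
            override fun onFailure(errorCode: Int) = onBitmap(null)
        })
}
The cropped bitmap could then be handed to whatever saving code runs in MotionEvent.ACTION_UP.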
Hello, I have an activity with two buttons. First I take a photo, and in the overridden onActivityResult I upload the photo to Firebase and get a URL, which I save to photoUrl. Then, in the other button's listener inside onCreate, I want to use this variable, but I get null, as it is defined as null at the beginning of the class. How can I get the value of photoUrl that I want?
class AddYourStory : AppCompatActivity() {
val storage = FirebaseStorage.getInstance()
private val REQUEST_IMAGE = 100
private val TAG = "MainActivity"
var destination: File? = null
var imagePath: String? = null
var photoUrl : String? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_add_your_story)
setSupportActionBar(toolbar)
val db: FirebaseFirestore
val intent = intent
val lat = intent.getStringExtra("lat")
val lng = intent.getStringExtra("lng")
db = FirebaseFirestore.getInstance()
val builder = StrictMode.VmPolicy.Builder()
StrictMode.setVmPolicy(builder.build());
val name = dateToString(Date(), "yyyy-MM-dd-hh-mm-ss")
destination = File(Environment.getExternalStorageDirectory(), "$name.jpg")
val takephoto = findViewById<Button>(R.id.button2)
takephoto.setOnClickListener {
val intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(destination));
startActivityForResult(intent, REQUEST_IMAGE);
}
val txtTitle = findViewById<TextInputEditText>(R.id.textInputEditText2)
val txtStory = findViewById<EditText>(R.id.editText)
val btn = findViewById<Button>(R.id.button)
btn.setOnClickListener{
println("????????????????????????>>>>"+photoUrl)
val MyStory: HashMap<String, String> = HashMap<String,String>()
MyStory.put("title",txtTitle.text.toString())
MyStory.put("story",txtStory.text.toString())
MyStory.put("lat",lat)
MyStory.put("lng",lng)
MyStory.put("url",photoUrl.toString())
db.collection("Stories").document().set(MyStory as Map<String, Any>)
val confirm = Intent(this, MapsActivity::class.java)
startActivity(confirm)
}
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
if (requestCode == REQUEST_IMAGE && resultCode == Activity.RESULT_OK) {
try {
val `in` = FileInputStream(destination)
val options = BitmapFactory.Options()
options.inSampleSize = 10
imagePath = destination!!.getAbsolutePath()
val storageRef = storage.reference
val stream = FileInputStream(File(imagePath))
val picRef = storageRef.child(dateToString(Date(), "yyyy-MM-dd-hh-mm-ss"))
val uploadTask = picRef.putStream(stream)
uploadTask.addOnFailureListener { exception ->
println("Failed")
}.addOnSuccessListener { taskSnapshot ->
println("OK")
picRef.downloadUrl.addOnCompleteListener () {taskSnapshot ->
photoUrl = taskSnapshot.result.toString()
println ("url =" + photoUrl.toString ())
}
}
val bmp = BitmapFactory.decodeStream(`in`, null, options)
} catch (e: FileNotFoundException) {
e.printStackTrace()
}
} else {
println("Cancel")
}
}
fun dateToString(date: Date, format: String): String {
val df = SimpleDateFormat(format)
return df.format(date)
}
}
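One idea (an untested sketch reusing the names above) is to block the save until the upload has completed, for example:
// inside onCreate, replacing the existing click listener: refuse to save until downloadUrl has completed
btn.setOnClickListener {
    val url = photoUrl
    if (url == null) {
        Toast.makeText(this, "Photo is still uploading, please wait", Toast.LENGTH_SHORT).show()
        return@setOnClickListener
    }
    val myStory = hashMapOf(
        "title" to txtTitle.text.toString(),
        "story" to txtStory.text.toString(),
        "lat" to lat.orEmpty(),
        "lng" to lng.orEmpty(),
        "url" to url
    )
    db.collection("Stories").document().set(myStory)
    startActivity(Intent(this, MapsActivity::class.java))
}
Alternatively, the Firestore write could be moved inside the picRef.downloadUrl completion listener so it only runs once the URL actually exists.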