Related
/**
 * Fast inverse square root (the classic Quake III bit-level approximation),
 * accurate to roughly 0.2% after the single Newton-Raphson refinement step.
 *
 * Uses Kotlin's native [Float.toRawBits]/[Float.fromBits] instead of going
 * through `java.lang.Float`, which is both shorter and idiomatic Kotlin.
 *
 * @param x a positive, finite float.
 * @return an approximation of 1 / sqrt(x).
 */
fun invSqrt(x: Float): Float {
    val xhalf = 0.5f * x
    // Magic-constant bit trick: shifts the exponent to approximate 1/sqrt(x).
    var y = Float.fromBits(0x5f3759df - (x.toRawBits() shr 1))
    // One Newton-Raphson iteration sharpens the estimate.
    y *= 1.5f - xhalf * y * y
    return y
}
Is there any shorter or faster way to do this with Kotlin?
I am making an app that uses the CameraX API and Bitmap. My goal is for the camera not only to capture a simple image, but to take the image and draw watermark text on top of it before writing the file. However, the Azimuth, Pitch and Roll show up as null in the result once the bitmap is finished — the attached screenshot shows the actual output of the JPEG file. If I haven't explained it correctly or made a mistake while explaining, please let me know as soon as possible; if you have any questions, I'll answer them. Thank you in advance.
I'm gonna give additional meaning of the image and translate from Bulgarian to English and what it stands for:
Посока -> Direction: where(West,South,North,East) Azimuth,
Наклон -> Tilting: Pitch, Roll
private val mSensorEvent: SensorEvent? = null
/**
 * CameraX capture callback: decodes the freshly saved photo, stamps the watermark
 * lines onto it, and overwrites the file with the watermarked JPEG.
 */
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
    // Azimuth, pitch and roll formatted for the watermark.
    // NOTE(review): mSensorEvent is never assigned (declared `= null`), so these
    // are always formatted from null — cache the values in onSensorChanged instead.
    val azimuthWMText = resources.getString(R.string.value_format_2, mSensorEvent?.values?.get(0))
    val pitchWMText = resources.getString(R.string.value_format_2, mSensorEvent?.values?.get(1))
    val rollWMText = resources.getString(R.string.value_format_2, mSensorEvent?.values?.get(2))
    // Decode the captured file and draw the six watermark lines onto a copy.
    val originalBitmap = AddWatermark().addWatermark(
        BitmapFactory.decodeFile(photoFile.toString(), BitmapFactory.Options()),
        firstWatermarkText = "Дължина: $longitudeWM${resources.getString(R.string.degrees)}, Ширина: $latitudeWM${resources.getString(R.string.degrees)}",
        secondWatermarkText = "Височина: ${altitudeWM}м, Ориентация: $orientationWM",
        thirdWatermarkText = "Точност: Хоризонтална: ${hozAccuracyWM}м, Вертикална: ${verAccuracyWM}м",
        fourthWatermarkText = "Посока: where $azimuthWMText${resources.getString(R.string.degrees)}",
        fifthWatermarkText = "Наклон: pitchTilt $pitchWMText${resources.getString(R.string.degrees)}, rollTilt $rollWMText${resources.getString(R.string.degrees)}",
        sixthWatermarkText = "Дата и Час: $dateTimeFormatWMText",
        AddWatermark.WatermarkOptions(
            AddWatermark.Corner.TOP_LEFT,
            textSizeToWidthRation = 0.017f,
            paddingToWidthRatio = 0.03f,
            Color.parseColor("#FF0000"),
            shadowColor = Color.BLACK,
            strokeOutline = null,
            typeface = null
        )
    )
    // Removed: `previewView.bitmap.let { originalBitmap }` — a no-op whose result was discarded.
    // Overwrite the captured file; `use` guarantees the stream is closed even if
    // compress() throws (the original leaked the stream on failure).
    FileOutputStream(photoFile).use { outputStream ->
        originalBitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream)
        outputStream.flush()
    }
    Toast.makeText(
        this@CameraActivity, // was `this#CameraActivity` — a Markdown-mangled `@`
        "Обработването и запазено успешно! Запазено е в: $photoFile",
        Toast.LENGTH_LONG
    ).show()
}
The watermark class code:
class AddWatermark : AppCompatActivity() {
    // NOTE(review): this class is instantiated directly (AddWatermark().addWatermark(...)),
    // so it should not extend AppCompatActivity — a plain class or object would do.
    // The supertype is kept only to avoid changing the public surface.

    /**
     * Draws up to six lines of watermark text onto a copy of [bitmap] and returns the copy;
     * the input bitmap is never modified.
     *
     * @param options corner anchoring, size/padding ratios, colors and typeface.
     */
    fun addWatermark(
        bitmap: Bitmap,
        firstWatermarkText: String,
        secondWatermarkText: String,
        thirdWatermarkText: String,
        fourthWatermarkText: String,
        fifthWatermarkText: String,
        sixthWatermarkText: String,
        options: WatermarkOptions = WatermarkOptions()
    ): Bitmap {
        // Work on a mutable copy so the caller's bitmap stays untouched.
        val result = bitmap.copy(bitmap.config, true)
        val canvas = Canvas(result)
        val paint = Paint(Paint.ANTI_ALIAS_FLAG or Paint.DITHER_FLAG)
        // Text alignment follows the requested corner (exhaustive over the enum).
        paint.textAlign = when (options.corner) {
            Corner.TOP_LEFT,
            Corner.BOTTOM_LEFT -> Paint.Align.LEFT
            Corner.TOP_RIGHT,
            Corner.BOTTOM_RIGHT -> Paint.Align.RIGHT
        }
        // Text size and padding scale with the bitmap width so the watermark looks
        // consistent across capture resolutions.
        // (Parameter name "Ration" is a typo kept for caller compatibility.)
        val textSize = result.width * options.textSizeToWidthRation
        paint.textSize = textSize
        paint.color = options.textColor
        // Optional soft shadow behind the text for readability on bright photos.
        if (options.shadowColor != null) {
            paint.setShadowLayer(2.5f, 0f, 0f, options.shadowColor)
        }
        // Optional custom typeface.
        if (options.typeface != null) {
            paint.typeface = options.typeface
        }
        val padding = result.width * options.paddingToWidthRatio
        // Anchor point of the first line, derived from the chosen corner.
        val coordinates = calculateCoordinates(
            firstWatermarkText,
            secondWatermarkText,
            thirdWatermarkText,
            fourthWatermarkText,
            fifthWatermarkText,
            sixthWatermarkText,
            paint,
            options,
            canvas.width,
            canvas.height,
            padding
        )
        // NOTE(review): only the first line uses the computed coordinates; the remaining
        // lines use hard-coded y offsets that do not scale with the bitmap size —
        // consider stepping by the measured text height instead.
        when (Build.VERSION.SDK_INT) {
            // Android 5.0 used slightly different offsets in the original code.
            21 -> {
                canvas.drawText(firstWatermarkText, coordinates.x, coordinates.y, paint)
                canvas.drawText(secondWatermarkText, coordinates.x, 270f, paint)
                canvas.drawText(thirdWatermarkText, coordinates.x, 330f, paint)
                canvas.drawText(fourthWatermarkText, coordinates.x, 420f, paint)
                canvas.drawText(fifthWatermarkText, coordinates.x, 480f, paint)
                canvas.drawText(sixthWatermarkText, coordinates.x, 540f, paint)
            }
            // Default layout (the original duplicated this branch verbatim for API 28
            // and 30, and drew NOTHING on every other API level — using `else` fixes
            // that silent no-draw bug).
            else -> {
                canvas.drawText(firstWatermarkText, coordinates.x, coordinates.y, paint)
                canvas.drawText(secondWatermarkText, coordinates.x, 240f, paint)
                canvas.drawText(thirdWatermarkText, coordinates.x, 310f, paint)
                canvas.drawText(fourthWatermarkText, coordinates.x, 380f, paint)
                canvas.drawText(fifthWatermarkText, coordinates.x, 450f, paint)
                canvas.drawText(sixthWatermarkText, coordinates.x, 520f, paint)
            }
        }
        return result
    }

    /**
     * Computes the anchor point of the first watermark line for the chosen corner.
     * Left corners anchor at `padding`; right corners at `width - padding`; bottom
     * corners at `height - padding`; top corners at text-height + padding.
     */
    private fun calculateCoordinates(
        firstWatermarkText: String,
        secondWatermarkText: String,
        thirdWatermarkText: String,
        fourthWatermarkText: String,
        fifthWatermarkText: String,
        sixthWatermarkText: String,
        paint: Paint,
        options: WatermarkOptions,
        width: Int,
        height: Int,
        padding: Float
    ): PointF {
        val x = when (options.corner) {
            Corner.TOP_LEFT,
            Corner.BOTTOM_LEFT -> padding
            Corner.TOP_RIGHT,
            Corner.BOTTOM_RIGHT -> width - padding
        }
        val y = when (options.corner) {
            Corner.BOTTOM_LEFT,
            Corner.BOTTOM_RIGHT -> height - padding
            Corner.TOP_LEFT,
            Corner.TOP_RIGHT -> {
                // The original measured all six strings into the SAME Rect, so only the
                // last call (sixthWatermarkText) ever took effect; this keeps that exact
                // behavior with a single measurement.
                val bounds = Rect()
                paint.getTextBounds(sixthWatermarkText, 0, sixthWatermarkText.length, bounds)
                bounds.height() + padding
            }
        }
        return PointF(x, y)
    }

    /** Which corner of the bitmap the watermark block is anchored to. */
    enum class Corner {
        TOP_LEFT,
        TOP_RIGHT,
        BOTTOM_LEFT,
        BOTTOM_RIGHT
    }

    /**
     * Rendering options for the watermark. The `#ColorInt` annotations in the original
     * post were Markdown-mangled `@ColorInt`.
     */
    data class WatermarkOptions(
        val corner: Corner = Corner.BOTTOM_RIGHT,
        val textSizeToWidthRation: Float = 0.04f,
        val paddingToWidthRatio: Float = 0.03f,
        @ColorInt val textColor: Int = Color.parseColor("#FFC800"),
        @ColorInt val shadowColor: Int? = Color.BLACK,
        @ColorInt val strokeOutline: Int? = Color.BLACK,
        val typeface: Typeface? = null
    )
}
OK, I found my issue: instead of using a separate mSensorEvent, I used the sensor event inside "onSensorChanged(event: SensorEvent?)" directly and reassigned all the member variables there. So here is the answer to my question:
// Orientation state: updated on every onSensorChanged() call and read later (e.g. by
// the watermark code) as the final values.
private var azimuthDegree: Double = 0.0   // raw azimuth, intermediate calculations only
private var currentAzimuth: Double = 0.0  // azimuth corrected for the display rotation
private var currentPitch: Double = 0.0
private var currentRoll: Double = 0.0
// 'dirs' holds the compass-direction labels; 'currentDirection' caches the label
// matching the latest azimuth so it can be reused outside the sensor callback.
private var dirs = ArrayList<String>()
private lateinit var currentDirection: String
override fun onResume() {
super.onResume()
// Go fullscreen and hide the action bar while the camera preview is visible.
window.decorView.systemUiVisibility = View.SYSTEM_UI_FLAG_FULLSCREEN
actionBar?.hide()
// Register for orientation updates. NOTE(review): Sensor.TYPE_ORIENTATION is
// deprecated — the accelerometer + magnetometer combination shown further below
// (with SensorManager.getRotationMatrix) is the recommended replacement.
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION),
SensorManager.SENSOR_DELAY_GAME)
}
override fun onPause() {
super.onPause()
// Stop sensor delivery while the activity is not visible — saves battery and
// prevents callbacks from touching a paused UI.
mSensorManager.unregisterListener(this)
}
/**
 * Reads the (deprecated) TYPE_ORIENTATION sensor and converts its raw values into
 * display-rotation-corrected azimuth, pitch and roll, updating both the on-screen
 * labels and the `current*` fields consumed by the watermark code.
 * (`#SuppressLint` in the original post was a Markdown-mangled `@SuppressLint`.)
 */
@SuppressLint("CutPasteId")
override fun onSensorChanged(event: SensorEvent?) {
    // Portrait measurements are correct by default; other rotations are corrected below.
    try {
        val windowOrientation = windowManager.defaultDisplay.orientation
        // Azimuth. Renamed from the undeclared `degreeAzimuth` to match the field
        // actually declared on this class, `azimuthDegree`.
        azimuthDegree = event!!.values[0].toDouble()
        val azimuthTxt = findViewById<TextView>(R.id.azimuthText)
        // Pitch and roll swap axes when the device is in a landscape rotation (1 or 3).
        var degreeRoll: Double
        var degreePitch: Double
        if (windowOrientation == 1 || windowOrientation == 3) {
            degreeRoll = event.values[1].toDouble()
            degreePitch = event.values[2].toDouble()
        } else {
            degreeRoll = event.values[2].toDouble()
            degreePitch = event.values[1].toDouble()
        }
        // Normalize pitch/roll per display rotation so the readings stay consistent
        // across portrait, landscape, reverse portrait and reverse landscape.
        when (windowOrientation) {
            Surface.ROTATION_0 -> {
                degreePitch += 90
            }
            1 -> {
                if (abs(degreeRoll) > 90) {
                    degreePitch -= 90
                    degreeRoll = if (degreeRoll < 0)
                        -(180 + degreeRoll)
                    else
                        180 - degreeRoll
                } else {
                    degreePitch = 90 - degreePitch
                }
            }
            2 -> {
                degreePitch = if (degreePitch < 90)
                    90 - degreePitch
                else
                    -(degreePitch - 90)
                degreeRoll = -degreeRoll
            }
            3 -> {
                if (abs(degreeRoll) > 90) {
                    degreePitch = -(90 + degreePitch)
                    degreeRoll = if (degreeRoll < 0)
                        -(180 + degreeRoll)
                    else
                        180 - degreeRoll
                } else {
                    degreePitch += 90
                    degreeRoll = -degreeRoll
                }
            }
        }
        // Compass labels in 45° sectors; the duplicate "North" at index 8 catches
        // azimuths just below 360 after the +22.5 rounding offset.
        val where = findViewById<TextView>(R.id.direction_text)
        dirs = arrayListOf(
            "North",
            "North East",
            "East",
            "South East",
            "South",
            "South West",
            "West",
            "North West",
            "North"
        )
        // Rotate the azimuth by 90° per display-rotation step, then wrap into 0..360.
        var azimuth = azimuthDegree + (windowOrientation * 90)
        // The original `when (windowOrientation)` here had four byte-identical branches
        // (one per Surface.ROTATION_*); since defaultDisplay.orientation is always
        // 0..3, it is collapsed into straight-line code with identical behavior.
        if (azimuth > 360) {
            azimuth -= 360
        }
        azimuthTxt.text = resources.getString(R.string.value_format, azimuth)
        where.text = dirs[((azimuth + 22.5) / 45.0).toInt()]
        // Cache the final values for consumers outside this callback (e.g. watermark).
        currentDirection = dirs[((azimuth + 22.5) / 45.0).toInt()]
        currentAzimuth = azimuth
        // NOTE(review): R.id.azimuthText was already looked up above (hence the
        // CutPasteId suppression); the duplicate lookup is kept from the original.
        val azimuthText = findViewById<TextView>(R.id.azimuthText)
        azimuthText.text = resources.getString(R.string.value_format_2, currentAzimuth)
        currentPitch = degreePitch
        val pitchText = findViewById<TextView>(R.id.pitchText)
        pitchText.text = resources.getString(R.string.value_format, currentPitch)
        currentRoll = degreeRoll
        val rollText = findViewById<TextView>(R.id.rollText)
        rollText.text = resources.getString(R.string.value_format, currentRoll)
    } catch (e: Exception) {
        // Best-effort: sensor math must never crash the camera activity.
        e.printStackTrace()
    }
}
currentAzimuth, currentPitch, currentRoll — these are reassigned on every sensor update and used as the end result.
azimuthDegree — this is used for intermediate calculations only, so the azimuth can be corrected for every orientation of the display surface.
Also, going the other way around: since Sensor.TYPE_ORIENTATION is deprecated, the suggested approach is this:
override fun onResume() {
super.onResume()
// Fullscreen UI while the preview is visible.
window.decorView.systemUiVisibility = View.SYSTEM_UI_FLAG_FULLSCREEN
actionBar?.hide()
// Non-deprecated replacement for TYPE_ORIENTATION: register BOTH the magnetometer
// and the accelerometer; their readings are combined in onSensorChanged via
// SensorManager.getRotationMatrix().
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
SensorManager.SENSOR_DELAY_GAME)
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_GAME)
}
/**
 * Accelerometer + magnetometer variant of the sensor callback: low-pass filters both
 * readings, builds a rotation matrix, then derives azimuth/pitch/roll.
 * NOTE(review): this snippet appears truncated as posted — the `try` below has no
 * visible `catch`/`finally` and the function's closing brace is missing. Also,
 * `#SuppressLint` on the next line is a Markdown-mangled `@SuppressLint`.
 */
#SuppressLint("CutPasteId")
override fun onSensorChanged(event: SensorEvent?) {
//Portrait measurements - Correct by default
try {
val windowOrientation = windowManager.defaultDisplay.orientation
// Low-pass filter coefficient: keep 97% of the previous reading to smooth jitter.
val alpha = 0.97f
// Route the reading into the matching filtered buffer by sensor type.
when (event!!.sensor.type) {
Sensor.TYPE_ACCELEROMETER -> {
mGravity[0] = alpha * mGravity[0] + (1 - alpha) * event.values[0]
mGravity[1] = alpha * mGravity[1] + (1 - alpha) * event.values[1]
mGravity[2] = alpha * mGravity[2] + (1 - alpha) * event.values[2]
}
Sensor.TYPE_MAGNETIC_FIELD -> {
mGeomagnetic[0] = alpha * mGeomagnetic[0] + (1 - alpha) * event.values[0]
mGeomagnetic[1] = alpha * mGeomagnetic[1] + (1 - alpha) * event.values[1]
mGeomagnetic[2] = alpha * mGeomagnetic[2] + (1 - alpha) * event.values[2]
}
}
val rotationMatrix = FloatArray(9)
val inclinationMatrix = FloatArray(9)
val success: Boolean = SensorManager.getRotationMatrix(rotationMatrix, inclinationMatrix, mGravity, mGeomagnetic)
if (success) {
// NOTE(review): the rotation matrix computed above is never used — the values
// below are read straight from event.values (raw m/s² or µT, not angles).
// The missing step looks like SensorManager.getOrientation(rotationMatrix, ...);
// confirm before relying on these numbers.
//Azimuth
degreeAzimuth = event!!.values[0].toDouble()
val azimuthTxt = findViewById<TextView>(R.id.azimuthText)
// Pitch and roll swap axes when the device is in a landscape rotation (1 or 3).
var degreeRoll: Double
var degreePitch: Double
if (windowOrientation == 1 || windowOrientation == 3) {
degreeRoll = event.values[1].toDouble()
degreePitch = event.values[2].toDouble()
} else {
degreeRoll = event.values[2].toDouble()
degreePitch = event.values[1].toDouble()
}
// Rotate the azimuth by 90° per display-rotation step, then wrap into 0..360.
var azimuth = degreeAzimuth + (windowOrientation * 90)
// NOTE(review): all four branches below are identical; `where`/`dirs` are the
// fields defined in the earlier version of this class.
when (windowOrientation) {
//Portrait
Surface.ROTATION_0 -> {
if (azimuth > 360) {
azimuth -= 360
}
azimuthTxt.text = resources.getString(R.string.value_format, azimuth)
where.text = dirs[((azimuth + 22.5) / 45.0).toInt()]
}
//Landscape
Surface.ROTATION_90 -> {
if (azimuth > 360) {
azimuth -= 360
}
azimuthTxt.text = resources.getString(R.string.value_format, azimuth)
where.text = dirs[((azimuth + 22.5) / 45.0).toInt()]
}
//Reverse Portrait
Surface.ROTATION_180 -> {
if (azimuth > 360) {
azimuth -= 360
}
azimuthTxt.text = resources.getString(R.string.value_format, azimuth)
where.text = dirs[((azimuth + 22.5) / 45.0).toInt()]
}
//Reverse Landscape
Surface.ROTATION_270 -> {
if (azimuth > 360) {
azimuth -= 360
}
azimuthTxt.text = resources.getString(R.string.value_format, azimuth)
where.text = dirs[((azimuth + 22.5) / 45.0).toInt()]
}
}
// Publish the final values for consumers outside this callback.
currentAzimuth = azimuth
val azimuthText = findViewById<TextView>(R.id.azimuthText)
azimuthText.text = resources.getString(R.string.value_format_2, currentAzimuth)
currentPitch = degreePitch
val pitchText = findViewById<TextView>(R.id.pitchText)
pitchText.text = resources.getString(R.string.value_format, currentPitch)
currentRoll = degreeRoll
val rollText = findViewById<TextView>(R.id.rollText)
rollText.text = resources.getString(R.string.value_format, currentRoll)
}
}
Then, in the onImageSaved() method, use the AddWatermark class within it as follows:
// Watermark the captured file using the recast sensor values.
val originalBitmap = AddWatermark().addWatermark(
BitmapFactory.decodeFile(photoFile.absolutePath),
// NOTE(review): "$dirs" interpolates the entire direction list into the text;
// "$currentDirection" was probably intended here.
firstWatermarkText = "Azimuth:$dirs, $currentAzimuth",
secondWatermarkText = "Pitch: $currentPitch, Roll: $currentRoll",
AddWatermark.WatermarkOptions(
AddWatermark.Corner.TOP_LEFT,
// NOTE(review): the class shown earlier spells this parameter
// `textSizeToWidthRation` — one of the two snippets has a typo.
textSizeToWidthRatio = 0.017f,
paddingToWidthRatio = 0.03f,
textColor = Color.parseColor("#FF0000"),
shadowColor = null,
typeface = null
)
)
// NOTE(review): this line is a no-op — its result is discarded.
viewFinder.bitmap.let { originalBitmap }
// NOTE(review): prefer FileOutputStream(...).use { } so the stream is also closed
// when compress() throws.
val outputStream = FileOutputStream(photoFile.toString())
originalBitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream)
outputStream.flush()
outputStream.close()
Background
We record a video of the user's face, and usually the face is located at the upper half of the video.
Later we wish to view the video, but the aspect ratio of the PlayerView might be different than the one of the video, so there needs to be some scaling and cropping.
The problem
The only way I've found to scale the PlayerView so that it will be shown in the entire space it has but keeping the aspect ratio (which will result in cropping when needed, of course) , is by using app:resize_mode="zoom" . Here's a sample of how it works with center-crop: http://s000.tinyupload.com/?file_id=00574047057406286563 . The more the Views that show the content have a similar aspect ratio, the less cropping is needed.
But this is only for the center, meaning it takes a point of 0.5x0.5 of the video, and scale-crops from that point. This causes many cases of losing the important content of the video.
For example, if we have a video that was taken in portrait, and we have a square PlayerView and want to show the top area, this is the part that will be visible:
Of course, if the content itself is square, and the views are also square, it should show the entire content, without cropping.
What I've tried
I've tried searching over the Internet, StackOverflow (here) and on Github, but I couldn't find how to do it. The only clue I've found is about AspectRatioFrameLayout and AspectRatioTextureView, but I didn't find how to use them for this task, if it's even possible.
I was told (here) that I should use a normal TextureView , and provide it directly to SimpleExoPlayer using SimpleExoPlayer.setVideoTextureView. And to set a special transformation to it using TextureView.setTransform.
After a lot of trying what is best to use (and looking at video-crop repository , SuperImageView repository , and JCropImageView repository which have examples of scale/crop of ImageView and video), I've published a working sample that seems to show the video correctly, but I'm still not sure about it, as I also use an ImageView that's shown on top of it before it starts playing (to have a nicer transition instead of black content).
Here's the current code:
/**
 * Demonstrates focal-point scale-crop of both a still [ImageView] and an ExoPlayer
 * video on a TextureView, anchored [percentageY] from the top. The `#` characters in
 * the original post (`this#MainActivity`, `#JvmStatic`, `#RawRes`) were
 * Markdown-mangled `@`s and are fixed here.
 */
class MainActivity : AppCompatActivity() {
    private val imageResId = R.drawable.test
    private val videoResId = R.raw.test
    // Vertical focal point of the crop: 0.2f keeps the area 20% from the top visible.
    private val percentageY = 0.2f
    private var player: SimpleExoPlayer? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        // Solid black window background so there is no flash before the first frame.
        window.setBackgroundDrawable(ColorDrawable(0xff000000.toInt()))
        super.onCreate(savedInstanceState)
        if (cache == null) {
            cache = SimpleCache(File(cacheDir, "media"), LeastRecentlyUsedCacheEvictor(MAX_PREVIEW_CACHE_SIZE_IN_BYTES))
        }
        setContentView(R.layout.activity_main)
        // imageView.visibility = View.INVISIBLE
        imageView.setImageResource(imageResId)
        // Wait for layout so measuredWidth/measuredHeight are valid before building the matrix.
        imageView.doOnPreDraw {
            imageView.imageMatrix = prepareMatrixForImageView(imageView, imageView.drawable.intrinsicWidth.toFloat(), imageView.drawable.intrinsicHeight.toFloat())
            // imageView.imageMatrix = prepareMatrix(imageView, imageView.drawable.intrinsicWidth.toFloat(), imageView.drawable.intrinsicHeight.toFloat())
            // imageView.visibility = View.VISIBLE
        }
    }

    override fun onStart() {
        super.onStart()
        playVideo()
    }

    // First scale/crop attempt (kept as in the question; the author notes it does not
    // behave like center-crop in all cases).
    private fun prepareMatrix(view: View, contentWidth: Float, contentHeight: Float): Matrix {
        var scaleX = 1.0f
        var scaleY = 1.0f
        val viewWidth = view.measuredWidth.toFloat()
        val viewHeight = view.measuredHeight.toFloat()
        Log.d("AppLog", "viewWidth $viewWidth viewHeight $viewHeight contentWidth:$contentWidth contentHeight:$contentHeight")
        if (contentWidth > viewWidth && contentHeight > viewHeight) {
            scaleX = contentWidth / viewWidth
            scaleY = contentHeight / viewHeight
        } else if (contentWidth < viewWidth && contentHeight < viewHeight) {
            scaleY = viewWidth / contentWidth
            scaleX = viewHeight / contentHeight
        } else if (viewWidth > contentWidth)
            scaleY = viewWidth / contentWidth / (viewHeight / contentHeight)
        else if (viewHeight > contentHeight)
            scaleX = viewHeight / contentHeight / (viewWidth / contentWidth)
        val matrix = Matrix()
        val pivotPercentageX = 0.5f
        val pivotPercentageY = percentageY
        matrix.setScale(scaleX, scaleY, viewWidth * pivotPercentageX, viewHeight * pivotPercentageY)
        return matrix
    }

    // Video variant: compensates the TextureView's implicit stretch-to-fit by scaling Y
    // with the content/view aspect-ratio quotient, pivoted at percentageY.
    private fun prepareMatrixForVideo(view: View, contentWidth: Float, contentHeight: Float): Matrix {
        val msWidth = view.measuredWidth
        val msHeight = view.measuredHeight
        val matrix = Matrix()
        matrix.setScale(1f, (contentHeight / contentWidth) * (msWidth.toFloat() / msHeight), msWidth / 2f, percentageY * msHeight) /*,msWidth/2f,msHeight/2f*/
        return matrix
    }

    // ImageView variant modeled on ImageView's own center-crop math, with the vertical
    // offset driven by percentageY instead of 0.5.
    private fun prepareMatrixForImageView(view: View, contentWidth: Float, contentHeight: Float): Matrix {
        val dw = contentWidth
        val dh = contentHeight
        val msWidth = view.measuredWidth
        val msHeight = view.measuredHeight
        // Log.d("AppLog", "viewWidth $msWidth viewHeight $msHeight contentWidth:$contentWidth contentHeight:$contentHeight")
        val scalew = msWidth.toFloat() / dw
        val theoryh = (dh * scalew).toInt()
        val scaleh = msHeight.toFloat() / dh
        val theoryw = (dw * scaleh).toInt()
        val scale: Float
        var dx = 0
        var dy = 0
        if (scalew > scaleh) { // fit width
            scale = scalew
            // dy = ((msHeight - theoryh) * 0.0f + 0.5f).toInt() // + 0.5f for rounding
        } else {
            scale = scaleh
            dx = ((msWidth - theoryw) * 0.5f + 0.5f).toInt() // + 0.5f for rounding
        }
        dy = ((msHeight - theoryh) * percentageY + 0.5f).toInt() // + 0.5f for rounding
        val matrix = Matrix()
        // Log.d("AppLog", "scale:$scale dx:$dx dy:$dy")
        matrix.setScale(scale, scale)
        matrix.postTranslate(dx.toFloat(), dy.toFloat())
        return matrix
    }

    private fun playVideo() {
        player = ExoPlayerFactory.newSimpleInstance(this@MainActivity, DefaultTrackSelector())
        player!!.setVideoTextureView(textureView)
        player!!.addVideoListener(object : VideoListener {
            override fun onVideoSizeChanged(width: Int, height: Int, unappliedRotationDegrees: Int, pixelWidthHeightRatio: Float) {
                super.onVideoSizeChanged(width, height, unappliedRotationDegrees, pixelWidthHeightRatio)
                Log.d("AppLog", "onVideoSizeChanged: $width $height")
                // Swap width/height when the stream carries a 90°/270° rotation.
                val videoWidth = if (unappliedRotationDegrees % 180 == 0) width else height
                val videoHeight = if (unappliedRotationDegrees % 180 == 0) height else width
                val matrix = prepareMatrixForVideo(textureView, videoWidth.toFloat(), videoHeight.toFloat())
                textureView.setTransform(matrix)
            }

            override fun onRenderedFirstFrame() {
                Log.d("AppLog", "onRenderedFirstFrame")
                player!!.removeVideoListener(this)
                // imageView.animate().alpha(0f).setDuration(5000).start()
                imageView.visibility = View.INVISIBLE
            }
        })
        player!!.volume = 0f
        player!!.repeatMode = Player.REPEAT_MODE_ALL
        player!!.playRawVideo(this, videoResId)
        player!!.playWhenReady = true
        // player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/240/big_buck_bunny_240p_20mb.mkv", cache!!)
        // player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv", cache!!)
        // player!!.playVideoFromUrl(this@MainActivity, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv")
    }

    override fun onStop() {
        super.onStop()
        player!!.setVideoTextureView(null)
        // playerView.player = null
        player!!.release()
        player = null
    }

    companion object {
        const val MAX_PREVIEW_CACHE_SIZE_IN_BYTES = 20L * 1024L * 1024L
        var cache: com.google.android.exoplayer2.upstream.cache.Cache? = null

        /** Builds the HTTP user agent string from the application label. */
        @JvmStatic
        fun getUserAgent(context: Context): String {
            val packageManager = context.packageManager
            val info = packageManager.getPackageInfo(context.packageName, 0)
            val appName = info.applicationInfo.loadLabel(packageManager).toString()
            return Util.getUserAgent(context, appName)
        }
    }

    /** Prepares a looping media source from a bundled raw resource and starts it. */
    fun SimpleExoPlayer.playRawVideo(context: Context, @RawRes rawVideoRes: Int) {
        val dataSpec = DataSpec(RawResourceDataSource.buildRawResourceUri(rawVideoRes))
        val rawResourceDataSource = RawResourceDataSource(context)
        rawResourceDataSource.open(dataSpec)
        val factory: DataSource.Factory = DataSource.Factory { rawResourceDataSource }
        prepare(LoopingMediaSource(ExtractorMediaSource.Factory(factory).createMediaSource(rawResourceDataSource.uri)))
    }

    fun SimpleExoPlayer.playVideoFromUrl(context: Context, url: String, cache: Cache? = null) = playVideoFromUri(context, Uri.parse(url), cache)

    fun SimpleExoPlayer.playVideoFile(context: Context, file: File) = playVideoFromUri(context, Uri.fromFile(file))

    /** Streams a URI, optionally through the shared cache. */
    fun SimpleExoPlayer.playVideoFromUri(context: Context, uri: Uri, cache: Cache? = null) {
        val factory = if (cache != null)
            CacheDataSourceFactory(cache, DefaultHttpDataSourceFactory(getUserAgent(context)))
        else
            DefaultDataSourceFactory(context, MainActivity.getUserAgent(context))
        val mediaSource = ExtractorMediaSource.Factory(factory).createMediaSource(uri)
        prepare(mediaSource)
    }
}
I had various issues on trying this till I got to the current situation, and I've updated this question multiple times accordingly. Now it even works with the percentageY I talked about, so I could set it to be from 20% of the top of the video, if I wish. However, I still think that it has a big chance that something is wrong, because when I tried to set it to 50% , I've noticed that the content might not fit the entire View.
I even looked at the source code of ImageView (here), to see how center-crop is used. When applied to the ImageView, it still worked as center-crop, but when I used the same technique on the video, it gave me a very wrong result.
The questions
My goal here was to show both ImageView and the video so that it will smoothly transition from a static image to a video. All that while having both have the top-scale-crop of 20% from the top (for example). I've published a sample project here to try it out and share people of what I've found.
So now my questions are around why this doesn't seem to work well for the imageView and/or video :
As it turns out, none of the matrix creations that I've tried work well for either ImageView or the video. What's wrong with it exactly? How can I change it for them to look the same? To scale-crop from the top 20%, for example?
I tried to use the exact matrix for both, but it seems each need it differently, even though both have the exact same size and content size. Why would I need a different matrix for each?
EDIT: after this question was answered, I've decided to make a small sample of how to use it (Github repository available here) :
import android.content.Context
import android.graphics.Matrix
import android.graphics.PointF
import android.net.Uri
import android.os.Bundle
import android.view.TextureView
import android.view.View
import androidx.annotation.RawRes
import androidx.appcompat.app.AppCompatActivity
import androidx.core.view.doOnPreDraw
import com.google.android.exoplayer2.ExoPlayerFactory
import com.google.android.exoplayer2.Player
import com.google.android.exoplayer2.SimpleExoPlayer
import com.google.android.exoplayer2.source.ExtractorMediaSource
import com.google.android.exoplayer2.source.LoopingMediaSource
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector
import com.google.android.exoplayer2.upstream.*
import com.google.android.exoplayer2.upstream.cache.Cache
import com.google.android.exoplayer2.upstream.cache.CacheDataSourceFactory
import com.google.android.exoplayer2.upstream.cache.LeastRecentlyUsedCacheEvictor
import com.google.android.exoplayer2.upstream.cache.SimpleCache
import com.google.android.exoplayer2.util.Util
import com.google.android.exoplayer2.video.VideoListener
import kotlinx.android.synthetic.main.activity_main.*
import java.io.File
// https://stackoverflow.com/questions/54216273/how-to-have-similar-mechanism-of-center-crop-on-exoplayers-playerview-but-not
class MainActivity : AppCompatActivity() {
companion object {
    // Focal point of the scale-crop: x=0.5 (horizontal center), y=0.2 (20% from the top).
    private val FOCAL_POINT = PointF(0.5f, 0.2f)
    private const val IMAGE_RES_ID = R.drawable.test
    private const val VIDEO_RES_ID = R.raw.test
    // Shared across configuration changes; released in onDestroy().
    private var cache: Cache? = null
    private const val MAX_PREVIEW_CACHE_SIZE_IN_BYTES = 20L * 1024L * 1024L

    /**
     * Builds the HTTP user agent from the application label.
     * (`#JvmStatic` in the original post was a Markdown-mangled `@JvmStatic`.)
     */
    @JvmStatic
    fun getUserAgent(context: Context): String {
        val packageManager = context.packageManager
        val info = packageManager.getPackageInfo(context.packageName, 0)
        val appName = info.applicationInfo.loadLabel(packageManager).toString()
        return Util.getUserAgent(context, appName)
    }
}
private var player: SimpleExoPlayer? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
// Lazily create the shared media cache; it survives recreation via the companion.
if (cache == null)
cache = SimpleCache(File(cacheDir, "media"), LeastRecentlyUsedCacheEvictor(MAX_PREVIEW_CACHE_SIZE_IN_BYTES))
// imageView.visibility = View.INVISIBLE
// Show the still image first; the matrix is applied in onStart once layout is done.
imageView.setImageResource(IMAGE_RES_ID)
}
/**
 * Builds a matrix that scale-crops media of size (mediaWidth x mediaHeight) into [view],
 * preserving the aspect ratio and anchoring the crop at [focalPoint] (fractions in 0..1,
 * e.g. y=0.2 keeps the area 20% from the top). Works for both ImageView (imageMatrix)
 * and TextureView (setTransform). Returns null when the view is GONE or not laid out.
 */
private fun prepareMatrix(view: View, mediaWidth: Float, mediaHeight: Float, focalPoint: PointF): Matrix? {
if (view.visibility == View.GONE)
return null
val viewHeight = (view.height - view.paddingTop - view.paddingBottom).toFloat()
val viewWidth = (view.width - view.paddingStart - view.paddingEnd).toFloat()
if (viewWidth <= 0 || viewHeight <= 0)
return null
val matrix = Matrix()
if (view is TextureView)
// Restore true media size for further manipulation (TextureView pre-stretches
// the content to fill the view).
matrix.setScale(mediaWidth / viewWidth, mediaHeight / viewHeight)
val scaleFactorY = viewHeight / mediaHeight
val scaleFactor: Float
var px = 0f
var py = 0f
if (mediaWidth * scaleFactorY >= viewWidth) {
// Fit height; shift horizontally toward the focal point.
scaleFactor = scaleFactorY
// NOTE(review): the (1 - scaleFactor) divisor converts the wanted translation into
// a postScale pivot — assumes scaleFactor != 1; confirm the exact-fit edge case.
px = -(mediaWidth * scaleFactor - viewWidth) * focalPoint.x / (1 - scaleFactor)
} else {
// Fit width; shift vertically toward the focal point.
scaleFactor = viewWidth / mediaWidth
py = -(mediaHeight * scaleFactor - viewHeight) * focalPoint.y / (1 - scaleFactor)
}
matrix.postScale(scaleFactor, scaleFactor, px, py)
return matrix
}
private fun playVideo() {
player = ExoPlayerFactory.newSimpleInstance(this#MainActivity, DefaultTrackSelector())
player!!.setVideoTextureView(textureView)
player!!.addVideoListener(object : VideoListener {
override fun onVideoSizeChanged(videoWidth: Int, videoHeight: Int, unappliedRotationDegrees: Int, pixelWidthHeightRatio: Float) {
super.onVideoSizeChanged(videoWidth, videoHeight, unappliedRotationDegrees, pixelWidthHeightRatio)
textureView.setTransform(prepareMatrix(textureView, videoWidth.toFloat(), videoHeight.toFloat(), FOCAL_POINT))
}
override fun onRenderedFirstFrame() {
// Log.d("AppLog", "onRenderedFirstFrame")
player!!.removeVideoListener(this)
imageView.animate().alpha(0f).setDuration(2000).start()
// imageView.visibility = View.INVISIBLE
}
})
player!!.volume = 0f
player!!.repeatMode = Player.REPEAT_MODE_ALL
player!!.playRawVideo(this, VIDEO_RES_ID)
player!!.playWhenReady = true
// player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/240/big_buck_bunny_240p_20mb.mkv", cache!!)
// player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv", cache!!)
// player!!.playVideoFromUrl(this#MainActivity, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv")
}
override fun onStart() {
super.onStart()
imageView.doOnPreDraw {
val imageWidth: Float = imageView.drawable.intrinsicWidth.toFloat()
val imageHeight: Float = imageView.drawable.intrinsicHeight.toFloat()
imageView.imageMatrix = prepareMatrix(imageView, imageWidth, imageHeight, FOCAL_POINT)
}
playVideo()
}
override fun onStop() {
super.onStop()
if (player != null) {
player!!.setVideoTextureView(null)
// playerView.player = null
player!!.release()
player = null
}
}
override fun onDestroy() {
super.onDestroy()
if (!isChangingConfigurations)
cache?.release()
}
fun SimpleExoPlayer.playRawVideo(context: Context, #RawRes rawVideoRes: Int) {
val dataSpec = DataSpec(RawResourceDataSource.buildRawResourceUri(rawVideoRes))
val rawResourceDataSource = RawResourceDataSource(context)
rawResourceDataSource.open(dataSpec)
val factory: DataSource.Factory = DataSource.Factory { rawResourceDataSource }
prepare(LoopingMediaSource(ExtractorMediaSource.Factory(factory).createMediaSource(rawResourceDataSource.uri)))
}
fun SimpleExoPlayer.playVideoFromUrl(context: Context, url: String, cache: Cache? = null) = playVideoFromUri(context, Uri.parse(url), cache)
fun SimpleExoPlayer.playVideoFile(context: Context, file: File) = playVideoFromUri(context, Uri.fromFile(file))
fun SimpleExoPlayer.playVideoFromUri(context: Context, uri: Uri, cache: Cache? = null) {
val factory = if (cache != null)
CacheDataSourceFactory(cache, DefaultHttpDataSourceFactory(getUserAgent(context)))
else
DefaultDataSourceFactory(context, MainActivity.getUserAgent(context))
val mediaSource = ExtractorMediaSource.Factory(factory).createMediaSource(uri)
prepare(mediaSource)
}
}
Here's a solution for ImageView alone, if needed:
class ScaleCropImageView(context: Context, attrs: AttributeSet?) : AppCompatImageView(context, attrs) {
    // The point of the drawable (as fractions of its size) pinned to the same relative
    // point of the view. (0.5, 0.5) reproduces CENTER_CROP; (0.5, 0.2) pins 20% from top.
    var focalPoint = PointF(0.5f, 0.5f)
        set(value) {
            field = value
            updateMatrix()
        }

    private val viewWidth: Float
        get() = (width - paddingLeft - paddingRight).toFloat()
    private val viewHeight: Float
        get() = (height - paddingTop - paddingBottom).toFloat()

    init {
        scaleType = ScaleType.MATRIX
    }

    override fun onSizeChanged(w: Int, h: Int, oldw: Int, oldh: Int) {
        super.onSizeChanged(w, h, oldw, oldh)
        updateMatrix()
    }

    override fun setImageDrawable(drawable: Drawable?) {
        super.setImageDrawable(drawable)
        updateMatrix()
    }

    /** Recomputes and applies the crop matrix; no-op unless scaleType is MATRIX. */
    @Suppress("MemberVisibilityCanBePrivate")
    fun updateMatrix() {
        if (scaleType != ImageView.ScaleType.MATRIX)
            return
        val dr = drawable ?: return
        imageMatrix = prepareMatrix(
            viewWidth, viewHeight,
            dr.intrinsicWidth.toFloat(), dr.intrinsicHeight.toFloat(), focalPoint, Matrix()
        )
    }

    /**
     * Scales the media to fill the view (no gaps) while keeping [focalPoint] pinned.
     * Returns null when the view has no usable size yet.
     */
    private fun prepareMatrix(
        viewWidth: Float, viewHeight: Float, mediaWidth: Float, mediaHeight: Float,
        focalPoint: PointF, matrix: Matrix
    ): Matrix? {
        if (viewWidth <= 0 || viewHeight <= 0)
            return null
        var scaleFactor = viewHeight / mediaHeight
        // Scale then translate. The original pivot form divided by (1 - scaleFactor),
        // which produces NaN when scaleFactor == 1; this form is equivalent and safe.
        if (mediaWidth * scaleFactor >= viewWidth) {
            // Fit height; shift horizontally toward the focal point.
            matrix.postScale(scaleFactor, scaleFactor)
            matrix.postTranslate(-(mediaWidth * scaleFactor - viewWidth) * focalPoint.x, 0f)
        } else {
            // Fit width; shift vertically toward the focal point.
            scaleFactor = viewWidth / mediaWidth
            matrix.postScale(scaleFactor, scaleFactor)
            matrix.postTranslate(0f, -(mediaHeight * scaleFactor - viewHeight) * focalPoint.y)
        }
        return matrix
    }
}
The question is how to manipulate an image like ImageView.ScaleType.CENTER_CROP but to shift the focus from the center to another location that is 20% from the top of the image. First, let's look at what CENTER_CROP does:
From the documentation:
CENTER_CROP
Scale the image uniformly (maintain the image's aspect ratio) so that both dimensions (width and height) of the image will be equal to or larger than the corresponding dimension of the view (minus padding). The image is then centered in the view. From XML, use this syntax: android:scaleType="centerCrop".
In other words, scale the image without distortion such that either the width or height of the image (or both width and height) fit within the view so that the view is completely filled with the image (no gaps.)
Another way to think of this is that the center of the image is "pinned" to the center of the view. The image is then scaled to meet the criteria above.
In the following video, the white lines mark the center of the image; the red lines mark the center of the view. The scale type is CENTER_CROP. Notice how the center points of the image and the view coincide. As the view changes size, these two points continue to overlap and always appear at the center of the view regardless of the view size.
So, what does it mean to have center crop-like behavior at a different location such as 20% from the top? Like center crop, we can specify that the point that is 20% from the top of the image and the point that is 20% from the top of the view will be "pinned" like the 50% point is "pinned" in center crop. The horizontal location of this point remains at 50% of the image and view. The image can now be scaled to satisfy the other conditions of center crop which specify that either the width and/or height of the image will fit the view with no gaps. (Size of view is understood to be the view size less padding.)
Here is a short video of this 20% crop behavior. In this video, the white lines show the middle of the image, the red lines show the pinned point in the view and the blue line that shows behind the horizontal red line identifies 20% from the top of the image. (Demo project is on GitHub.)
Here is the result showing the full image that was supplied, and the video in a square frame that transitions from the still image.
MainActivity.kt
prepareMatrix() is the method that does the work to determine how to scale/crop the image. There is some additional work to be done with the video since it appears that the video is made to fit the TextureView, as if with scale type "FIT_XY", when it is assigned to the TextureView. Because of this scaling, the media size must be restored before prepareMatrix() is called for the video.
class MainActivity : AppCompatActivity() {
    private val imageResId = R.drawable.test
    private val videoResId = R.raw.test
    private var player: SimpleExoPlayer? = null
    // Point of the media pinned to the same relative point of the view (x=50%, y=20% from top).
    private val mFocalPoint = PointF(0.5f, 0.2f)

    override fun onCreate(savedInstanceState: Bundle?) {
        window.setBackgroundDrawable(ColorDrawable(0xff000000.toInt()))
        super.onCreate(savedInstanceState)
        if (cache == null) {
            cache = SimpleCache(File(cacheDir, "media"), LeastRecentlyUsedCacheEvictor(MAX_PREVIEW_CACHE_SIZE_IN_BYTES))
        }
        setContentView(R.layout.activity_main)
        // imageView.visibility = View.INVISIBLE
        imageView.setImageResource(imageResId)
        imageView.doOnPreDraw {
            imageView.scaleType = ImageView.ScaleType.MATRIX
            val imageWidth: Float = ContextCompat.getDrawable(this, imageResId)!!.intrinsicWidth.toFloat()
            val imageHeight: Float = ContextCompat.getDrawable(this, imageResId)!!.intrinsicHeight.toFloat()
            imageView.imageMatrix = prepareMatrix(imageView, imageWidth, imageHeight, mFocalPoint, Matrix())
            // Debug overlay: draw guide lines on a mutable copy of the bitmap.
            val b = BitmapFactory.decodeResource(resources, imageResId)
            val d = BitmapDrawable(resources, b.copy(Bitmap.Config.ARGB_8888, true))
            val c = Canvas(d.bitmap)
            val p = Paint()
            p.color = resources.getColor(android.R.color.holo_red_dark)
            p.style = Paint.Style.STROKE
            val strokeWidth = 10
            p.strokeWidth = strokeWidth.toFloat()
            // Horizontal line through the focal point
            c.drawLine(0f, imageHeight * mFocalPoint.y, imageWidth, imageHeight * mFocalPoint.y, p)
            // Vertical line through the focal point
            c.drawLine(imageWidth * mFocalPoint.x, 0f, imageWidth * mFocalPoint.x, imageHeight, p)
            // Lines in horizontal and vertical center
            p.color = resources.getColor(android.R.color.white)
            c.drawLine(imageWidth / 2, 0f, imageWidth / 2, imageHeight, p)
            c.drawLine(0f, imageHeight / 2, imageWidth, imageHeight / 2, p)
            imageView.setImageBitmap(d.bitmap)
            imageViewFull.setImageBitmap(d.bitmap)
        }
    }

    /** Click handler wired from XML; starts video playback. */
    fun startPlay(view: View) {
        playVideo()
    }

    private fun getViewWidth(view: View): Float {
        return (view.width - view.paddingStart - view.paddingEnd).toFloat()
    }

    private fun getViewHeight(view: View): Float {
        return (view.height - view.paddingTop - view.paddingBottom).toFloat()
    }

    /**
     * Appends a focal-point-pinned center-crop transform for media of
     * [mediaWidth] x [mediaHeight] onto [matrix] and returns it.
     * Returns [matrix] unchanged when [targetView] is not visible.
     */
    private fun prepareMatrix(targetView: View, mediaWidth: Float, mediaHeight: Float,
                              focalPoint: PointF, matrix: Matrix): Matrix {
        if (targetView.visibility != View.VISIBLE) {
            return matrix
        }
        val viewHeight = getViewHeight(targetView)
        val viewWidth = getViewWidth(targetView)
        val scaleFactorY = viewHeight / mediaHeight
        val scaleFactor: Float
        val px: Float
        val py: Float
        if (mediaWidth * scaleFactorY >= viewWidth) {
            // Fit height; shift horizontally toward the focal point.
            scaleFactor = scaleFactorY
            px = -(mediaWidth * scaleFactor - viewWidth) * focalPoint.x
            py = 0f
        } else {
            // Fit width; shift vertically toward the focal point.
            scaleFactor = viewWidth / mediaWidth
            px = 0f
            py = -(mediaHeight * scaleFactor - viewHeight) * focalPoint.y
        }
        // Scale then translate. The original pivot form divided by (1 - scaleFactor),
        // which yields NaN when scaleFactor == 1; this form is equivalent and safe.
        matrix.postScale(scaleFactor, scaleFactor)
        matrix.postTranslate(px, py)
        return matrix
    }

    private fun playVideo() {
        player = ExoPlayerFactory.newSimpleInstance(this@MainActivity, DefaultTrackSelector())
        player!!.setVideoTextureView(textureView)
        player!!.addVideoListener(object : VideoListener {
            override fun onVideoSizeChanged(width: Int, height: Int, unappliedRotationDegrees: Int, pixelWidthHeightRatio: Float) {
                super.onVideoSizeChanged(width, height, unappliedRotationDegrees, pixelWidthHeightRatio)
                val matrix = Matrix()
                // Restore true media size for further manipulation.
                matrix.setScale(width / getViewWidth(textureView), height / getViewHeight(textureView))
                textureView.setTransform(prepareMatrix(textureView, width.toFloat(), height.toFloat(), mFocalPoint, matrix))
            }

            override fun onRenderedFirstFrame() {
                Log.d("AppLog", "onRenderedFirstFrame")
                player!!.removeVideoListener(this)
                // Cross-fade from the still image to the playing video.
                imageView.animate().alpha(0f).setDuration(2000).start()
                imageView.visibility = View.INVISIBLE
            }
        })
        player!!.volume = 0f
        player!!.repeatMode = Player.REPEAT_MODE_ALL
        player!!.playRawVideo(this, videoResId)
        player!!.playWhenReady = true
        // player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/240/big_buck_bunny_240p_20mb.mkv", cache!!)
        // player!!.playVideoFromUrl(this, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv", cache!!)
        // player!!.playVideoFromUrl(this@MainActivity, "https://sample-videos.com/video123/mkv/720/big_buck_bunny_720p_1mb.mkv")
    }

    override fun onStop() {
        super.onStop()
        if (player != null) {
            player!!.setVideoTextureView(null)
            // playerView.player = null
            player!!.release()
            player = null
        }
    }

    companion object {
        const val MAX_PREVIEW_CACHE_SIZE_IN_BYTES = 20L * 1024L * 1024L
        var cache: com.google.android.exoplayer2.upstream.cache.Cache? = null

        /** Builds a user-agent string from the application label for HTTP data sources. */
        @JvmStatic
        fun getUserAgent(context: Context): String {
            val packageManager = context.packageManager
            val info = packageManager.getPackageInfo(context.packageName, 0)
            val appName = info.applicationInfo.loadLabel(packageManager).toString()
            return Util.getUserAgent(context, appName)
        }
    }

    /** Prepares and endlessly loops a raw-resource video on this player. */
    fun SimpleExoPlayer.playRawVideo(context: Context, @RawRes rawVideoRes: Int) {
        val dataSpec = DataSpec(RawResourceDataSource.buildRawResourceUri(rawVideoRes))
        val rawResourceDataSource = RawResourceDataSource(context)
        rawResourceDataSource.open(dataSpec)
        val factory: DataSource.Factory = DataSource.Factory { rawResourceDataSource }
        prepare(LoopingMediaSource(ExtractorMediaSource.Factory(factory).createMediaSource(rawResourceDataSource.uri)))
    }

    fun SimpleExoPlayer.playVideoFromUrl(context: Context, url: String, cache: Cache? = null) = playVideoFromUri(context, Uri.parse(url), cache)

    fun SimpleExoPlayer.playVideoFile(context: Context, file: File) = playVideoFromUri(context, Uri.fromFile(file))

    /** Prepares playback from [uri], optionally routed through [cache] for HTTP sources. */
    fun SimpleExoPlayer.playVideoFromUri(context: Context, uri: Uri, cache: Cache? = null) {
        val factory = if (cache != null)
            CacheDataSourceFactory(cache, DefaultHttpDataSourceFactory(getUserAgent(context)))
        else
            DefaultDataSourceFactory(context, MainActivity.getUserAgent(context))
        val mediaSource = ExtractorMediaSource.Factory(factory).createMediaSource(uri)
        prepare(mediaSource)
    }
}
you can use app:resize_mode="zoom" in com.google.android.exoplayer2.ui.PlayerView
I had a similar problem and solved it by applying transformations on the TextureView whose Surface is used by ExoPlayer:
// Center-crop-like scaling of the video, applied by transforming the TextureView
// that backs ExoPlayer's Surface once the real video size is known.
player.addVideoListener(object : VideoListener {
    override fun onVideoSizeChanged(
        videoWidth: Int,
        videoHeight: Int,
        unappliedRotationDegrees: Int,
        pixelWidthHeightRatio: Float,
    ) {
        // The transform only needs to be computed once per prepared video.
        removeVideoListener(this)
        val viewWidth: Int = textureView.width - textureView.paddingStart - textureView.paddingEnd
        val viewHeight: Int = textureView.height - textureView.paddingTop - textureView.paddingBottom
        if (videoWidth == viewWidth && videoHeight == viewHeight) {
            return
        }
        val matrix = Matrix().apply {
            // TextureView makes a best effort in fitting the video inside the View. The first transformation we apply is for reverting the fitting.
            setScale(
                videoWidth.toFloat() / viewWidth,
                videoHeight.toFloat() / viewHeight,
            )
        }
        // This algorithm is from ImageView's CENTER_CROP transformation
        val offset = 0.5f // the center in CENTER_CROP but you probably want a different value here
        val scale: Float
        val dx: Float
        val dy: Float
        if (videoWidth * viewHeight > viewWidth * videoHeight) {
            // Video is proportionally wider than the view: fit height, crop the sides.
            scale = viewHeight.toFloat() / videoHeight
            dx = (viewWidth - videoWidth * scale) * offset
            dy = 0f
        } else {
            // Video is proportionally taller than the view: fit width, crop top/bottom.
            scale = viewWidth.toFloat() / videoWidth
            dx = 0f
            dy = (viewHeight - videoHeight * scale) * offset
        }
        setTransform(matrix.apply {
            postScale(scale, scale)
            postTranslate(dx, dy)
        })
    }
})
player.setVideoTextureView(textureView)
player.prepare(createMediaSource())
Note that unless you're using DefaultRenderersFactory you need to make sure that your video Renderer actually calls onVideoSizeChanged by for instance creating the factory like so:
// Custom RenderersFactory that constructs only the renderers actually used, so the
// video renderer is guaranteed to report onVideoSizeChanged to the listener.
val renderersFactory = RenderersFactory { handler, videoListener, _, _, _, _ ->
    // Allows other renderers to be removed by R8
    arrayOf(
        MediaCodecVideoRenderer(
            context,
            MediaCodecSelector.DEFAULT,
            DefaultRenderersFactory.DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS,
            handler,
            videoListener,
            -1, // presumably maxDroppedFramesToNotify; -1 disables the notification — TODO confirm
        ),
        MediaCodecAudioRenderer(context, MediaCodecSelector.DEFAULT),
    )
}
I'm trying to implement a pie chart as shown in pictures below, where corners of the arc should be rounded.
I've tried to use CornerPathEffect(), but it seems to work only on the intersection of two lines (path.lineTo()). Nothing changes if I use this method for arc (path.arcTo()).
Try setting the stroke cap of the paint:
mPaint.setStrokeCap(Paint.Cap.ROUND);
I know it's too late for this answer but here is my solution.
PieSlice.kt
/** One slice of the pie chart. */
data class PieSlice(
    val name: String,
    // Raw data value; the slice angles are derived from it.
    var value: Double,
    // Start angle of the slice, in degrees.
    var startAngle: Float,
    // Angular extent of the slice, in degrees.
    var sweepAngle: Float,
    // Where the slice's indicator circle is placed.
    var indicatorCircleLocation: PointF,
    val paint: Paint
)
Function to draw an arc with rounded corners:
/**
 * Draws one pie slice as a filled path with rounded corners:
 * origin -> shortened edge line -> curved corner -> outer arc -> curved corner -> origin.
 * Relies on class fields defined elsewhere: originX/originY (pie center), outerRect
 * (arc bounds) and, via getPointX/getPointY, radius.
 * NOTE(review): angleEnd = startAngle - sweepAngle, so slices appear to be laid out
 * with negative sweep — confirm against how the slices are built.
 */
private fun drawCurvedArc(canvas: Canvas?, pieItem: PieSlice) {
    val path = Path()
    path.moveTo(originX, originY)
    val angleStart = pieItem.startAngle
    val angleEnd = (pieItem.startAngle - pieItem.sweepAngle)
    // Corner size in degrees, capped so very thin slices don't invert the arc.
    val arcOffset = pieItem.sweepAngle.coerceAtMost(7f)
    // For thin slices skip the edge shortening entirely.
    val lineOffset = if (pieItem.sweepAngle < 7f) 0f else 25f
    // line from origin to top
    val line1x = getPointX(angleStart, lineOffset)
    val line1y = getPointY(angleStart, lineOffset)
    path.lineTo(line1x, line1y)
    //Curve corner from line top to arc start
    val arcStartx = getPointX(angleStart - arcOffset)
    val arcStarty = getPointY(angleStart - arcOffset)
    joinLineAndArc(path, line1x, line1y, arcStartx, arcStarty)
    //Arc: sweep reduced by arcOffset on both ends to leave room for the corner curves
    path.arcTo(
        outerRect, (pieItem.startAngle - arcOffset),
        (-pieItem.sweepAngle + 2 * arcOffset), true
    )
    val line2x = getPointX(angleEnd, lineOffset)
    val line2y = getPointY(angleEnd, lineOffset)
    val arcEndx = getPointX(angleEnd + arcOffset)
    val arcEndy = getPointY(angleEnd + arcOffset)
    //Curve corner from arc end to bottom line
    joinLineAndArc(path, arcEndx, arcEndy, line2x, line2y)
    // Bottom line
    path.lineTo(originX, originY)
    // Filled paint with round joins/caps so the path edges stay smooth.
    val borderPaint = Paint()
    borderPaint.strokeJoin = Paint.Join.ROUND
    borderPaint.strokeCap = Paint.Cap.ROUND
    borderPaint.color = pieItem.paint.color
    borderPaint.style = Paint.Style.FILL
    borderPaint.strokeWidth = 0f
    canvas?.drawPath(path, borderPaint)
}
/**
 * Joins two points of the slice outline with a cubic curve, producing a rounded corner.
 *
 * The control point lies on the perpendicular bisector of the chord (x1,y1)-(x2,y2),
 * at a fixed distance from its midpoint:
 *
 *     (px, py) = (midX, midY) ± (D / |(y1−y2, x2−x1)|) * (y1−y2, x2−x1)
 *
 * Of the two candidates, the one farther from the pie's origin is chosen so the
 * corner bulges outward. Relies on class fields originX/originY (pie center).
 */
private fun joinLineAndArc(
    path: Path,
    x1: Float,
    y1: Float,
    x2: Float,
    y2: Float
) {
    val midX: Float = (x2 + x1) / 2f
    val midY: Float = (y2 + y1) / 2f
    // Chord components; (dy, dx) is perpendicular to the chord.
    val dx = (x2 - x1).toDouble()
    val dy = (y1 - y2).toDouble()
    val chordLen = sqrt(dy.pow(2.0) + dx.pow(2.0))
    if (chordLen == 0.0) {
        // Degenerate chord (both points coincide): the division below would produce a
        // NaN control point and corrupt the path, so just draw a plain line.
        path.lineTo(x2, y2)
        return
    }
    // Distance of the control point from the chord midpoint (corner "roundness").
    val len = 20.0
    // Candidate control points on either side of the chord.
    val p1x = (midX + ((len * dy) / chordLen)).toFloat()
    val p1y = (midY + ((len * dx) / chordLen)).toFloat()
    val p2x = (midX - ((len * dy) / chordLen)).toFloat()
    val p2y = (midY - ((len * dx) / chordLen)).toFloat()
    // Distances of each candidate from the pie center (kotlin.math used throughout
    // for consistency; the original mixed Math.sqrt/Math.pow with kotlin.math).
    val len1 = sqrt((originX - p1x).toDouble().pow(2.0) + (originY - p1y).toDouble().pow(2.0))
    val len2 = sqrt((originX - p2x).toDouble().pow(2.0) + (originY - p2y).toDouble().pow(2.0))
    // Make a curve toward the control point that is farther from the origin.
    if (len1 > len2) {
        path.cubicTo(x1, y1, p1x, p1y, x2, y2)
    } else {
        path.cubicTo(x1, y1, p2x, p2y, x2, y2)
    }
}
/**
 * Get the x coordinate of a point on the pie circle.
 * formula for x pos: (radius) * cos(angle) + (distance from left edge of screen)
 * @param angle angle of the point from the origin, in degrees
 * @param offset subtracted from the radius to produce a point short of the rim
 */
private fun getPointX(angle: Float, offset: Float = 0f): Float =
    ((radius - offset) * cos(Math.toRadians(angle.toDouble())) + originX).toFloat()
/**
 * Get the y coordinate of a point on the pie circle.
 * formula for y pos: (radius) * sin(angle) + (distance from top edge of screen)
 * @param angle angle of the point from the origin, in degrees
 * @param offset subtracted from the radius to produce a point short of the rim
 */
private fun getPointY(angle: Float, offset: Float = 0f): Float =
    ((radius - offset) * sin(Math.toRadians(angle.toDouble())) + originY).toFloat()
I want my app to simulate a swipe touch event (to up/down/left/right) when I click a button, then a TextView will scroll down/up.
I have tried to use Motion Event, but nothing happen after I dispatch 3 Motion Event of ACTION_DOWN, ACTION_MOVE and ACTION_UP respectively.
Is that possible to simulate a swipe event?
/** Button click handler (wired from XML); simulates a swipe starting at the bottom edge. */
public void simulation(View view){
    swipe(Direction.Bot);
}
/** Edge of the screen where the simulated swipe starts. */
public enum Direction {
    Top, Bot, Left, Right;
}
/**
 * Simulates a swipe by dispatching an ACTION_DOWN / ACTION_MOVE / ACTION_UP sequence
 * directly to the view with id R.id.my_id. Coordinates are derived from the display
 * size, inset 50px from the edges.
 */
protected void swipe(Direction direction) {
    Point size = new Point();
    this.getWindowManager().getDefaultDisplay().getSize(size);
    int height = size.y; // height will be at top of the screen
    int width = size.x; // width will be rightmost location of the screen
    float xStart = size.x-50;
    float xEnd = size.x-50;
    float yStart = size.y-50;
    float yEnd = size.y-50;
    long downTime = SystemClock.uptimeMillis();
    if(direction == Direction.Top || direction == Direction.Bot){
        yStart = ((direction == Direction.Top) ? 50 : (height - 50));
        yEnd = ((direction == Direction.Top) ? (height - 50) : 50);
    }else {
        xStart = ((direction == Direction.Left) ? (width - 50) : 50); // true: xStart = w-10; else: = 10
        xEnd = ((direction == Direction.Left) ? 50 : (width - 50)); // true: xEnd = 10; else: = w-10
    }
    // NOTE(review): dispatched coordinates are halved (xStart/2 etc.) — presumably to
    // land inside the target view; confirm against the view's on-screen bounds.
    // NOTE(review): a single ACTION_MOVE may be too coarse for gesture detectors, and
    // the obtained MotionEvents are never recycle()d.
    findViewById(R.id.my_id).dispatchTouchEvent(MotionEvent.obtain(downTime, SystemClock.uptimeMillis(),MotionEvent.ACTION_DOWN, xStart/2, yStart/2, 0));
    System.out.println("ACTION_DOWN");
    findViewById(R.id.my_id).dispatchTouchEvent(MotionEvent.obtain(downTime, SystemClock.uptimeMillis() + 500, MotionEvent.ACTION_MOVE, xEnd / 2, yEnd / 2, 0));
    System.out.println("ACTION_MOVE");
    findViewById(R.id.my_id).dispatchTouchEvent(MotionEvent.obtain(downTime, SystemClock.uptimeMillis() + 1000, MotionEvent.ACTION_UP, xEnd / 2, yEnd / 2, 0));
    System.out.println("ACTION_UP");
}
I was able to programmatically emulate a fling event in the scrolling activity demo.
This is an example of an emulated fling event that I tried, and it worked.
Blue dotted line is the fling event I have emulated:
class ScrollingActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_scrolling)
        setSupportActionBar(toolbar)
        fab.setOnClickListener { view ->
            // Instrumentation.sendPointerSync must not be called on the UI thread,
            // hence the background thread.
            Thread(Runnable {
                try {
                    fling(500f ,900f ,530f ,20f, 5);
                    // emulateMptionEvent()
                } catch (e: Exception) {
                    // NOTE(review): exceptions are swallowed silently — consider logging.
                }
            }).start()
        }
    }

    /**
     * Simulate touching a specific location and dragging to a new location.
     *
     * @param fromX X coordinate of the initial touch, in screen coordinates
     * @param toX X coordinate of the drag destination, in screen coordinates
     * @param fromY Y coordinate of the initial touch, in screen coordinates
     * @param toY Y coordinate of the drag destination, in screen coordinates
     * @param stepCount How many move steps to include in the drag
     */
    fun fling(
        fromX: Float, toX: Float, fromY: Float,
        toY: Float, stepCount: Int
    ) {
        val inst = Instrumentation()
        val downTime = SystemClock.uptimeMillis()
        var eventTime = SystemClock.uptimeMillis()
        var y = fromY
        var x = fromX
        // Per-step deltas for the interpolated ACTION_MOVE stream.
        val yStep = (toY - fromY) / stepCount
        val xStep = (toX - fromX) / stepCount
        var event = MotionEvent.obtain(
            downTime, eventTime,
            MotionEvent.ACTION_DOWN, fromX, fromY, 0
        )
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
            // Mark the event as coming from a touchscreen so views treat it as a real gesture.
            event.source = InputDevice.SOURCE_TOUCHSCREEN
        }
        inst.sendPointerSync(event)
        for (i in 0 until stepCount) {
            y += yStep
            x += xStep
            eventTime = SystemClock.uptimeMillis()
            // NOTE(review): "eventTime + stepCount" nudges the timestamp forward a few
            // ms — presumably to keep move timestamps ahead of downTime; confirm intent.
            event = MotionEvent.obtain(
                downTime, eventTime + stepCount,
                MotionEvent.ACTION_MOVE, x, y, 0
            )
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
                event.source = InputDevice.SOURCE_TOUCHSCREEN
            }
            inst.sendPointerSync(event)
        }
        // Lift the pointer at the destination, slightly after the last move.
        eventTime = SystemClock.uptimeMillis() + stepCount.toLong() + 2
        event = MotionEvent.obtain(
            downTime, eventTime,
            MotionEvent.ACTION_UP, toX, toY, 0
        )
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
            event.source = InputDevice.SOURCE_TOUCHSCREEN
        }
        inst.sendPointerSync(event)
    }
}
Hope it helps somebody
I have written extension functions that don't require Instrumentation, so they can be used not only in androidTest but also in Robolectric tests and even in a release build:
/** Swipes [target] leftwards by half of this container's width. */
fun ViewGroup.performSwipeToLeft(target: View) =
    performSwipe(target, distanceX = -width * .5f, distanceY = 0f)
/** Swipes [target] rightwards by half of this container's width. */
fun ViewGroup.performSwipeToRight(target: View) =
    performSwipe(target, distanceX = +width * .5f, distanceY = 0f)
/** Swipes [target] upwards by half of this container's height. */
fun ViewGroup.performSwipeToTop(target: View) =
    performSwipe(target, distanceX = 0f, distanceY = -height * .5f)
/** Swipes [target] downwards by half of this container's height. */
fun ViewGroup.performSwipeToBottom(target: View) {
    // Fix: the vertical distance must be based on height, not width —
    // performSwipeToTop uses height; using width here was a copy-paste slip.
    this.performSwipe(target, distanceX = 0f, distanceY = +this.height * .5f)
}
/**
 * Simulates a swipe over [target] by dispatching an ACTION_DOWN, a stream of
 * ACTION_MOVE events and an ACTION_UP to this ViewGroup. Works without
 * Instrumentation, so it is usable in Robolectric tests and release builds.
 * The gesture starts 1px inside the target's top-left corner and moves by
 * ([distanceX], [distanceY]) in 20 steps.
 */
fun ViewGroup.performSwipe(target: View, distanceX: Float, distanceY: Float) {
    // Window positions of the container and the target; the difference gives the
    // target's position local to this container.
    val parentCoords = intArrayOf(0, 0)
    this.getLocationInWindow(parentCoords)
    val childCoords = intArrayOf(0, 0)
    target.getLocationInWindow(childCoords)
    // +1f so the initial touch lands just inside the target's bounds.
    val initGlobalX = childCoords[0].toFloat() + 1f
    val initGlobalY = childCoords[1].toFloat() + 1f
    val initLocalX = (childCoords[0] - parentCoords[0]).toFloat() + 1f
    val initLocalY = (childCoords[1] - parentCoords[1]).toFloat() + 1f
    val downTime = SystemClock.uptimeMillis()
    var eventTime = SystemClock.uptimeMillis()
    this.dispatchTouchEvent(
        MotionEvent.obtain(
            downTime,
            eventTime,
            MotionEvent.ACTION_DOWN,
            initGlobalX,
            initGlobalY,
            0
        ).apply {
            // Raw coords come from obtain(); setLocation rebases to container-local coords.
            setLocation(initLocalX, initLocalY)
            source = InputDevice.SOURCE_TOUCHSCREEN
        }
    )
    val steps = 20
    var i = 0
    while (i in 0..steps) {
        val globalX = initGlobalX + i * distanceX / steps
        val globalY = initGlobalY + i * distanceY / steps
        val localX = initLocalX + i * distanceX / steps
        val localY = initLocalY + i * distanceY / steps
        // NOTE(review): aborts the move stream near the window's top/left edge
        // (<= 10px) — presumably to avoid leaving the touchable area; confirm intent.
        if (globalX <= 10f || globalY <= 10f) {
            break
        }
        this.dispatchTouchEvent(
            MotionEvent.obtain(
                downTime,
                ++eventTime, // each move gets a strictly increasing timestamp
                MotionEvent.ACTION_MOVE,
                globalX,
                globalY,
                0
            ).apply {
                setLocation(localX, localY)
                source = InputDevice.SOURCE_TOUCHSCREEN
            }
        )
        i++
    }
    // Lift the pointer at the last reached position (i reflects where the loop stopped).
    this.dispatchTouchEvent(
        MotionEvent.obtain(
            downTime,
            ++eventTime,
            MotionEvent.ACTION_UP,
            initGlobalX + i * distanceX,
            initGlobalY + i * distanceY,
            0
        ).apply {
            setLocation(initLocalX + i * distanceX, initLocalY + i * distanceY)
            source = InputDevice.SOURCE_TOUCHSCREEN
        }
    )
}
To use it you need to pass your textView as an argument, and the parent of textView should be the receiver of these functions:
val textView = ...
(textView.parent as ViewGroup).performSwipeToTop(textView)
With RecyclerView you may also want to take a look at the smoothScrollBy function, where you can pass an Interpolator like the following.
recyclerView.smoothScrollBy(0, 500, AccelerateDecelerateInterpolator())
Comes close enough to a "swipe scroll".