How to save video file using canvas on android? - android

Currently I'm working on video effects like thermal, mono, etc.. for that I'm using preview callback with canvas stuff.
Can anyone please tell me how to save this video?
Below I have my callback code:
// Per-frame preview hook: converts the incoming NV21 frame to RGB, applies a
// grayscale effect pixel by pixel, and draws the result onto the surface.
// NOTE(review): all of this work runs per frame on the callback thread and
// will cap the preview frame rate.
mCamera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data1, Camera _camera)
{
// Camera parameters are re-read on every frame; this is expensive and
// could be cached outside the callback.
Camera.Parameters parameters = _camera.getParameters();
parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);
previewSize = parameters.getPreviewSize();
int frameSize = previewSize.width * previewSize.height;
int height = previewSize.height;
int width = previewSize.width;
int pixel;
int A, R, G, B;
// BT.601 luma weights used by the grayscale pass below.
final double GS_RED = 0.299;
final double GS_GREEN = 0.587;
final double GS_BLUE = 0.114;
// One packed ARGB int per pixel. The extra +1 slot is never written —
// presumably a leftover off-by-one guard; verify it can be dropped.
int[] rgba = new int[frameSize+1];
data = data1;
// YUV -> RGB conversion. NOTE(review): the +0 chroma byte is read as "u"
// although NV21 stores V first, and "b" is packed into the red slot
// below — the two swaps appear to cancel out; confirm on device.
for (int i = 0; i < height; i++)
for(int j = 0; j < width; j++)
{
int y = (0xff & ((int) data[i * previewSize.width + j]));
int u = (0xff & ((int) data[frameSize + (i >> 1) * previewSize.width + (j & ~1) + 0]));
int v = (0xff & ((int) data[frameSize + (i >> 1) * previewSize.width + (j & ~1) + 1]));
y = y < 16 ? 16 : y;
int r = Math.round(1.164f * (y - 16) + 1.596f * (v - 128));
int g = Math.round(1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = Math.round(1.164f * (y - 16) + 2.018f * (u - 128));
r = r < 0 ? 0 : (r > 255 ? 255 : r);
g = g < 0 ? 0 : (g > 255 ? 255 : g);
b = b < 0 ? 0 : (b > 255 ? 255 : b);
rgba[i * previewSize.width + j] = 0xff000000 + (b << 16) + (g<< 8) + r;
}
// NOTE(review): the bitmap config is RGB_565 while setPixels receives
// 32-bit ARGB values; Android converts on the fly, losing precision.
Bitmap bmp = Bitmap.createBitmap(width, height,
Bitmap.Config.RGB_565);
bmp.setPixels(rgba, 0 , width , 0, 0, width, height);
// scan through every single pixel
for(int x = 0; x < width; ++x) {
for(int y = 0; y < height; ++y) {
// get one pixel color
pixel = bmp.getPixel(x, y);
// retrieve color of all channels
A = Color.alpha(pixel);
R = Color.red(pixel);
G = Color.green(pixel);
B = Color.blue(pixel);
// take conversion up to one single value
R = G = B = (int)(GS_RED * R + GS_GREEN * G + GS_BLUE * B);
// set new pixel color to output bitmap
bmp.setPixel(x, y, Color.argb(A, R, G, B));
}
}
// Draw the processed frame onto the SurfaceHolder-backed canvas and
// release the bitmap immediately afterwards.
canvas = mHolder.lockCanvas();
if (canvas != null)
{
canvas.drawBitmap(bmp, (canvas.getWidth() - width) / 4, (canvas.getHeight() - height) / 4, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
});

You can save each frame and then merge them with ffmpeg to encode them into a video file.

Related

What may cause the unexpected behavior with bitmap filter?

I have a problem with a function to adjust bitmap contrast.
This function is controlled by 2 seekbars. When I change the value on the first seekbar everything is fine, but when the value on the second seekbar is between 0 and 99 nothing changes. Only when the value is 100 does the bitmap change and get the contrast effect.
/**
 * Applies a contrast adjustment followed by a brightness shift.
 *
 * @param src             source bitmap (left unmodified)
 * @param valueContrast   contrast slider value; 0 means no contrast change
 * @param valueBrightness signed offset added to every colour channel
 * @return a new bitmap with both filters applied
 */
public Bitmap applyRGBFilters2(Bitmap src, int valueContrast, int valueBrightness){
    int width = src.getWidth();
    int height = src.getHeight();
    Bitmap bmOut = Bitmap.createBitmap(width, height, src.getConfig());
    int A, R, G, B;
    int pixel;
    // BUG FIX: (100 + valueContrast) / 100 was integer division, so every
    // slider value from 0..99 collapsed to a factor of 1 (no visible change)
    // and only 100 produced a factor of 2. Divide in floating point.
    double contrast = Math.pow((100 + valueContrast) / 100.0, 2);
    // Contrast pass: scale each channel around the 0.5 mid-point, clamp.
    for(int x = 0; x < width; ++x) {
        for(int y = 0; y < height; ++y) {
            pixel = src.getPixel(x, y);
            A = Color.alpha(pixel);
            R = Color.red(pixel);
            R = (int)(((((R / 255.0) - 0.5) * contrast) + 0.5) * 255.0);
            if(R < 0) { R = 0; }
            else if(R > 255) { R = 255; }
            G = Color.green(pixel);
            G = (int)(((((G / 255.0) - 0.5) * contrast) + 0.5) * 255.0);
            if(G < 0) { G = 0; }
            else if(G > 255) { G = 255; }
            B = Color.blue(pixel);
            B = (int)(((((B / 255.0) - 0.5) * contrast) + 0.5) * 255.0);
            if(B < 0) { B = 0; }
            else if(B > 255) { B = 255; }
            bmOut.setPixel(x, y, Color.argb(A, R, G, B));
        }
    }
    // Brightness pass.
    // BUG FIX: the original read pixels from bmOut3 — a freshly created,
    // blank bitmap — so the contrast result was discarded and the output was
    // just a flat brightness value. Read from the contrast-adjusted bitmap.
    // (The intermediate bmOut2 copy was redundant and has been removed.)
    Bitmap bmOut3 = Bitmap.createBitmap(width, height, bmOut.getConfig());
    for(int x = 0; x < width; ++x) {
        for(int y = 0; y < height; ++y) {
            pixel = bmOut.getPixel(x, y);
            A = Color.alpha(pixel);
            R = Color.red(pixel) + valueBrightness;
            if(R > 255) { R = 255; }
            else if(R < 0) { R = 0; }
            G = Color.green(pixel) + valueBrightness;
            if(G > 255) { G = 255; }
            else if(G < 0) { G = 0; }
            B = Color.blue(pixel) + valueBrightness;
            if(B > 255) { B = 255; }
            else if(B < 0) { B = 0; }
            bmOut3.setPixel(x, y, Color.argb(A, R, G, B));
        }
    }
    return bmOut3;
}
Do you see any wrong things here?

How to detect average pixel intensity from live camera preview?

I am building a scanner app, and trying to determine the "preview quality" from the preview callback of the camera. I want to customize the camera's AUTO_FLASH_MODE where it will be turned on if the environment is too dark.
How can I detect if there is a high average of dark pixels? This means (in preview) I am getting darkness and therefore need to turn on the camera's flash light.
Either find out how to access pixel values of your image and calculate the average intensity yourself or use any image processing library to do so.
Dark pixels have low values, bright pixels have high values.
You want to calculate the average of all red, green and blue values divided by three times your pixel count.
Define a threshold for when to turn on the flash, but keep in mind that you have to get a new exposure time then.
Prefer flash over exposure time increase as long exposure times yield higher image noise.
I tried this approach, but I think it spends unnecessary time processing the bitmap before computing the average screen color:
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
Size cameraResolution = resolution;
PreviewCallback callback = this.callback;
if (cameraResolution != null && callback != null)
{
int format = camera.getParameters().getPreviewFormat();
SourceData source = new SourceData(data, cameraResolution.width, cameraResolution.height, format, getCameraRotation());
callback.onPreview(source);
final int[] rgb = decodeYUV420SP(data, cameraResolution.width, cameraResolution.height);
//Bitmap bmp = decodeBitmap(source.getData());
Bitmap bmp = Bitmap.createBitmap(rgb, cameraResolution.width, cameraResolution.height, Bitmap.Config.ARGB_8888);
if (bmp != null)
{
//bmp = decodeBitmap(source.getData());
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
// bmp.compress(Bitmap.CompressFormat.JPEG, 70, bytes);
Bitmap resizebitmap = Bitmap.createBitmap(bmp,
bmp.getWidth() / 2, bmp.getHeight() / 2, 60, 60);
int color = getAverageColor(resizebitmap);
Log.i("Color Int", color + "");
// int color = resizebitmap.getPixel(resizebitmap.getWidth()/2,resizebitmap.getHeight()/2);
String strColor = String.format("#%06X", 0xFFFFFF & color);
//String colorname = sColorNameMap.get(strColor);
Log.d("strColor", strColor);
Log.i("strColor", color + "");
if(!mIsOn)
{
if (color == -16777216 || color < -16777216)//minimum color code (full dark)
{
mIsOn = true;
setTorch(true);
Log.d("Yahooooo", "" + color);
}
}
Log.i("Pixel Value",
"Top Left pixel: " + Integer.toHexString(color));
}
}
else
{
Log.d(TAG, "Got preview callback, but no handler or resolution available");
}
}
}
/**
 * Converts an NV21 (YUV420SP) preview frame to packed ARGB_8888 pixels
 * using fixed-point BT.601-style arithmetic (scale factor 1192 ~= 1.164*1024).
 *
 * Made static: the method reads no instance state (stateless private helper).
 *
 * @param yuv420sp NV21 frame, length >= width * height * 3 / 2
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 * @return one 0xAARRGGBB int per pixel, alpha always 0xFF
 */
private static int[] decodeYUV420SP(byte[] yuv420sp, int width, int height)
{
    final int frameSize = width * height;
    int[] rgb = new int[frameSize];
    for (int j = 0, yp = 0; j < height; j++) {
        // Interleaved VU plane starts at frameSize; one VU pair covers a
        // 2x2 block of luma pixels.
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                // NV21 stores V before U.
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            // Clamp the 18-bit fixed-point intermediates before packing.
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) &
                    0xff00) | ((b >> 10) & 0xff);
        }
    }
    return rgb;
}
/**
 * Computes the average colour of a bitmap by summing each channel over all
 * pixels and dividing by the pixel count. Alpha is ignored.
 *
 * @param bitmap source bitmap; may be any size, including 0x0
 * @return an opaque Color int holding the per-channel averages, or opaque
 *         black when the bitmap contains no pixels
 */
private int getAverageColor(Bitmap bitmap)
{
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    long pixelCount = (long) width * height;
    // BUG FIX: the original divided by pixelCount unconditionally and threw
    // ArithmeticException on an empty (0x0) bitmap.
    if (pixelCount == 0) {
        return Color.rgb(0, 0, 0);
    }
    // long accumulators: int sums overflow past ~8.4M fully-bright pixels.
    long redBucket = 0;
    long greenBucket = 0;
    long blueBucket = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int c = bitmap.getPixel(x, y);
            redBucket += Color.red(c);
            greenBucket += Color.green(c);
            blueBucket += Color.blue(c);
        }
    }
    return Color.rgb((int) (redBucket / pixelCount),
            (int) (greenBucket / pixelCount),
            (int) (blueBucket / pixelCount));
}

YUV to RGB and CreateBitmap...which is the format?

I'm not expert with image format. I'm testing frame rate performance of camera.
When I convert data from YUV to RGB, which RGB format does the resulting data have: rgb565 or argb8888?
And why does createBitmap take so long? Does it add information to the raw data?
This is the rgb code
/**
 * Decodes an NV21 frame into a packed ARGB_8888 int array (alpha fixed at
 * 0xFF), one int per pixel in row-major order.
 */
public int[] YUV_NV21_TO_RGB( byte[] yuv, int width, int height) {
    final int frameSize = width * height;
    final int[] argb = new int[frameSize];
    int out = 0;
    for (int row = 0; row < height; ++row) {
        // Interleaved chroma plane: one VU pair per 2x2 block of pixels.
        final int uvRow = frameSize + (row >> 1) * width;
        for (int col = 0; col < width; ++col) {
            int y = 0xff & yuv[row * width + col];
            final int v = 0xff & yuv[uvRow + (col & ~1)];
            final int u = 0xff & yuv[uvRow + (col & ~1) + 1];
            if (y < 16) {
                y = 16;
            }
            // Fixed-point conversion (scaled by 1024), shifted back down
            // before clamping to the displayable 0..255 range.
            final int luma = 1192 * (y - 16);
            int r = (luma + 1634 * (v - 128)) >> 10;
            int g = (luma - 832 * (v - 128) - 400 * (u - 128)) >> 10;
            int b = (luma + 2066 * (u - 128)) >> 10;
            r = r < 0 ? 0 : (r > 255 ? 255 : r);
            g = g < 0 ? 0 : (g > 255 ? 255 : g);
            b = b < 0 ? 0 : (b > 255 ? 255 : b);
            argb[out++] = 0xff000000 | (r << 16) | (g << 8) | b;
        }
    }
    return argb;
}
The problem is that if i use CreateBitmap with RGB_565 option, time is at least 10 ms faster than ARGB8888.
If RGB_565 is a sort of compression (loss of data), shouldn't it be the opposite (createBitmap with ARGB_8888 faster than RGB_565)?

Android bitmap image

I am new to android development .
I would like to know how i can modify an image selected by the user from gallery.
Is it possible to do this by getting the image using "File Input Stream"?
Or should i use Bitmap?
Create a canvas object from the respective Bitmap.
All editing can be done on the canvas instance and will be applied to the bitmap Object.
Using the Aviary SDK. It also supports iOS and Windows Phone 7. Aviary provides most common functions such as Orientation, Crop, Sharpness, Red-Eye, Whiten, Blemish, Stickers, Drawing, Text, Meme (beta), Brightness, Saturation, and Contrast, plus custom options.
import android.graphics.Bitmap;
/**
 * Holder for a bitmap plus a numeric identifier, with a hue-rotation filter.
 */
public class ProcessingImage {
    private Bitmap defaultImg;
    private int idBitmap;

    public ProcessingImage() {
    }

    public int getIdBitmap() {
        return idBitmap;
    }

    public void setIdBitmap(int idBitmap) {
        this.idBitmap = idBitmap;
    }

    public Bitmap getDefaultImg() {
        return defaultImg;
    }

    public void setDefaultImg(Bitmap defaultImg) {
        this.defaultImg = defaultImg;
    }

    /** Identity transform: returns the input bitmap unchanged. */
    public Bitmap processingI(Bitmap myBitmap) {
        return myBitmap;
    }

    /**
     * Rotates the hue of every pixel by {@code deg} degrees and returns the
     * result as a new bitmap; the input bitmap is left untouched.
     */
    public Bitmap TintThePicture(int deg, Bitmap defaultBitmap) {
        final int width = defaultBitmap.getWidth();
        final int height = defaultBitmap.getHeight();
        final int[] pixels = new int[width * height];
        defaultBitmap.getPixels(pixels, 0, width, 0, 0, width, height);
        // Fixed-point sine/cosine of the rotation angle, scaled by 256.
        final double radians = (3.14159d * (double) deg) / 180.0d;
        final int sin = (int) (256.0d * Math.sin(radians));
        final int cos = (int) (256.0d * Math.cos(radians));
        for (int idx = 0; idx < width * height; idx++) {
            final int red = (pixels[idx] >> 16) & 0xff;
            final int green = (pixels[idx] >> 8) & 0xff;
            final int blue = pixels[idx] & 0xff;
            // Split into luma plus two colour-difference components.
            final int ry = (70 * red - 59 * green - 11 * blue) / 100;
            final int by = (-30 * red - 59 * green + 89 * blue) / 100;
            final int luma = (30 * red + 59 * green + 11 * blue) / 100;
            // Rotate the chroma plane, then derive the green difference.
            final int ryy = (sin * by + cos * ry) / 256;
            final int byy = (cos * by - sin * ry) / 256;
            final int gyy = (-51 * ryy - 19 * byy) / 100;
            int r = luma + ryy;
            r = (r < 0) ? 0 : ((r > 255) ? 255 : r);
            int g = luma + gyy;
            g = (g < 0) ? 0 : ((g > 255) ? 255 : g);
            int b = luma + byy;
            b = (b < 0) ? 0 : ((b > 255) ? 255 : b);
            pixels[idx] = 0xff000000 | (r << 16) | (g << 8) | b;
        }
        final Bitmap tinted = Bitmap.createBitmap(width, height, defaultBitmap.getConfig());
        tinted.setPixels(pixels, 0, width, 0, 0, width, height);
        return tinted;
    }
}

Android JavaCV create IplImage from Camera to use with ColorHistogram

I am using JavaCV in Android.
In my code, I have created a ImageComparator(class of OpenCV CookBook
http://code.google.com/p/javacv/source/browse/OpenCV2_Cookbook/src/opencv2_cookbook/chapter04/ImageComparator.scala?repo=examples
http://code.google.com/p/javacv/wiki/OpenCV2_Cookbook_Examples_Chapter_4) Object and use that object to compare images. If I use file from SD card the comparator is working.
// Load a reference image from an absolute path and build a histogram-based
// comparator (OpenCV cookbook ImageComparator) from it.
File referenceImageFile = new File(absPath1); // Read an image.
IplImage reference = Util.loadOrExit(referenceImageFile,CV_LOAD_IMAGE_COLOR);
comparator = new ImageComparator(reference);
// NOTE(review): the comparator is constructed twice from the same reference;
// the second call looks redundant — confirm and drop one.
comparator = new ImageComparator(reference);
But from Camera Preview, when I am creating IplImage it is not working. I am getting the following Exception during comparison "score" calculation.
score = referenceComparator.compare(grayImage) / imageSize;
java.lang.RuntimeException: /home/saudet/android/OpenCV-2.4.2/modules/core/src/convert.cpp:1196: error: (-215) i < src.channels() in function void cvSplit(const void*, void*, void*, void*, void*)
For the camera preview I am using the code from FacePreview to create the IplImage, but it creates the image in grayscale.
// Lazily (re)allocate a single-channel 8-bit IplImage sized to the
// subsampled preview dimensions.
int f = SUBSAMPLING_FACTOR;
if (grayImage == null || grayImage.width() != width / f
|| grayImage.height() != height / f) {
grayImage = IplImage.create(width / f, height / f, IPL_DEPTH_8U, 1);
}
int imageWidth = grayImage.width();
int imageHeight = grayImage.height();
// Row stride in the source buffer, stepping f source rows per output row.
int dataStride = f * width;
int imageStride = grayImage.widthStep();
ByteBuffer imageBuffer = grayImage.getByteBuffer();
// Copy every f-th luma byte; the chroma plane is never read, which is why
// the resulting image is grayscale.
for (int y = 0; y < imageHeight; y++) {
int dataLine = y * dataStride;
int imageLine = y * imageStride;
for (int x = 0; x < imageWidth; x++) {
imageBuffer.put(imageLine + x, data[dataLine + f * x]);
}
}
How to create a Color IplImage from Camera to use with ImageComparator?
The below code seems to be working fine.
// Forwards each preview frame to processImage() and hands the buffer back to
// the camera. A RuntimeException here usually means the camera was released
// mid-callback, so it is logged and swallowed on purpose.
public void onPreviewFrame(final byte[] data, final Camera camera) {
    try {
        final Camera.Size previewSize = camera.getParameters().getPreviewSize();
        processImage(data, previewSize.width, previewSize.height);
        camera.addCallbackBuffer(data);
    } catch (RuntimeException e) {
        // The camera has probably just been released, ignore.
        Log.d("Exception", " " + e);
    }
}
// Decodes the current preview frame into an IplImage and scores it against
// every reference comparator in reference_List, storing one normalized score
// per comparator in the shared "score" list.
protected void processImage(byte[] data, int width, int height) {
score.clear();
// First, downsample our image
int f = SUBSAMPLING_FACTOR;
// NOTE(review): the subsampling factor f is passed as the CHANNEL COUNT
// here (e.g. 4 channels when f == 4) rather than used to shrink the image
// dimensions — verify this is intentional.
IplImage _4image = IplImage.create(width, height, IPL_DEPTH_8U, f);
int[] _temp = new int[width * height];
if (_4image != null) {
decodeYUV420SP(_temp, data, width, height);
_4image.getIntBuffer().put(_temp);
}
//bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
//bitmap.copyPixelsFromBuffer(_4image.getByteBuffer());
Log.d("CompareAndroid", "processImage");
int imageSize = _4image.width() * _4image.height();
Iterator<ImageComparator> iterator = reference_List.iterator();
// Compute histogram match and normalize by image size.
// 1 means perfect match.
while(iterator.hasNext()){
score.add(((ImageComparator) iterator.next()).compare(_4image) / imageSize);
}
Log.d("CompareImages", "Score Size "+score.size());
postInvalidate();
}
This code seems to be working fine.
/**
 * Fills {@code rgb} with packed pixels decoded from an NV21 (YUV420SP)
 * preview frame using fixed-point BT.601-style arithmetic.
 *
 * NOTE(review): the packing below puts the blue result in bits 16-23 and the
 * red result in bits 0-7 — swapped relative to Android's ARGB layout and to
 * the other decodeYUV420SP in this file. Presumably this matches the byte
 * order the IplImage consumer expects; confirm before "fixing" it.
 *
 * Made static: the method reads no instance state (stateless private helper).
 *
 * @param rgb      output array, length >= width * height
 * @param yuv420sp NV21 frame, length >= width * height * 3 / 2
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 */
private static void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width,
        int height) {
    int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        // Interleaved VU plane: one VU pair per 2x2 block of luma pixels.
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            if ((i & 1) == 0) {
                // NV21 stores V before U.
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            // Clamp the 18-bit fixed-point intermediates before packing.
            if (r < 0)
                r = 0;
            else if (r > 262143)
                r = 262143;
            if (g < 0)
                g = 0;
            else if (g > 262143)
                g = 262143;
            if (b < 0)
                b = 0;
            else if (b > 262143)
                b = 262143;
            rgb[yp] = 0xff000000 | ((b << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((r >> 10) & 0xff);
        }
    }
}
I haven't tested it, but something like this should work:
// Untested suggestion: wrap the NV21 buffer in an IplImage spanning
// height * 3/2 rows and let OpenCV perform the colour conversion.
IplImage yuvimage = IplImage.create(width, height * 3 / 2, IPL_DEPTH_8U, 2);
IplImage rgbimage = IplImage.create(width, height, IPL_DEPTH_8U, 3);
// NOTE(review): OpenCV's CV_YUV2BGR_NV21 conversion normally expects a
// SINGLE-channel 8U image of height*3/2 rows; the 2-channel layout above may
// not match — verify against the OpenCV cvtColor documentation.
cvCvtColor(yuvimage, rgbimage, CV_YUV2BGR_NV21);

Categories

Resources