CameraX Analysis / Camera onPreviewFrame - android

In CameraX ImageAnalysis I call setTargetResolution(new Size(2560, 800)), but in the Analyzer imageProxy.getImage().getWidth() = 1280 and getHeight() = 400, and YUVToByte(imageProxy.getImage()).length = 768000. With the old Camera API, after parameters.setPreviewSize(2560, 800) the byte[] length in onPreviewFrame is 3072000 (which equals 768000 * (2560/1280) * (800/400)). How can I make imageProxy.getImage() return width 2560 and height 800, so that YUVToByte(imageProxy.getImage()).length = 3072000? Also, in my CameraX onPreviewFrame() the result res is always null, while in the Camera onPreviewFrame() res gets the correct value. What is the difference between CameraX and Camera, and what should I do in CameraX?
CameraX:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
try {
copyDataBase();
} catch (IOException e) {
e.printStackTrace();
}
getSupportActionBar().hide();
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
viewFinder = findViewById(R.id.previewView);
executor = Executors.newSingleThreadExecutor();
if (!allPermissionGranted()) {
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
width = metric.widthPixels;
height = metric.heightPixels;
l = 100;
r = width - 100;
ntmpH = (r - l) * 58 / 100;
t = (height - ntmpH) / 2;
b = t + ntmpH;
double proportion = (double) width / (double) preHeight;
double hproportion = (double) height / (double) preWidth;
l = (int) (l / proportion);
t = (int) (t / hproportion);
r = (int) (r / proportion);
b = (int) (b / hproportion);
m_ROI[0] = l;
m_ROI[1] = t;
m_ROI[2] = r;
m_ROI[3] = b;
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
#SuppressLint("RestrictedApi") Preview preview = new Preview.Builder().build();
CameraSelector cameraSelector = new CameraSelector.Builder().
requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
.setTargetResolution(new Size(2560, 800))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.setTargetRotation(Surface.ROTATION_90)
.build();
imageAnalysis.setAnalyzer(executor, new ImageAnalysis.Analyzer() {
#SuppressLint("UnsafeExperimentalUsageError")
#Override
public void analyze(#NonNull ImageProxy imageProxy) {
if (imageProxy.getFormat() == ImageFormat.YUV_420_888) {
image = imageProxy.getImage();
bIninKernal();
Log.d("Size ", image.getWidth() + "/" + image.getHeight());
onPreviewFrame(YUVToByte(image));
} else {
Log.d("Status ", "照片格式錯誤" + imageProxy.getFormat());
}
imageProxy.close();
}
});
cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalysis);
preview.setSurfaceProvider(viewFinder.createSurfaceProvider());
} catch (ExecutionException | InterruptedException e) {
e.printStackTrace();
}
}, ContextCompat.getMainExecutor(this));
}
@NonNull
@Override
public CameraXConfig getCameraXConfig() {
return Camera2Config.defaultConfig();
}
private boolean allPermissionGranted() {
for (String permission : REQUIRED_PERMISSIONS) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
private byte[] YUVToByte(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer0 = planes[0].getBuffer();
ByteBuffer buffer1 = planes[1].getBuffer();
ByteBuffer buffer2 = planes[2].getBuffer();
int width = image.getWidth();
int height = image.getHeight();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData1 = new byte[planes[1].getRowStride()];
byte[] rowData2 = new byte[planes[2].getRowStride()];
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
// loop via rows of u/v channels
int offsetY = 0;
int sizeY = width * height * bytesPerPixel;
int sizeUV = (width * height * bytesPerPixel) / 4;
for (int row = 0; row < height; row++) {
// fill data for Y channel, two row
{
int length = bytesPerPixel * width;
buffer0.get(data, offsetY, length);
if (height - row != 1)
buffer0.position(buffer0.position() + planes[0].getRowStride() - length);
offsetY += length;
}
if (row >= height / 2)
continue;
{
int uvlength = planes[1].getRowStride();
if ((height / 2 - row) == 1) {
uvlength = width / 2 - planes[1].getPixelStride() + 1;
}
buffer1.get(rowData1, 0, uvlength);
buffer2.get(rowData2, 0, uvlength);
// fill data for u/v channels
for (int col = 0; col < width / 2; ++col) {
// u channel
data[sizeY + (row * width) / 2 + col] = rowData1[col * planes[1].getPixelStride()];
// v channel
data[sizeY + sizeUV + (row * width) / 2 + col] = rowData2[col * planes[2].getPixelStride()];
}
}
}
return data;
}
private void bIninKernal() {
api = new LPR();
String FilePath = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
int nRet = api.Init(this, m_ROI[0], m_ROI[1], m_ROI[2], m_ROI[3], preHeight, preWidth, FilePath);
if (nRet != 0) {
bInitKernal = false;
Log.d("Status ", "相機開啟失敗");
} else {
bInitKernal = true;
}
}
private void onPreviewFrame(byte[] data) {
bIninKernal();
tackData = data;
Log.d("data length ", data.length + "");
resultStr = "";
if (!leaving && bInitKernal) {
byte[] result;
String res = "";
result = api.VideoRec(tackData, 1280, 400, 1);
try {
res = new String(result, "gb2312");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
if (res != null && !"".equals(res.trim())) {
resultStr = res.trim();
if (resultStr != "") {
leaving = true;
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
Log.d("Status ", "辨識成功");
Log.d("車牌號碼", resultStr);
Thread thread = new Thread(Image_update);
thread.start();
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
Intent intent = new Intent(MainActivity2.this, MainActivity.class);
intent.putExtra("result", resultStr);
Log.d("result", resultStr);
setResult(1, intent);
finish();
}
} else {
Log.d("Status ", "未分辨車牌號碼,請重拍");
}
}
}
public void copyDataBase() throws IOException {
// Common common = new Common();
// Get the SD-card path for /lpr.key
String dst = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
File file = new File(dst);
if (!file.exists()) {
// file.createNewFile();
} else {
file.delete();
}
Log.d("File Name", file.toString());
try {
InputStream myInput = getAssets().open("lpr.key");
OutputStream myOutput = new FileOutputStream(dst);
byte[] buffer = new byte[1024];
int length;
while ((length = myInput.read(buffer)) > 0) {
myOutput.write(buffer, 0, length);
}
myOutput.flush();
myOutput.close();
myInput.close();
} catch (Exception e) {
System.out.println("lpr.key" + "is not found");
}
}
private Runnable Image_update = new Runnable() {
Retrofit retrofit = new Retrofit.Builder()
.baseUrl("https://0d9dccd7eac8.ngrok.io/")
.addConverterFactory(GsonConverterFactory.create())
.build();
MyAPIService myAPIService = retrofit.create(MyAPIService.class);
@Override
public void run() {
Log.d("Status ", "run");
Log.d("resultStr ", resultStr);
String url = "D:\\Images\\license_plate\\";
String imgStr = bitmap2base64(toBitmap(image));
LicensePlate licensePlate = new LicensePlate();
licensePlate.setsPlate(resultStr);
licensePlate.setsPicPosition(url + resultStr);
licensePlate.setImgStr(imgStr);
Call<LicensePlate> call = myAPIService.uploadLicensePlate(licensePlate);
call.enqueue(new Callback<LicensePlate>() {
@Override
public void onResponse(Call<LicensePlate> call, Response<LicensePlate> response) {
if(response.isSuccessful()){
Log.d("Status ", "照片上傳成功");
}else{
Log.d("Status ", "照片上傳失敗");
Log.d("response code ", response.code() + "");
}
}
@Override
public void onFailure(Call<LicensePlate> call, Throwable t) {
Log.d("Status ", "onFailure");
Log.d("Message ", t.getMessage());
}
});
}
};
private String bitmap2base64(Bitmap bitmap){
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
return Base64.encodeToString(outputStream.toByteArray(), Base64.DEFAULT).trim().replaceAll("\n", "").replaceAll("\r", "");
}
private Bitmap toBitmap(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer yBuffer = planes[0].getBuffer();
ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
byte[] nv21 = new byte[ySize + uSize + vSize];
//U and V are swapped
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 75, out);
byte[] imageBytes = out.toByteArray();
return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
}
Camera
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); // hide the title bar
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
int metricwidth = metric.widthPixels; // screen width (pixels)
int metricheight = metric.heightPixels; // screen height (pixels)
try {
copyDataBase();
} catch (IOException e) {
e.printStackTrace();
}
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); // portrait orientation
Configuration cf = this.getResources().getConfiguration(); // get the current configuration
int noriention = cf.orientation;
requestWindowFeature(Window.FEATURE_NO_TITLE); // hide the title bar
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); // set full screen
// keep the screen on
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_lpr2);
findView();
}
private void findView() {
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
re_c = (RelativeLayout) findViewById(R.id.re_c);
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
width = metric.widthPixels; // screen width (pixels)
height = metric.heightPixels; // screen height (pixels)
if(myView==null)
{
if (isFatty)
{
myView = new LPRfinderView2(LPR2Activity.this, width, height, isFatty);
}
else
{
myView = new LPRfinderView2(LPR2Activity.this, width, height);
}
re_c.addView(myView);
}
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(LPR2Activity.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
surfaceView.setFocusable(true);
//surfaceView.invalidate();
}
public void copyDataBase() throws IOException {
// Common common = new Common();
// Get the SD-card path for /lpr.key
String dst = Environment.getExternalStorageDirectory().toString() + "/lpr.key";
File file = new File(dst);
if (!file.exists()) {
// file.createNewFile();
} else {
file.delete();
}
Log.d("File Name", file.toString());
try {
InputStream myInput = getAssets().open("lpr.key");
OutputStream myOutput = new FileOutputStream(dst);
byte[] buffer = new byte[1024];
int length;
while ((length = myInput.read(buffer)) > 0) {
myOutput.write(buffer, 0, length);
}
myOutput.flush();
myOutput.close();
myInput.close();
} catch (Exception e) {
System.out.println("lpr.key" + "is not found");
}
}
public void surfaceCreated(SurfaceHolder holder) {
if (mycamera == null) {
try {
mycamera = Camera.open();
} catch (Exception e) {
e.printStackTrace();
String mess = "Failed to open the camera";
Toast.makeText(getApplicationContext(), mess, Toast.LENGTH_LONG).show();
return;
}
}
if(mycamera!=null)
{
try {
mycamera.setPreviewDisplay(holder);
timer2 = new Timer();
if (timer == null)
{
timer = new TimerTask()
{
public void run()
{
if (mycamera != null)
{
try
{
mycamera.autoFocus(new AutoFocusCallback()
{
public void onAutoFocus(boolean success, Camera camera)
{
}
});
}
catch (Exception e)
{
e.printStackTrace();
}
}
};
};
}
timer2.schedule(timer, 500, 2500);
initCamera();
//mycamera.startPreview();
//mycamera.autoFocus(null);
} catch (IOException e) {
e.printStackTrace();
}
}
if(api==null)
{
api= new LPR();
String FilePath =Environment.getExternalStorageDirectory().toString()+"/lpr.key";
int nRet = api.Init(this,m_ROI[0], m_ROI[1], m_ROI[2], m_ROI[3], preHeight, preWidth,FilePath);
if(nRet!=0)
{
Toast.makeText(getApplicationContext(), "啟動失敗,請調整時間", Toast.LENGTH_SHORT).show();
Log.d("nRet ", nRet + "");
bInitKernal =false;
}
else
{
bInitKernal=true;
}
}
if(alertDialog==null){
alertDialog = new AlertDialog.Builder(this).create();
alertDialoginfo = new AlertDialog.Builder(this).create();
}
}
@Override
public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
if (mycamera != null) {
mycamera.setPreviewCallback(null);
mycamera.stopPreview();
mycamera.release();
mycamera = null;
}
} catch (Exception e) {
}
if(bInitKernal){
bInitKernal=false;
api = null;
}
if(toast!=null){
toast.cancel();
toast = null;
}
if(timer2!=null){
timer2.cancel();
timer2=null;
}
if(alertDialog!=null)
{
alertDialog.dismiss();
alertDialog.cancel();
alertDialog=null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
try {
if (mycamera != null) {
mycamera.setPreviewCallback(null);
mycamera.stopPreview();
mycamera.release();
mycamera = null;
}
} catch (Exception e) {
e.printStackTrace();
}
if(bInitKernal)
{
bInitKernal=false;
api = null;
}
finish();
if(toast!=null){
toast.cancel();
toast=null;
}
if(timer2!=null){
timer2.cancel();
timer2=null;
}
if(alertDialog!=null)
{
alertDialog.cancel();
alertDialog=null;
}
}
return super.onKeyDown(keyCode, event);
}
@TargetApi(14)
private void initCamera() {
Camera.Parameters parameters = mycamera.getParameters();
List<Camera.Size> list = parameters.getSupportedPreviewSizes();
preWidth = list.get(4).width;
preHeight = list.get(4).height;
parameters.setPictureFormat(PixelFormat.JPEG);
parameters.setPreviewSize(preWidth,preHeight);
if (!bROI) {
int l,t,r,b;
l = 100;
r = width-100;
int ntmpH =(r-l)*58/100;
t = (height-ntmpH)/2;
b = t+ntmpH;
double proportion = (double) width / (double) preHeight;
double hproportion=(double)height/(double) preWidth;
l = (int) (l /proportion);
t = (int) (t /hproportion);
r = (int) (r /proportion);
b = (int) (b / hproportion);
m_ROI[0]=l;
m_ROI[1]=t;
m_ROI[2]=r;
m_ROI[3]=b;
bROI = true;
}
if (parameters.getSupportedFocusModes().contains(
parameters.FOCUS_MODE_AUTO))
{
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); // continuous focus
}
mycamera.setPreviewCallback(LPR2Activity.this);
mycamera.setParameters(parameters);
mycamera.setDisplayOrientation(90); // typical devices
mycamera.startPreview();
}
public void onPreviewFrame(byte[] data, Camera camera) {
tackData = data;
Log.d("data length ", data.length + "");
ByteArrayInputStream bis = new ByteArrayInputStream(data);
resultStr = "";
if (!leaving&& bInitKernal ) {
Log.d("Status ", "開始判斷");
byte result[];//[] = new byte[10];
String res="";
result = api.VideoRec(tackData, preWidth, preHeight, 1);
Log.d("preWidth ", preWidth + "");
Log.d("preHeight", preHeight + "");
Log.d("width ", width + "");
Log.d("height", height + "");
try {
res = new String(result,"gb2312");
Log.d("try ", res);
} catch (UnsupportedEncodingException e) {
// TODO Auto-generated catch block
Log.d("Exception ", e.getMessage());
e.printStackTrace();
}
Log.d("res ", res);
if(res!=null&&!"".equals(res.trim()))
{
Camera.Parameters parameters = mycamera.getParameters();
resultStr =res.trim();
if (resultStr != "") {
leaving = true;
// shutter sound effect
MediaActionSound sound = null;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
Intent intent = new Intent();
intent.putExtra("strPltNo",resultStr);
setResult(2,intent);
finish();
}else{
Log.d("Status ", "未分配車牌號碼,請重拍");
}
}
}
}
@RequiresApi(api = Build.VERSION_CODES.CUPCAKE)
public String pictureName() {
String str = "";
Time t = new Time();
t.setToNow(); // get the current system time
int year = t.year;
int month = t.month + 1;
int date = t.monthDay;
int hour = t.hour; // 0-23
int minute = t.minute;
int second = t.second;
if (month < 10)
str = String.valueOf(year) + "0" + String.valueOf(month);
else {
str = String.valueOf(year) + String.valueOf(month);
}
if (date < 10)
str = str + "0" + String.valueOf(date + "_");
else {
str = str + String.valueOf(date + "_");
}
if (hour < 10)
str = str + "0" + String.valueOf(hour);
else {
str = str + String.valueOf(hour);
}
if (minute < 10)
str = str + "0" + String.valueOf(minute);
else {
str = str + String.valueOf(minute);
}
if (second < 10)
str = str + "0" + String.valueOf(second);
else {
str = str + String.valueOf(second);
}
return str;
}
public void Leave(View view) {
Intent intent = new Intent();
intent.putExtra("strPltNo","");
setResult(3,intent);
finish();
}
}
(Screenshots of the CameraX logcat and the Camera logcat omitted.)

Regarding the image analysis resolution, the documentation of ImageAnalysis.Builder.setTargetResolution() states that:
The maximum available resolution that could be selected for an
ImageAnalysis is limited to be under 1080p.
So setting a size of 2560x800 won't work as you expect. Instead, CameraX appears to select the largest ImageAnalysis resolution under that limit with the aspect ratio you requested (2560:800 = 1280:400). That also explains the buffer size: YUV_420_888 uses 1.5 bytes per pixel, and 1280 * 400 * 1.5 = 768000.
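As for res always being empty in the CameraX path: your analyzer hard-codes api.VideoRec(tackData, 1280, 400, 1) while LPR.Init() receives preWidth/preHeight and an ROI computed for the old Camera preview size, so the recognizer is most likely scanning the wrong region. Re-initializing the kernel on every frame, as bIninKernal() in your onPreviewFrame() does, is also worth avoiding. A minimal sketch (using LPR, m_ROI and YUVToByte from your code; bInitOnce is a hypothetical flag so Init() runs exactly once, with the size CameraX actually delivers):
@SuppressLint("UnsafeExperimentalUsageError")
@Override
public void analyze(@NonNull ImageProxy imageProxy) {
    Image image = imageProxy.getImage();
    if (image != null && imageProxy.getFormat() == ImageFormat.YUV_420_888) {
        int frameWidth = image.getWidth();   // 1280 here, whatever CameraX picked
        int frameHeight = image.getHeight(); // 400 here
        if (!bInitOnce) {
            // Recompute m_ROI against frameWidth/frameHeight before calling Init().
            String keyPath = Environment.getExternalStorageDirectory() + "/lpr.key";
            api.Init(MainActivity2.this, m_ROI[0], m_ROI[1], m_ROI[2], m_ROI[3],
                    frameHeight, frameWidth, keyPath);
            bInitOnce = true;
        }
        byte[] result = api.VideoRec(YUVToByte(image), frameWidth, frameHeight, 1);
        // ... decode result exactly as in onPreviewFrame() ...
    }
    imageProxy.close();
}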

Related

Stop() called but track is not started

Here is my logcat:
W/System.err: java.lang.IllegalStateException: Failed to add the track to the muxer
W/System.err: at android.media.MediaMuxer.nativeAddTrack(Native Method)
W/System.err: at android.media.MediaMuxer.addTrack(MediaMuxer.java:626)
W/System.err: at com.marvhong.videoeffect.composer.MuxRender.onSetOutputFormat(MuxRender.java:64)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.drainEncoder(VideoComposer.java:224)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.stepPipeline(VideoComposer.java:113)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.runPipelines(Mp4ComposerEngine.java:181)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.compose(Mp4ComposerEngine.java:127)
W/System.err: at com.marvhong.videoeffect.composer.Mp4Composer$1.run(Mp4Composer.java:198)
W/System.err: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
W/System.err: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
W/System.err: at java.lang.Thread.run(Thread.java:764)
E/TrimVideoActivity: filterVideo---onFailed()
In my application, I'm trying to apply filters to a video. Sometimes my app crashes and sometimes it works fine; the error is "Failed to add the track to the muxer".
I debugged the code and found that the issue occurs for videos that contain an audio track: applying a filter and saving works for video-only files, but saving the filtered video fails when audio is present.
MuxRender Class :
class MuxRender {
private static final String TAG = "MuxRender";
private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
private final MediaMuxer muxer;
private MediaFormat videoFormat;
private MediaFormat audioFormat;
private int videoTrackIndex;
private int audioTrackIndex;
private ByteBuffer byteBuffer;
private final List<SampleInfo> sampleInfoList;
private boolean started;
MuxRender(MediaMuxer muxer) {
this.muxer = muxer;
sampleInfoList = new ArrayList<>();
}
void setOutputFormat(SampleType sampleType, MediaFormat format) {
switch (sampleType) {
case VIDEO:
videoFormat = format;
break;
case AUDIO:
ObLogger.i(TAG, "format > " + format);
audioFormat = format;
break;
default:
throw new AssertionError();
}
}
void onSetOutputFormat() {
if (videoFormat != null && audioFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
ObLogger.i(TAG, "audioFormat > " + audioFormat);
audioTrackIndex = muxer.addTrack(audioFormat);
ObLogger.v(TAG, "Added track #" + audioTrackIndex + " with " + audioFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
} else if (videoFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
}
muxer.start();
started = true;
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocate(0);
}
byteBuffer.flip();
ObLogger.v(TAG, "Output format determined, writing " + sampleInfoList.size() +
" samples / " + byteBuffer.limit() + " bytes to muxer.");
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int offset = 0;
for (SampleInfo sampleInfo : sampleInfoList) {
sampleInfo.writeToBufferInfo(bufferInfo, offset);
muxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.sampleType), byteBuffer, bufferInfo);
offset += sampleInfo.size;
}
sampleInfoList.clear();
byteBuffer = null;
}
void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
if (started) {
muxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
return;
}
byteBuf.limit(bufferInfo.offset + bufferInfo.size);
byteBuf.position(bufferInfo.offset);
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
}
byteBuffer.put(byteBuf);
sampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
}
private int getTrackIndexForSampleType(SampleType sampleType) {
switch (sampleType) {
case VIDEO:
return videoTrackIndex;
case AUDIO:
return audioTrackIndex;
default:
throw new AssertionError();
}
}
public enum SampleType {VIDEO, AUDIO}
private static class SampleInfo {
private final SampleType sampleType;
private final int size;
private final long presentationTimeUs;
private final int flags;
private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
this.sampleType = sampleType;
this.size = size;
presentationTimeUs = bufferInfo.presentationTimeUs;
flags = bufferInfo.flags;
}
private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
bufferInfo.set(offset, size, presentationTimeUs, flags);
}
}
}
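Given this MuxRender, a plausible cause of the crash above: onSetOutputFormat() calls muxer.start() unconditionally at the end, so if one composer delivers its output format before the other, the muxer is already started when the second format arrives, and the second call's muxer.addTrack() then throws "Failed to add the track to the muxer". A minimal sketch of a guard, assuming the engine tells MuxRender up front whether an audio track is expected (expectAudio is a hypothetical field set during track selection):
void onSetOutputFormat() {
    if (started) {
        return; // muxer.start() already ran; adding a track now would throw
    }
    if (expectAudio && (videoFormat == null || audioFormat == null)) {
        return; // wait until both formats are known before starting the muxer
    }
    // ... the original addTrack/start/flush logic then runs exactly once ...
}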
Mp4ComposerEngine Class:
class Mp4ComposerEngine {
private static final String TAG = "Mp4ComposerEngine";
private static final double PROGRESS_UNKNOWN = -1.0;
private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
private static final long PROGRESS_INTERVAL_STEPS = 10;
private FileDescriptor inputFileDescriptor;
private VideoComposer videoComposer;
private IAudioComposer audioComposer;
private MediaExtractor mediaExtractor;
private MediaMuxer mediaMuxer;
private ProgressCallback progressCallback;
private long durationUs;
void setDataSource(FileDescriptor fileDescriptor) {
inputFileDescriptor = fileDescriptor;
}
void setProgressCallback(ProgressCallback progressCallback) {
this.progressCallback = progressCallback;
}
void compose(
final String destPath,
final Resolution outputResolution,
final GlFilter filter,
final int bitrate,
final boolean mute,
final Rotation rotation,
final Resolution inputResolution,
final FillMode fillMode,
final FillModeCustomItem fillModeCustomItem,
final int timeScale,
final boolean flipVertical,
final boolean flipHorizontal
) throws IOException {
try {
mediaExtractor = new MediaExtractor();
mediaExtractor.setDataSource(inputFileDescriptor);
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer = new MediaMuxer(destPath, OutputFormat.MUXER_OUTPUT_MPEG_4);
}
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(inputFileDescriptor);
try {
durationUs = Long
.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
} catch (NumberFormatException e) {
durationUs = -1;
}
ObLogger.d(TAG, "Duration (us): " + durationUs);
MediaFormat videoOutputFormat = MediaFormat
.createVideoFormat("video/avc", outputResolution.width(), outputResolution.height());
videoOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
videoOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
videoOutputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
videoOutputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
MuxRender muxRender = new MuxRender(mediaMuxer);
// identify track indices
MediaFormat format = mediaExtractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
final int videoTrackIndex;
final int audioTrackIndex;
if (mime.startsWith("video/")) {
videoTrackIndex = 0;
audioTrackIndex = 1;
} else {
videoTrackIndex = 1;
audioTrackIndex = 0;
}
// setup video composer
videoComposer = new VideoComposer(mediaExtractor, videoTrackIndex, videoOutputFormat, muxRender, timeScale);
videoComposer.setUp(filter, rotation, outputResolution, inputResolution, fillMode, fillModeCustomItem, flipVertical, flipHorizontal);
mediaExtractor.selectTrack(videoTrackIndex);
// setup audio if present and not muted
if (mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO) != null && !mute) {
// has Audio video
if (timeScale < 2) {
audioComposer = new AudioComposer(mediaExtractor, audioTrackIndex, muxRender);
} else {
audioComposer = new RemixAudioComposer(mediaExtractor, audioTrackIndex, mediaExtractor.getTrackFormat(audioTrackIndex), muxRender, timeScale);
}
audioComposer.setup();
mediaExtractor.selectTrack(audioTrackIndex);
runPipelines();
} else {
// no audio video
runPipelinesNoAudio();
}
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.stop();
}
} finally {
try {
if (videoComposer != null) {
videoComposer.release();
videoComposer = null;
}
if (audioComposer != null) {
audioComposer.release();
audioComposer = null;
}
if (mediaExtractor != null) {
mediaExtractor.release();
mediaExtractor = null;
}
} catch (RuntimeException e) {
e.printStackTrace();
// Too fatal to make alive the app, because it may leak native resources.
// throw new Error("Could not shutdown mediaExtractor, codecs and mediaMuxer pipeline.", e);
}
try {
if (mediaMuxer != null) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.release();
}
mediaMuxer = null;
}
} catch (RuntimeException e) {
ObLogger.e(TAG, "Failed to release mediaMuxer.", e);
}
}
}
private void runPipelines() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
}// unknown
}
while (!(videoComposer.isFinished() && audioComposer.isFinished())) {
boolean stepped = videoComposer.stepPipeline()
|| audioComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
double audioProgress = audioComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) audioComposer.getWrittenPresentationTimeUs() / durationUs);
double progress = (videoProgress + audioProgress) / 2.0;
if (progressCallback != null) {
progressCallback.onProgress(progress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
private void runPipelinesNoAudio() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
} // unknown
}
while (!videoComposer.isFinished()) {
boolean stepped = videoComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
if (progressCallback != null) {
progressCallback.onProgress(videoProgress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
interface ProgressCallback {
/**
* Called to notify progress. Same thread which initiated transcode is used.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
}
}
Mp4Composer Class :
public class Mp4Composer {
private final static String TAG = Mp4Composer.class.getSimpleName();
private final String srcPath;
private final String destPath;
private GlFilter filter;
private Resolution outputResolution;
private int bitrate = -1;
private boolean mute = false;
private Rotation rotation = Rotation.NORMAL;
private Listener listener;
private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
private FillModeCustomItem fillModeCustomItem;
private int timeScale = 1;
private boolean flipVertical = false;
private boolean flipHorizontal = false;
private ExecutorService executorService;
public Mp4Composer(@NonNull final String srcPath, @NonNull final String destPath) {
this.srcPath = srcPath;
this.destPath = destPath;
}
public Mp4Composer filter(@NonNull GlFilter filter) {
this.filter = filter;
return this;
}
public Mp4Composer size(int width, int height) {
this.outputResolution = new Resolution(width, height);
return this;
}
public Mp4Composer videoBitrate(int bitrate) {
this.bitrate = bitrate;
return this;
}
public Mp4Composer mute(boolean mute) {
this.mute = mute;
return this;
}
public Mp4Composer flipVertical(boolean flipVertical) {
this.flipVertical = flipVertical;
return this;
}
public Mp4Composer flipHorizontal(boolean flipHorizontal) {
this.flipHorizontal = flipHorizontal;
return this;
}
public Mp4Composer rotation(@NonNull Rotation rotation) {
this.rotation = rotation;
return this;
}
public Mp4Composer fillMode(@NonNull FillMode fillMode) {
this.fillMode = fillMode;
return this;
}
public Mp4Composer customFillMode(@NonNull FillModeCustomItem fillModeCustomItem) {
this.fillModeCustomItem = fillModeCustomItem;
this.fillMode = FillMode.CUSTOM;
return this;
}
public Mp4Composer listener(@NonNull Listener listener) {
this.listener = listener;
return this;
}
public Mp4Composer timeScale(final int timeScale) {
this.timeScale = timeScale;
return this;
}
private ExecutorService getExecutorService() {
if (executorService == null) {
executorService = Executors.newSingleThreadExecutor();
}
return executorService;
}
public Mp4Composer start() {
getExecutorService().execute(new Runnable() {
@Override
public void run() {
Mp4ComposerEngine engine = new Mp4ComposerEngine();
engine.setProgressCallback(new Mp4ComposerEngine.ProgressCallback() {
@Override
public void onProgress(final double progress) {
if (listener != null) {
listener.onProgress(progress);
}
}
});
final File srcFile = new File(srcPath);
final FileInputStream fileInputStream;
try {
fileInputStream = new FileInputStream(srcFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
try {
engine.setDataSource(fileInputStream.getFD());
} catch (IOException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
final int videoRotate = getVideoRotation(srcPath);
final Resolution srcVideoResolution = getVideoResolution(srcPath, videoRotate);
if (filter == null) {
filter = new GlFilter();
}
if (fillMode == null) {
fillMode = FillMode.PRESERVE_ASPECT_FIT;
}
if (fillModeCustomItem != null) {
fillMode = FillMode.CUSTOM;
}
if (outputResolution == null) {
if (fillMode == FillMode.CUSTOM) {
outputResolution = srcVideoResolution;
} else {
Rotation rotate = Rotation.fromInt(rotation.getRotation() + videoRotate);
if (rotate == Rotation.ROTATION_90 || rotate == Rotation.ROTATION_270) {
outputResolution = new Resolution(srcVideoResolution.height(), srcVideoResolution.width());
} else {
outputResolution = srcVideoResolution;
}
}
}
if (filter instanceof IResolutionFilter) {
((IResolutionFilter) filter).setResolution(outputResolution);
}
if (timeScale < 2) {
timeScale = 1;
}
ObLogger.d(TAG, "rotation = " + (rotation.getRotation() + videoRotate));
ObLogger.d(TAG, "inputResolution width = " + srcVideoResolution.width() + " height = " + srcVideoResolution.height());
ObLogger.d(TAG, "outputResolution width = " + outputResolution.width() + " height = " + outputResolution.height());
ObLogger.d(TAG, "fillMode = " + fillMode);
try {
if (bitrate < 0) {
bitrate = calcBitRate(outputResolution.width(), outputResolution.height());
}
engine.compose(
destPath,
outputResolution,
filter,
bitrate,
mute,
Rotation.fromInt(rotation.getRotation() + videoRotate),
srcVideoResolution,
fillMode,
fillModeCustomItem,
timeScale,
flipVertical,
flipHorizontal
);
} catch (Exception e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
executorService.shutdown();
return;
}
if (listener != null) {
listener.onCompleted();
}
executorService.shutdown();
}
});
return this;
}
public void cancel() {
getExecutorService().shutdownNow();
}
public interface Listener {
/**
* Called to notify progress.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
/**
* Called when transcode completed.
*/
void onCompleted();
/**
* Called when transcode canceled.
*/
void onCanceled();
void onFailed(Exception exception);
}
private int getVideoRotation(String videoFilePath) {
try {
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(videoFilePath);
ObLogger.e("MediaMetadataRetriever", "getVideoRotation error");
String orientation = mediaMetadataRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
return Integer.valueOf(orientation);
} catch (NumberFormatException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
return 0;
}
private int calcBitRate(int width, int height) {
final int bitrate = (int) (0.25 * 30 * width * height);
ObLogger.i(TAG, "bitrate=" + bitrate);
return bitrate;
}
private Resolution getVideoResolution(final String path, final int rotation) {
int width = 0;
int height = 0;
if (path != null && !path.isEmpty()) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(path);
try {
String Strwidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String Strheight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
if (Strwidth != null && Strheight != null) {
width = Integer.valueOf(Strwidth);
height = Integer.valueOf(Strheight);
}
retriever.release();
} catch (NumberFormatException e) {
retriever.release();
e.printStackTrace();
} catch (IllegalArgumentException e) {
retriever.release();
e.printStackTrace();
}
}
return new Resolution(width, height);
}
}
Main Activity Call Mp4Composer:
private void startMediaCodec(String srcPath,String outputPath) {
mMp4Composer = new Mp4Composer(srcPath, outputPath)
// .rotation(Rotation.ROTATION_270)
//.size(720, 1280)
.fillMode(FillMode.PRESERVE_ASPECT_FIT)
.filter(MagicFilterFactory.getFilter())
.mute(false)
.flipHorizontal(false)
.flipVertical(false)
.listener(new Listener() {
@Override
public void onProgress(double progress) {
ObLogger.d(TAG, "filterVideo---onProgress: " + (int) (progress * 100));
runOnUiThread(new Runnable() {
@Override
public void run() {
//show progress
}
});
}
@Override
public void onCompleted() {
ObLogger.d(TAG, "filterVideo---onCompleted");
runOnUiThread(new Runnable() {
@Override
public void run() {
ObLogger.i(TAG, "run: Editor Screen is >>> ");
Intent intent = new Intent();
intent.putExtra(Extras.EXTRA_FILTER_SCREEN, outputPath);
setResult(RESULT_OK, intent);
finish();
}
});
}
@Override
public void onCanceled() {
ObLogger.e(TAG, "onCanceled");
NormalProgressDialog.stopLoading();
}
@Override
public void onFailed(Exception exception) {
ObLogger.e(TAG, "filterVideo---onFailed()");
NormalProgressDialog.stopLoading();
// Toast.makeText(TrimVideoActivity.this, "Video processing failed", Toast.LENGTH_SHORT).show();
}
})
.start();
}
I tried the approaches in the links below, but they did not solve my problem.
https://stackoverflow.com/a/53140941/11138845
https://stackoverflow.com/a/21759073/11138845

video steganography using mediacodec

I need to create videos with data hidden in them. I managed to extract video frames as NV21 buffers using the MediaCodec decoder and save them; I then create an mp4 file from the frames using the MediaCodec encoder.
The class below is responsible for saving the frame files during the encoding process, or for checking the embedded value when we want to extract data from the stego video.
public class ExtractMpegFramesBufferDecoder {
private static final String TAG = "ExtractMpegFramesDec";
private static final boolean VERBOSE = true; // lots of logging
// where to find files (note: requires WRITE_EXTERNAL_STORAGE permission)
private File STORE_FRAME_DIRECTORY;
private String INPUT_FILE;
private int frameRate; // stop extracting after this many
private int saveWidth;
private int saveHeight;
private int decodeCount;
private Handler _progressBarHandler;
private int duration;
//
private int MAX_FRAMES;
private boolean fromDecode;
//
public ExtractMpegFramesBufferDecoder(File storeFrameDirectory, String inputVideoPath, int frameRate
, int saveWidth, int saveHeight
, double duration, int rotation
, Handler _progressBarHandler) {
this.STORE_FRAME_DIRECTORY = storeFrameDirectory;
this.INPUT_FILE = inputVideoPath;
this.frameRate = frameRate;
this.saveWidth = saveWidth;
this.saveHeight = saveHeight;
this._progressBarHandler = _progressBarHandler;
this.duration = (int) duration;
}
/**
* Tests extraction from an MP4 to a series of PNG files.
* <p>
* We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
* video with the GPU. If the input video has a different aspect ratio, we could preserve
* it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
* you're extracting frames you don't want black bars.
*/
public void extractMpegFrames(int maxFrame, boolean fromDecode) throws IOException {
MediaCodec decoder = null;
MediaExtractor extractor = null;
MAX_FRAMES = maxFrame;
this.fromDecode = fromDecode;
try {
File inputFile = new File(INPUT_FILE); // must be an absolute path
// The MediaExtractor error messages aren't very useful. Check to see if the input
// file exists so we can throw a better one if it's not there.
if (!inputFile.canRead()) {
throw new FileNotFoundException("Unable to read " + inputFile);
}
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
int trackIndex = selectTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + inputFile);
}
extractor.selectTrack(trackIndex);
MediaFormat format = extractor.getTrackFormat(trackIndex);
if (VERBOSE) {
Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
format.getInteger(MediaFormat.KEY_HEIGHT));
}
// Create a MediaCodec decoder, and configure it with the MediaFormat from the
// extractor. It's very important to use the format from the extractor because
// it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
String mime = format.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, null, null, 0);
decoder.start();
doExtract(extractor, trackIndex, decoder);
} finally {
if (decoder != null) {
decoder.stop();
decoder.release();
decoder = null;
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}
}
/**
* Selects the video track, if any.
*
* #return the track index, or -1 if no video track is found.
*/
private int selectTrack(MediaExtractor extractor) {
// Select the first video track we find, ignore the rest.
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
if (VERBOSE) {
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
}
return i;
}
}
return -1;
}
/**
* Work loop.
*/
public void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder) throws IOException {
final int TIMEOUT_USEC = 10000;
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int inputChunk = 0;
decodeCount = 0;
long frameSaveTime = 0;
boolean outputDone = false;
boolean inputDone = false;
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
MediaFormat decoderOutputFormat = null;
long rawSize = 0;
while (!outputDone) {
if (VERBOSE) Log.d(TAG, "loop");
// Feed more data to the decoder.
if (!inputDone) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
// Read the sample data into the ByteBuffer. This neither respects nor
// updates inputBuf's position, limit, etc.
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
// End of stream -- send empty frame with EOS flag set.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
if (VERBOSE) Log.d(TAG, "sent input EOS");
} else {
if (extractor.getSampleTrackIndex() != trackIndex) {
Log.w(TAG, "WEIRD: got sample from track " +
extractor.getSampleTrackIndex() + ", expected " + trackIndex);
}
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
presentationTimeUs, 0 /*flags*/);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
chunkSize);
}
inputChunk++;
extractor.advance();
}
} else {
if (VERBOSE) Log.d(TAG, "input buffer not available");
}
}
if (!outputDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (VERBOSE) Log.d(TAG, "no output from decoder available");
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not important for us, since we're using Surface
if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
decoderOutputBuffers = decoder.getOutputBuffers();
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
decoderOutputFormat = newFormat;
if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
} else if (decoderStatus < 0) {
Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else { // decoderStatus >= 0
if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
" (size=" + info.size + ")");
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
outputFrame.position(info.offset);
outputFrame.limit(info.offset + info.size);
rawSize += info.size;
if (info.size == 0) {
if (VERBOSE) Log.d(TAG, "got empty frame");
} else {
// if it's decode then check the altered value
// else save the frames
if (fromDecode) {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
int size = saveWidth * saveHeight;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
int uvIndex = 0;
if (decodeCount == 1) {
// i walks the Y samples (and the final pixels)
// k walks the U and V samples
for (int i = 0, k = 0; i < size; i += 2, k += 2) {
y1 = data[i] & 0xff;
y2 = data[i + 1] & 0xff;
y3 = data[saveWidth + i] & 0xff;
y4 = data[saveWidth + i + 1] & 0xff;
u = data[offset + k] & 0xff;
v = data[offset + k + 1] & 0xff;
// getting size
if (uvIndex == 0) {
int specialByte1P1 = u & 15;
int specialByte1P2 = v & 15;
int specialCharacter1 = (specialByte1P1 << 4) | specialByte1P2;
if (specialCharacter1 != 17) {
throw new IllegalArgumentException("value has changed");
}
}
uvIndex++;
if (i != 0 && (i + 2) % saveWidth == 0)
i += saveWidth;
}
}
} else {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
try {
File outputFile = new File(STORE_FRAME_DIRECTORY,
String.format(Locale.US, "frame_%d.frame", decodeCount));
FileOutputStream stream = new FileOutputStream(outputFile.getAbsoluteFile());
stream.write(data);
} catch (FileNotFoundException e1) {
e1.printStackTrace();
}
}
decodeCount++;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
decoder.releaseOutputBuffer(decoderStatus, false);
}
}
}
int numSaved = (frameRate < decodeCount) ? frameRate : decodeCount;
Log.d(TAG, "Saving " + numSaved + " frames took " +
(frameSaveTime / numSaved / 1000) + " us per frame");
}
public int getDecodeCount() {
return decodeCount;
}
}
In the class below I encode the frames, alter one U/V value of frame 1 (storing the number 17 in the LSBs of the first U and V samples), and build the mp4 using the MediaCodec encoder.
public class YUVFrameBufferToVideoEncoder {
private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();
private static final int ERROR_IN_PROCESS = 0;
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<File> mEncodeQueue = new ConcurrentLinkedQueue();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static int BIT_RATE;
private static int FRAME_RATE; // Frames per second
private int frameCount;
private Handler _progressBarHandler;
private Handler _processHandler;
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
//
private byte[] dataToHide;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public YUVFrameBufferToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public boolean startEncoding(int width, int height, int fps, int bitrate, int frameCount
, byte[] dataToHide, Handler _progressBarHandler, Handler _processHandler
, File outputFile) {
mWidth = width;
mHeight = height;
FRAME_RATE = fps;
BIT_RATE = bitrate;
this.frameCount = frameCount;
this._progressBarHandler = _progressBarHandler;
this._processHandler = _processHandler;
mOutputFile = outputFile;
this.dataToHide = dataToHide;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
ErrorManager.getInstance().addErrorMessage("Unable to get path for " + outputFile);
return false;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
ErrorManager.getInstance().addErrorMessage("Unable to find an appropriate codec for " + MIME_TYPE);
return false;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("Unable to create MediaCodec " + e.getMessage());
return false;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG, "MediaMuxer creation failed. " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("MediaMuxer creation failed. " + e.getMessage());
return false;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
Completable.fromAction(this::encode)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe();
return true;
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(File frame) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
mEncodeQueue.add(frame);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
private void encode() {
Log.d(TAG, "Encoder started");
while (true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
File frame = mEncodeQueue.poll();
if (frame == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {
}
frame = mEncodeQueue.poll();
}
if (frame == null) continue;
int size = (int) frame.length();
byte[] bytesNV21 = new byte[size];
try {
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(frame));
buf.read(bytesNV21, 0, bytesNV21.length);
buf.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
int offsetSize = mWidth * mHeight;
int byteNV21Offset = offsetSize;
int u, v, y1, y2, y3, y4;
//
int dataToHideLength = 0;
if (dataToHide != null)
dataToHideLength = dataToHide.length;
boolean isLastIndexInserted1 = false;
boolean isLastIndexInserted2 = false;
boolean isLastIndexInserted3 = false;
int uvIndex = 0;
int frameByteCapacity = ((mWidth * mHeight) / 4) / 20;
Log.e(TAG, "encode: dataToHideLength: " + dataToHideLength);
Log.e(TAG, "encode: frameByteCapacity: " + dataToHideLength);
//
// i walks the Y samples (and the final pixels)
// k walks the U and V samples
for (int i = 0, k = 0; i < offsetSize; i += 2, k += 2) {
y1 = bytesNV21[i] & 0xff;
y2 = bytesNV21[i + 1] & 0xff;
y3 = bytesNV21[mWidth + i] & 0xff;
y4 = bytesNV21[mWidth + i + 1] & 0xff;
u = bytesNV21[byteNV21Offset + k] & 0xff;
v = bytesNV21[byteNV21Offset + k + 1] & 0xff;
// frame 1
// altering u and v for test
if (mGenerateIndex == 1) {
int Unew = u & 240;
int Vnew = v & 240;
if (uvIndex == 0) {
// used in start and end of stego bytes
int specialByte1Integer = 17;
int specialByte1P1 = specialByte1Integer & 240;
int specialByte1P2 = specialByte1Integer & 15;
// shift p1 right 4 position
specialByte1P1 = specialByte1P1 >> 4;
u = Unew | specialByte1P1;
v = Vnew | specialByte1P2;
}
bytesNV21[byteNV21Offset + k] = (byte) u;
bytesNV21[byteNV21Offset + k + 1] = (byte) v;
}
uvIndex++;
if (i != 0 && (i + 2) % mWidth == 0)
i += mWidth;
}
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inputBufIndex];
inputBuffer.clear();
inputBuffer.put(bytesNV21);
mediaCodec.queueInputBuffer(inputBufIndex, 0, bytesNV21.length, ptsUsec, 0);
mGenerateIndex++;
int percentComplete = 70 + (int) ((((double) mGenerateIndex) / (frameCount)) * 30);
if (_progressBarHandler != null) {
_progressBarHandler.sendMessage(_progressBarHandler.obtainMessage(percentComplete));
}
Log.w("creatingVideo: ", "is:" + percentComplete);
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffers()[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
} else {
mCallback.onEncodingComplete(mOutputFile);
}
}
private void release() {
try {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG, "RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG, "RELEASE MUXER");
}
} catch (Exception ignored) {
ErrorManager.getInstance().addErrorMessage("unsupported video file");
Message res = _processHandler.obtainMessage(ERROR_IN_PROCESS);
_processHandler.sendMessage(res);
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
The output video is created successfully, but MediaCodec has changed the altered test value and I cannot retrieve it.
Here is my question: is this the right approach for video steganography on Android? If it is not the right way, can you please make a suggestion?
Steganography comes with a prerequisite: lossless encoding.
None of the codecs available on Android support lossless video encoding, as of now.
So I'm afraid your LSBs would never remain the same post encoding/decoding.
Suggestion: if you don't have too many frames, I would suggest you use a lossless format. You may encode your frames into a sequence of PNG images.
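To illustrate that route: convert each decoded NV21 frame to RGB in memory and write it out as a PNG, then embed your payload in the RGB LSBs of the PNGs, so no lossy step ever touches the stego bits. A sketch, assuming you already hold the raw NV21 bytes of a frame (PngFrameWriter and saveFrameAsPng are names made up here; the YUV-to-RGB math is the usual decodeYUV420SP conversion):
import android.graphics.Bitmap;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public final class PngFrameWriter {
    // Convert one NV21 frame to ARGB and save it as a lossless PNG.
    public static void saveFrameAsPng(byte[] nv21, int width, int height, File out)
            throws IOException {
        int[] argb = new int[width * height];
        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = width * height + (j >> 1) * width;
            int u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = Math.max(0, (0xff & nv21[yp]) - 16);
                if ((i & 1) == 0) { // NV21 interleaves V then U for each 2x2 block
                    v = (0xff & nv21[uvp++]) - 128;
                    u = (0xff & nv21[uvp++]) - 128;
                }
                int y1192 = 1192 * y;
                int r = clamp(y1192 + 1634 * v);
                int g = clamp(y1192 - 833 * v - 400 * u);
                int b = clamp(y1192 + 2066 * u);
                argb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                        | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }
        Bitmap bmp = Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
        try (FileOutputStream fos = new FileOutputStream(out)) {
            bmp.compress(Bitmap.CompressFormat.PNG, 100, fos); // PNG is lossless
        }
    }

    private static int clamp(int c) {
        return Math.min(262143, Math.max(0, c));
    }
}
Note that embedding in U/V before a YUV-to-RGB conversion would not survive either; embed only after the frame is in its final lossless representation.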

Is it possible to record the voice from Bluetooth headset and play on Android speaker simultaneously?

Is it possible to record the voice from a Bluetooth headset and play it on the Android speaker simultaneously? I have finished recording audio from the headset with the code below, and I am now on the second step: playing that sound on the Android speaker. Please help me resolve it. Thank you so much.
_audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
_audioManager.startBluetoothSco();
_recorder = new MediaRecorder();
_recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
_recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
_recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
_recorder.setOutputFile(file.toString());
_recorder.prepare();
_recorder.start();
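MediaRecorder writes straight to a file, so for simultaneous playback one option is to capture raw PCM with AudioRecord and feed it to an AudioTrack. A minimal sketch (untested; whether SCO input and loudspeaker output can be active at the same time is device-dependent, and mIsRecording is a stop flag maintained elsewhere):
// Assumes startBluetoothSco() has already been called as above.
int rate = 8000; // Bluetooth SCO is 8 kHz narrowband
int minBuf = AudioRecord.getMinBufferSize(rate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.MIC, rate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, rate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBuf, AudioTrack.MODE_STREAM);
_audioManager.setSpeakerphoneOn(true); // ask for the loudspeaker
byte[] buf = new byte[minBuf];
rec.startRecording();
track.play();
while (mIsRecording) {
    int n = rec.read(buf, 0, buf.length);
    if (n > 0) {
        track.write(buf, 0, n); // play back what was just captured
    }
}
rec.stop(); rec.release();
track.stop(); track.release();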
Recording using AudioRecord
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
Button showPref;
OnBluetoothRecording bluetoothRecording;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
showPref = (Button)findViewById(R.id.showPreferece);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
showPref.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(getApplicationContext(),BluetoothPreferenceActivity.class));
}
});
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(final View v) {
BluetoothRecordingManager.checkAndRecord(getApplicationContext(), new OnBluetoothRecording() {
@Override
public void onStartRecording(boolean state, boolean bluetoothFlag) {
Log.d("CallBack","starting Recording");
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
@Override
public void onCancelRecording() {
}
}, true);
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData); // read() may return fewer bytes; readFully fills the array
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}

Gain control in Android

I am confused. I read here that Android has no support for gain control (the article is dated 2013/09/21), yet I found the AutomaticGainControl class among the audio effects. What does "software gain control" mean? Is there any difference?
Is it used by default with AudioRecord, or do I have to enable it myself?
Audio gain control
To increase the amplitude of the audio, you need to calculate a gain factor and multiply every captured sample by it. The following code does that.
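In essence, the gain step boils down to this minimal sketch (clipping to the valid 16-bit range avoids wrap-around distortion):
static void applyGain(short[] samples, int count, float gain) {
    for (int i = 0; i < count; i++) {
        float v = samples[i] * gain; // scale the PCM sample
        if (v > Short.MAX_VALUE) v = Short.MAX_VALUE;
        else if (v < Short.MIN_VALUE) v = Short.MIN_VALUE;
        samples[i] = (short) v;
    }
}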
P.S. Ignore the unrelated code
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData); // read() may return fewer bytes; readFully fills the array
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
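Regarding the AutomaticGainControl part of the question: it is a per-session audio effect that you attach yourself; it is generally not applied to a plain AudioSource.MIC recording by default. A minimal sketch (assuming mRecorder is the AudioRecord created above):
// Requires android.media.audiofx.AutomaticGainControl (API 16+).
// Availability is device-dependent, so check before creating the effect.
if (AutomaticGainControl.isAvailable()) {
    AutomaticGainControl agc =
            AutomaticGainControl.create(mRecorder.getAudioSessionId());
    if (agc != null) {
        agc.setEnabled(true);
    }
}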

Video encoder(mediacodec with encoder surface) re-initialisation for orientation change

Hi, I am asking this question with reference to the code at https://github.com/google/grafika .
I am trying to re-initialize my encoder during an orientation change in order to adjust the aspect ratio of the encoded frames.
UPDATE:
Here is my GLSurfaceView.Renderer class
class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final boolean ROSE_COLORED_GLASSES = false; // experiment
private MainActivity.CameraHandler mCameraHandler;
private TextureMovieEncoder mVideoEncoder;
private File mOutputFile;
TextView tv;
private TextureRender mTextureRender;
private SurfaceTexture mSurfaceTexture;
private boolean mRecordingEnabled;
Context cntex;
public CameraSurfaceRenderer(MainActivity.CameraHandler cameraHandler, TextureMovieEncoder movieEncoder, File outputFile, TextView tx, Context c) {
mOutputFile = outputFile;
mVideoEncoder = movieEncoder;
mRecordingEnabled = false;
mCameraHandler = cameraHandler;
tv = tx;
cntex = c;
}
public void notifyPausing() {
if (mSurfaceTexture != null) {
Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
mSurfaceTexture.release();
mSurfaceTexture = null;
}
}
public void changeRecordingState(boolean isRecording) {
Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
mRecordingEnabled = isRecording;
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
System.out.println("onSurfaceCreated start");
Log.d(TAG, "onSurfaceCreated");
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
// assign value to gl_FragColor.g and .b as well to get simple B&W
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// Tell the UI thread to enable the camera preview.
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
}
mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
System.out.println("onSurfaceCreated end");
}
boolean is_stpd = false;
int count = 0;
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
System.out.println("onSurfaceChanged start");
Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
GLES20.glViewport(0, 0, width, height);
mCameraHandler.sendEmptyMessage(2); // to display toast message of surface change.
int orientation = cntex.getResources().getConfiguration().orientation;
// System.out.println("onSurfaceChanged before reinit");
mVideoEncoder.encoder_reinitialisation(EGL14.eglGetCurrentContext(), orientation);
// System.out.println("onSurfaceChanged after reinit");
System.out.println("onSurfaceChanged end");
}
int _frm_cnt = 0;
double _strt_tm = 0;
@Override
public void onDrawFrame(GL10 unused) {
System.out.println("onDrawFrame start");
if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureRender.getTextureId());
++_frm_cnt;
if(_frm_cnt == 1) {
_strt_tm = System.currentTimeMillis();
}
if((System.currentTimeMillis() - _strt_tm) >= 1000) {
System.out.println("fps = " + _frm_cnt );
//tx.setText("fps = " + Integer.toString(frm_cnt));
_frm_cnt = 0;
}
mSurfaceTexture.updateTexImage();
//
mVideoEncoder.setTextureId(mTextureRender.getTextureId());
//
if(mVideoEncoder.is_prepared == true) {
// // This will be ignored if we're not actually recording.
mVideoEncoder.frameAvailable(mSurfaceTexture);
}
// Draw the video frame.
mTextureRender.drawFrame(mSurfaceTexture);
System.out.println("onDrawFrame end");
}
}
Here is my TextureMovieEncoder class
public class TextureMovieEncoder implements Runnable {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 5; // frames per second
private static final int IFRAME_INTERVAL = 1; // seconds between I-frames
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_FRAME_AVAILABLE = 2;
private static final int MSG_SET_TEXTURE_ID = 3;
private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
private static final int MSG_QUIT = 5;
private static final int MSG_REINIT = 6;
// ----- accessed by encoder thread -----
private EglCore mEglBase;
private WindowSurface mInputWindowSurface;
private MediaMuxer mMuxer;
private MediaCodec mEncoder;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
private TextureRender mTextureRender;
private int mTextureId;
private int mFrameNum;
File enc_file = new File(Environment.getExternalStorageDirectory().getPath() + "/encoded_preview.webm");
FileOutputStream fp_enc = null;
PrintWriter enc_len = null;
// ----- accessed by multiple threads -----
private volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private boolean mReady;
public boolean mRunning;
public boolean is_prepared = false;
public TextureMovieEncoder(Context cntxt) {
context = cntxt;
}
public static class EncoderConfig {
final File mOutputFile;
final int mWidth;
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext) {
System.out.println("EncoderConfig start");
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
System.out.println("EncoderConfig end");
}
@Override
public String toString() {
return "EncoderConfig: " + mWidth + "x" + mHeight + " #" + mBitRate +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
public void startRecording(EncoderConfig config) {
System.out.println("startRecording start");
Log.d(TAG, "Encoder: startRecording()");
synchronized (mReadyFence) {
if (mRunning) {
Log.w(TAG, "Encoder thread already running");
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
System.out.println("startRecording end");
}
int orientation_local = -1;
public void encoder_reinitialisation(EGLContext eglContext, int orientation) {
System.out.println("encoder_reinitialisation start");
is_prepared = false;
System.out.println("encoder_reinitialisation before message oriebta = " + orientation);
mHandler.sendMessage(mHandler.obtainMessage(MSG_REINIT, eglContext));
System.out.println("encoder_reinitialisation after message");
orientation_local = orientation;
System.out.println("encoder_reinitialisation end");
}
public void stopRecording() {
System.out.println("stopRecording start");
if(mHandler != null) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
}
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
System.out.println("stopRecording end");
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public void updateSharedContext(EGLContext sharedContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
}
public void frameAvailable(SurfaceTexture st) {
System.out.println("frameAvailable start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
float[] transform = new float[16]; // TODO - avoid alloc every frame
st.getTransformMatrix(transform);
long timestamp = st.getTimestamp();
if (timestamp == 0) {
Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp, transform));
System.out.println("frameAvailable end");
}
public void setTextureId(int id) {
System.out.println("setTextureId start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
System.out.println("setTextureId end");
}
@Override
public void run() {
System.out.println("run start");
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
Log.d(TAG, "Encoder thread exiting");
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
System.out.println("run end");
}
}
/**
* Handles encoder state change requests.
*/
private static class EncoderHandler extends Handler {
private WeakReference<TextureMovieEncoder> mWeakEncoder;
public EncoderHandler(TextureMovieEncoder encoder) {
mWeakEncoder = new WeakReference<TextureMovieEncoder>(encoder);
}
@Override
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
TextureMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_REINIT:
encoder.encoder_reinit((EGLContext) inputMessage.obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
encoder.handleSetTexture(inputMessage.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Start recording.
*/
private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
mFrameNum = 0;
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
}
private void encoder_reinit(EGLContext obj) {
System.out.println("encoder_reinit start ");
drainEncoder(true);
releaseEncoder();
prepareEncoder(obj, video_width, video_height, 1400000,
null);
System.out.println("encoder_reinit end ");
}
private void handleFrameAvailable(float[] transform, long timestamp) {
System.out.println("handleFrameAvailable start");
if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
if(is_prepared == true) {
drainEncoder(false);
mTextureRender.setTextureId(mTextureId);
mTextureRender.drawFrame(transform);
mInputWindowSurface.setPresentationTime(timestamp);
mInputWindowSurface.swapBuffers();
}
System.out.println("handleFrameAvailable end");
}
private void handleStopRecording() {
Log.d(TAG, "handleStopRecording");
drainEncoder(true);
releaseEncoder();
}
private void handleSetTexture(int id) {
//Log.d(TAG, "handleSetTexture " + id);
mTextureId = id;
}
/**
* Tears down the EGL surface and context we've been using to feed the MediaCodec input
* surface, and replaces it with a new one that shares with the new context.
*/
private void handleUpdateSharedContext(EGLContext newSharedContext) {
System.out.println("handleUpdateSharedContext start");
Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
// Release the EGLSurface and EGLContext.
if(mInputWindowSurface != null) {
mInputWindowSurface.releaseEglSurface();
mEglBase.release();
}
// Create a new EGLContext and recreate the window surface.
mEglBase = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface.recreate(mEglBase);
mInputWindowSurface.makeCurrent();
// Create new programs and such for the new context.
mTextureRender.surfaceCreated();
System.out.println("handleUpdateSharedContext end");
}
boolean created =false;
EGLContext sharedContext_local;
private Context context;
private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
File outputFile) {
System.out.println("prepareEncoder start width = " + width + "height = " + height);
sharedContext_local = sharedContext;
enc_strm = new byte[width * height * 3 / 2];
encoded_data_buffer = new ofi_vc_buffer();
video_width = width;
video_height = height;
if(!created) {
try {
fp_enc = new FileOutputStream(enc_file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
enc_len = new PrintWriter(Environment.getExternalStorageDirectory().getPath() + "/encoded_len.xls");
} catch (FileNotFoundException e) {
e.printStackTrace();
}
created = true;
}
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = null;
if(orientation_local == 1) {
System.out.println("videoformatting portrait");
format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
} else {
System.out.println("videoformatting landscape");
format = MediaFormat.createVideoFormat(MIME_TYPE, height, width);
}
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEglBase = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglBase, mEncoder.createInputSurface());
mEncoder.start();
mInputWindowSurface.makeCurrent();
mTextureRender = new TextureRender(context);
mTextureRender.surfaceCreated();
mTrackIndex = -1;
mMuxerStarted = false;
is_prepared = true;
System.out.println("prepareEncoder end");
}
/**
* Releases encoder resources.
*/
private void releaseEncoder() {
System.out.println("releaseEncoder end");
if (VERBOSE) Log.d(TAG, "releasing encoder objects");
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mEglBase != null) {
mEglBase.release();
mEglBase = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
System.out.println("releaseEncoder start");
}
byte[] enc_strm = null;
byte sps_pps_nal[] = null;
int sps_pps_nal_size = 0;
ofi_vc_buffer encoded_data_buffer = null;
private int encod_len = 0;
private int frame_type;
private encoded_stream_info enc_buffer_global;
private int video_width;
private int video_height;
private void drainEncoder(boolean endOfStream) {
System.out.println("drainEncoder start");
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
//encoded_stream_info enc_buffer = new encoded_stream_info(video_width * video_height * 3 / 2);
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
// mTrackIndex = mMuxer.addTrack(newFormat);
// mMuxer.start();
// mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d("Main", "ignoring BUFFER_FLAG_CODEC_CONFIG");
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
System.out.println("Encode SPS PPS buffer size" + mBufferInfo.size );
sps_pps_nal_size = mBufferInfo.size;
sps_pps_nal = new byte[sps_pps_nal_size];
encodedData.get(sps_pps_nal, 0, sps_pps_nal_size);
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// if (!mMuxerStarted) {
// throw new RuntimeException("muxer hasn't started");
// }
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
encodedData.get(enc_strm, sps_pps_nal_size, mBufferInfo.size);
System.arraycopy(sps_pps_nal, 0, enc_strm, 0, sps_pps_nal_size);
encod_len = mBufferInfo.size + sps_pps_nal_size;
if ((enc_strm[sps_pps_nal_size + 4] & 0x1F)== 5) {
frame_type = 2;
} else {
frame_type = 0;
}
//mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d("Main", "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
enc_buffer_global.encoded_len = 0; // frame sending is disabled.
if(enc_buffer_global.encoded_len == 0) {
enc_buffer_global.encoded_data = enc_strm;
enc_buffer_global.encoded_len = encod_len;
enc_buffer_global.frame_type = frame_type;
// System.out.println("encoded Wrote stream len =" + enc_buffer_global.encoded_len);
try {
fp_enc.write(enc_strm, 0, encod_len);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
fp_enc.flush();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
enc_len.format("%d\n", encod_len);
enc_len.flush();
}
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
System.out.println("drainEncoder end");
}
public void set_buffer(encoded_stream_info bufer) {
enc_buffer_global = bufer;
}
}
Other than these, I have added the
EglCore.java
EglSurfaceBase.java
TextureRender.java
WindowSurface.java
classes from the grafika link given above.
I am still not able to figure out why my app freezes during re-initialisation, although sometimes it works.
Can anyone help? Otherwise, what steps should I take to change the encoding resolution?
Thanks in advance.
I got a working version with the following code:
public void onConfigurationChanged(Configuration newConfig) {
System.out.println("On config change start ");
super.onConfigurationChanged(newConfig);
mGLView.queueEvent(new Runnable() {
@Override public void run() {
// Tell the renderer that it's about to be paused so it can clean up.
mRenderer.notifyPausing();
}
});
mGLView.queueEvent(new Runnable() {
@Override public void run() {
mRenderer.re_init();
}
});
}
where the re_init function is:
public void re_init() {
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
}
//mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
}
In mRenderer.notifyPausing() I have also added a videoEncoder.stopRecording() call, which waits until the whole recorder is stopped (I did an object-based synchronisation there).
But the whole re-initialisation takes 250-400 ms.
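That object-based synchronisation can be a plain monitor that the encoder thread notifies once releaseEncoder() has finished. A minimal sketch (mStopFence, waitUntilStopped and signalStopped are hypothetical names, not grafika API):
// The caller blocks until the encoder thread has released everything.
// The encoder thread calls signalStopped() right after releaseEncoder().
private final Object mStopFence = new Object();
private boolean mStopped = false;
public void waitUntilStopped() {
    synchronized (mStopFence) {
        while (!mStopped) {
            try {
                mStopFence.wait();
            } catch (InterruptedException ignored) { }
        }
    }
}
void signalStopped() {
    synchronized (mStopFence) {
        mStopped = true;
        mStopFence.notifyAll();
    }
}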
