I am trying to capture flash images plus RAW with the camera2 API and save JPEG and DNG files. On the first shot, the resulting JPEG image is black; on the second shot everything is fine! I take the photos in a completely dark room. The flash fires for both shots, and each time it fires twice (precapture and capture).
A similar issue also happens when I capture just a JPEG image (without RAW) and the exposure compensation is set to 0.
Here is the code that creates the capture requests:
public static void CreateRequests(final int format) throws CameraAccessException
{
final boolean isRAWCapture = (format == CameraController.RAW);
stillRequestBuilder = HALv3.getInstance().camDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
precaptureRequestBuilder = HALv3.getInstance().camDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
rawRequestBuilder = HALv3.getInstance().camDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
if (format == CameraController.YUV_RAW)
{
stillRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
stillRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
precaptureRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
precaptureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
} else if (isRAWCapture)
{
stillRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
stillRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
precaptureRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
precaptureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
rawRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
rawRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
} else
{
stillRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
stillRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
precaptureRequestBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
precaptureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
}
stillRequestBuilder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
precaptureRequestBuilder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
if (isRAWCapture)
rawRequestBuilder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
if ((zoomLevel > 1.0f) && (format != CameraController.YUV_RAW))
{
zoomCropCapture = getZoomRect(zoomLevel, activeRect.width(), activeRect.height());
stillRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomCropCapture);
precaptureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomCropCapture);
if (isRAWCapture)
rawRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomCropCapture);
}
int focusMode = PreferenceManager.getDefaultSharedPreferences(MainScreen.getMainContext()).getInt(
CameraController.isFrontCamera() ? MainScreen.sFrontFocusModePref : MainScreen.sRearFocusModePref, -1);
stillRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, focusMode);
precaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, focusMode);
if (isRAWCapture)
rawRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, focusMode);
if (format == CameraController.JPEG)
{
stillRequestBuilder.addTarget(MainScreen.getJPEGImageReader().getSurface());
} else if (format == CameraController.YUV || format == CameraController.YUV_RAW)
{
stillRequestBuilder.addTarget(MainScreen.getYUVImageReader().getSurface());
} else if (format == CameraController.RAW)
{
rawRequestBuilder.addTarget(MainScreen.getRAWImageReader().getSurface());
stillRequestBuilder.addTarget(MainScreen.getJPEGImageReader().getSurface());
}
precaptureRequestBuilder.addTarget(MainScreen.getPreviewYUVImageReader().getSurface());
int flashMode = PreferenceManager.getDefaultSharedPreferences(MainScreen.getMainContext()).getInt(
MainScreen.sFlashModePref, -1);
Integer previewFlash = null;
if (flashMode == CameraParameters.FLASH_MODE_OFF) {
previewFlash = CaptureRequest.FLASH_MODE_OFF;
} else if (flashMode == CameraParameters.FLASH_MODE_SINGLE) {
previewFlash = CaptureRequest.FLASH_MODE_SINGLE;
} else if (flashMode == CameraParameters.FLASH_MODE_TORCH) {
previewFlash = CaptureRequest.FLASH_MODE_TORCH;
}
HALv3.stillRequestBuilder.set(CaptureRequest.FLASH_MODE, previewFlash);
HALv3.precaptureRequestBuilder.set(CaptureRequest.FLASH_MODE, previewFlash);
HALv3.rawRequestBuilder.set(CaptureRequest.FLASH_MODE, previewFlash);
if(flashMode == CameraParameters.FLASH_MODE_SINGLE || flashMode == CameraParameters.FLASH_MODE_AUTO || flashMode == CameraParameters.FLASH_MODE_REDEYE)
{
if(flashMode == CameraParameters.FLASH_MODE_SINGLE)
flashMode = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
else if(flashMode == CameraParameters.FLASH_MODE_AUTO )
flashMode = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH;
else
flashMode = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
HALv3.stillRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
HALv3.stillRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashMode);
HALv3.precaptureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
HALv3.precaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashMode);
HALv3.rawRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
HALv3.rawRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashMode);
}
}
and the capture methods:
public static int captureImageWithParamsHALv3Simple(final int nFrames, final int format, final int[] pause,
final int[] evRequested, final int[] gain, final long[] exposure, final boolean resInHeap,
final boolean playShutter)
{
int requestID;
final boolean isRAWCapture = (format == CameraController.RAW);
try
{
requestID = HALv3.getInstance().mCaptureSession.capture(stillRequestBuilder.build(), stillCaptureCallback,
null);
if (isRAWCapture)
HALv3.getInstance().mCaptureSession.capture(rawRequestBuilder.build(), stillCaptureCallback, null);
} catch (CameraAccessException e)
{
e.printStackTrace();
}
return 0;
}
public static int captureImageWithParamsHALv3(final int nFrames, final int format, final int[] pause,
final int[] evRequested, final int[] gain, final long[] exposure, final boolean resInHeap,
final boolean playShutter)
{
int requestID = -1;
try
{
CreateRequests(format);
precaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
requestID = HALv3.getInstance().mCaptureSession.capture(precaptureRequestBuilder.build(),
new CameraCaptureSession.CaptureCallback()
{
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result)
{
precaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
captureImageWithParamsHALv3Simple(nFrames, format, pause, evRequested, gain, exposure,
resInHeap, playShutter);
}
}, null);
} catch (CameraAccessException e)
{
Log.e(TAG, "setting up still image capture request failed");
e.printStackTrace();
throw new RuntimeException();
}
return requestID;
}
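For comparison: the code above fires the still capture in the very first onCaptureCompleted of the precapture request, but on a cold start AE has not converged yet at that point, which is a plausible cause of the black first frame (the second shot works because AE is already metered). The usual pattern is to watch CONTROL_AE_STATE on the repeating preview callback after setting the trigger, and only capture once the metering sequence has finished. A minimal sketch, assuming a repeating preview request is running and fireStillCapture() is a hypothetical helper standing in for captureImageWithParamsHALv3Simple():

private boolean precaptureRunning = false;

private final CameraCaptureSession.CaptureCallback aeWaitCallback = new CameraCaptureSession.CaptureCallback()
{
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result)
    {
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        if (aeState == null)
        {
            fireStillCapture(); // device does not report AE state; capture immediately
        } else if (aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
                || aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING)
        {
            precaptureRunning = true; // flash metering sequence is still running
        } else if (precaptureRunning
                && (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                        || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED))
        {
            precaptureRunning = false;
            fireStillCapture(); // AE has settled; the still frame should be properly exposed
        }
    }
};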
You can also see the full code of the project here: https://github.com/almalence/OpenCamera
And the camera2 part:
https://github.com/almalence/OpenCamera/blob/master/src/com/almalence/opencam/cameracontroller/HALv3.java
Here is my logcat:
W/System.err: java.lang.IllegalStateException: Failed to add the track to the muxer
W/System.err: at android.media.MediaMuxer.nativeAddTrack(Native Method)
W/System.err: at android.media.MediaMuxer.addTrack(MediaMuxer.java:626)
W/System.err: at com.marvhong.videoeffect.composer.MuxRender.onSetOutputFormat(MuxRender.java:64)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.drainEncoder(VideoComposer.java:224)
W/System.err: at com.marvhong.videoeffect.composer.VideoComposer.stepPipeline(VideoComposer.java:113)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.runPipelines(Mp4ComposerEngine.java:181)
W/System.err: at com.marvhong.videoeffect.composer.Mp4ComposerEngine.compose(Mp4ComposerEngine.java:127)
W/System.err: at com.marvhong.videoeffect.composer.Mp4Composer$1.run(Mp4Composer.java:198)
W/System.err: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
W/System.err: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
W/System.err: at java.lang.Thread.run(Thread.java:764)
E/TrimVideoActivity: filterVideo---onFailed()
In my application, I'm trying to add filters to a video. Sometimes my app crashes and sometimes it works fine; the error is "Failed to add the track to the muxer".
I debugged the code and found that the failure depends on whether the video has an audio track: in one case applying a filter and saving the video works, in the other saving the filtered video fails.
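One thing worth checking before the classes (a sketch, not a verified fix): Mp4ComposerEngine below assumes the file has exactly two tracks and derives the audio index from whichever track is not video. Scanning the extractor by MIME type is more robust, and makes a genuinely missing audio track explicit:

// Sketch: resolve track indices by MIME type instead of assuming indices 0 and 1.
int videoTrackIndex = -1;
int audioTrackIndex = -1;
for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {
    String mime = mediaExtractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
    if (mime == null) continue;
    if (videoTrackIndex < 0 && mime.startsWith("video/")) videoTrackIndex = i;
    if (audioTrackIndex < 0 && mime.startsWith("audio/")) audioTrackIndex = i;
}
// audioTrackIndex == -1 here means the file really has no audio track.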
MuxRender class:
class MuxRender {
private static final String TAG = "MuxRender";
private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
private final MediaMuxer muxer;
private MediaFormat videoFormat;
private MediaFormat audioFormat;
private int videoTrackIndex;
private int audioTrackIndex;
private ByteBuffer byteBuffer;
private final List<SampleInfo> sampleInfoList;
private boolean started;
MuxRender(MediaMuxer muxer) {
this.muxer = muxer;
sampleInfoList = new ArrayList<>();
}
void setOutputFormat(SampleType sampleType, MediaFormat format) {
switch (sampleType) {
case VIDEO:
videoFormat = format;
break;
case AUDIO:
ObLogger.i(TAG, "format > " + format);
audioFormat = format;
break;
default:
throw new AssertionError();
}
}
void onSetOutputFormat() {
if (videoFormat != null && audioFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
ObLogger.i(TAG, "audioFormat > " + audioFormat);
audioTrackIndex = muxer.addTrack(audioFormat);
ObLogger.v(TAG, "Added track #" + audioTrackIndex + " with " + audioFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
} else if (videoFormat != null) {
videoTrackIndex = muxer.addTrack(videoFormat);
ObLogger.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(
MediaFormat.KEY_MIME) + " to muxer");
}
muxer.start();
started = true;
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocate(0);
}
byteBuffer.flip();
ObLogger.v(TAG, "Output format determined, writing " + sampleInfoList.size() +
" samples / " + byteBuffer.limit() + " bytes to muxer.");
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int offset = 0;
for (SampleInfo sampleInfo : sampleInfoList) {
sampleInfo.writeToBufferInfo(bufferInfo, offset);
muxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.sampleType), byteBuffer, bufferInfo);
offset += sampleInfo.size;
}
sampleInfoList.clear();
byteBuffer = null;
}
void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
if (started) {
muxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
return;
}
byteBuf.limit(bufferInfo.offset + bufferInfo.size);
byteBuf.position(bufferInfo.offset);
if (byteBuffer == null) {
byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
}
byteBuffer.put(byteBuf);
sampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
}
private int getTrackIndexForSampleType(SampleType sampleType) {
switch (sampleType) {
case VIDEO:
return videoTrackIndex;
case AUDIO:
return audioTrackIndex;
default:
throw new AssertionError();
}
}
public enum SampleType {VIDEO, AUDIO}
private static class SampleInfo {
private final SampleType sampleType;
private final int size;
private final long presentationTimeUs;
private final int flags;
private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
this.sampleType = sampleType;
this.size = size;
presentationTimeUs = bufferInfo.presentationTimeUs;
flags = bufferInfo.flags;
}
private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
bufferInfo.set(offset, size, presentationTimeUs, flags);
}
}
}
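For context on the exception itself: MediaMuxer only accepts addTrack() between construction and start(), and the native "Failed to add the track to the muxer" message is what addTrack() throws when it is called after start() or with a format the muxer cannot write. The legal lifecycle, as a reference sketch (outputPath, videoFormat, audioFormat and the sample buffers are placeholders):

// All addTrack() calls must happen before start(); the formats normally come
// from the encoders' INFO_OUTPUT_FORMAT_CHANGED events.
MediaMuxer muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int videoTrack = muxer.addTrack(videoFormat);
int audioTrack = muxer.addTrack(audioFormat);
muxer.start(); // after this point, any further addTrack() fails
muxer.writeSampleData(videoTrack, encodedBuffer, bufferInfo);
muxer.stop();
muxer.release();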
Mp4ComposerEngine class:
class Mp4ComposerEngine {
private static final String TAG = "Mp4ComposerEngine";
private static final double PROGRESS_UNKNOWN = -1.0;
private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
private static final long PROGRESS_INTERVAL_STEPS = 10;
private FileDescriptor inputFileDescriptor;
private VideoComposer videoComposer;
private IAudioComposer audioComposer;
private MediaExtractor mediaExtractor;
private MediaMuxer mediaMuxer;
private ProgressCallback progressCallback;
private long durationUs;
void setDataSource(FileDescriptor fileDescriptor) {
inputFileDescriptor = fileDescriptor;
}
void setProgressCallback(ProgressCallback progressCallback) {
this.progressCallback = progressCallback;
}
void compose(
final String destPath,
final Resolution outputResolution,
final GlFilter filter,
final int bitrate,
final boolean mute,
final Rotation rotation,
final Resolution inputResolution,
final FillMode fillMode,
final FillModeCustomItem fillModeCustomItem,
final int timeScale,
final boolean flipVertical,
final boolean flipHorizontal
) throws IOException {
try {
mediaExtractor = new MediaExtractor();
mediaExtractor.setDataSource(inputFileDescriptor);
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer = new MediaMuxer(destPath, OutputFormat.MUXER_OUTPUT_MPEG_4);
}
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(inputFileDescriptor);
try {
durationUs = Long
.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
} catch (NumberFormatException e) {
durationUs = -1;
}
ObLogger.d(TAG, "Duration (us): " + durationUs);
MediaFormat videoOutputFormat = MediaFormat
.createVideoFormat("video/avc", outputResolution.width(), outputResolution.height());
videoOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
videoOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
videoOutputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
videoOutputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
MuxRender muxRender = new MuxRender(mediaMuxer);
// identify track indices
MediaFormat format = mediaExtractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
final int videoTrackIndex;
final int audioTrackIndex;
if (mime.startsWith("video/")) {
videoTrackIndex = 0;
audioTrackIndex = 1;
} else {
videoTrackIndex = 1;
audioTrackIndex = 0;
}
// setup video composer
videoComposer = new VideoComposer(mediaExtractor, videoTrackIndex, videoOutputFormat, muxRender, timeScale);
videoComposer.setUp(filter, rotation, outputResolution, inputResolution, fillMode, fillModeCustomItem, flipVertical, flipHorizontal);
mediaExtractor.selectTrack(videoTrackIndex);
// setup audio if present and not muted
if (mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO) != null && !mute) {
// video with an audio track
if (timeScale < 2) {
audioComposer = new AudioComposer(mediaExtractor, audioTrackIndex, muxRender);
} else {
audioComposer = new RemixAudioComposer(mediaExtractor, audioTrackIndex, mediaExtractor.getTrackFormat(audioTrackIndex), muxRender, timeScale);
}
audioComposer.setup();
mediaExtractor.selectTrack(audioTrackIndex);
runPipelines();
} else {
// video without an audio track
runPipelinesNoAudio();
}
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.stop();
}
} finally {
try {
if (videoComposer != null) {
videoComposer.release();
videoComposer = null;
}
if (audioComposer != null) {
audioComposer.release();
audioComposer = null;
}
if (mediaExtractor != null) {
mediaExtractor.release();
mediaExtractor = null;
}
} catch (RuntimeException e) {
e.printStackTrace();
// Too fatal to make alive the app, because it may leak native resources.
// throw new Error("Could not shutdown mediaExtractor, codecs and mediaMuxer pipeline.", e);
}
try {
if (mediaMuxer != null) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2) {
mediaMuxer.release();
}
mediaMuxer = null;
}
} catch (RuntimeException e) {
ObLogger.e(TAG, "Failed to release mediaMuxer.", e);
}
}
}
private void runPipelines() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
}// unknown
}
while (!(videoComposer.isFinished() && audioComposer.isFinished())) {
boolean stepped = videoComposer.stepPipeline()
|| audioComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
double audioProgress = audioComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) audioComposer.getWrittenPresentationTimeUs() / durationUs);
double progress = (videoProgress + audioProgress) / 2.0;
if (progressCallback != null) {
progressCallback.onProgress(progress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
private void runPipelinesNoAudio() {
long loopCount = 0;
if (durationUs <= 0) {
if (progressCallback != null) {
progressCallback.onProgress(PROGRESS_UNKNOWN);
} // unknown
}
while (!videoComposer.isFinished()) {
boolean stepped = videoComposer.stepPipeline();
loopCount++;
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
double videoProgress = videoComposer.isFinished() ? 1.0 : Math
.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
if (progressCallback != null) {
progressCallback.onProgress(videoProgress);
}
}
if (!stepped) {
try {
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
} catch (InterruptedException e) {
// nothing to do
}
}
}
}
interface ProgressCallback {
/**
* Called to notify progress. Same thread which initiated transcode is used.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
}
}
Mp4Composer class:
public class Mp4Composer {
private final static String TAG = Mp4Composer.class.getSimpleName();
private final String srcPath;
private final String destPath;
private GlFilter filter;
private Resolution outputResolution;
private int bitrate = -1;
private boolean mute = false;
private Rotation rotation = Rotation.NORMAL;
private Listener listener;
private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
private FillModeCustomItem fillModeCustomItem;
private int timeScale = 1;
private boolean flipVertical = false;
private boolean flipHorizontal = false;
private ExecutorService executorService;
public Mp4Composer(@NonNull final String srcPath, @NonNull final String destPath) {
this.srcPath = srcPath;
this.destPath = destPath;
}
public Mp4Composer filter(@NonNull GlFilter filter) {
this.filter = filter;
return this;
}
public Mp4Composer size(int width, int height) {
this.outputResolution = new Resolution(width, height);
return this;
}
public Mp4Composer videoBitrate(int bitrate) {
this.bitrate = bitrate;
return this;
}
public Mp4Composer mute(boolean mute) {
this.mute = mute;
return this;
}
public Mp4Composer flipVertical(boolean flipVertical) {
this.flipVertical = flipVertical;
return this;
}
public Mp4Composer flipHorizontal(boolean flipHorizontal) {
this.flipHorizontal = flipHorizontal;
return this;
}
public Mp4Composer rotation(@NonNull Rotation rotation) {
this.rotation = rotation;
return this;
}
public Mp4Composer fillMode(@NonNull FillMode fillMode) {
this.fillMode = fillMode;
return this;
}
public Mp4Composer customFillMode(@NonNull FillModeCustomItem fillModeCustomItem) {
this.fillModeCustomItem = fillModeCustomItem;
this.fillMode = FillMode.CUSTOM;
return this;
}
public Mp4Composer listener(@NonNull Listener listener) {
this.listener = listener;
return this;
}
public Mp4Composer timeScale(final int timeScale) {
this.timeScale = timeScale;
return this;
}
private ExecutorService getExecutorService() {
if (executorService == null) {
executorService = Executors.newSingleThreadExecutor();
}
return executorService;
}
public Mp4Composer start() {
getExecutorService().execute(new Runnable() {
@Override
public void run() {
Mp4ComposerEngine engine = new Mp4ComposerEngine();
engine.setProgressCallback(new Mp4ComposerEngine.ProgressCallback() {
@Override
public void onProgress(final double progress) {
if (listener != null) {
listener.onProgress(progress);
}
}
});
final File srcFile = new File(srcPath);
final FileInputStream fileInputStream;
try {
fileInputStream = new FileInputStream(srcFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
try {
engine.setDataSource(fileInputStream.getFD());
} catch (IOException e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
return;
}
final int videoRotate = getVideoRotation(srcPath);
final Resolution srcVideoResolution = getVideoResolution(srcPath, videoRotate);
if (filter == null) {
filter = new GlFilter();
}
if (fillMode == null) {
fillMode = FillMode.PRESERVE_ASPECT_FIT;
}
if (fillModeCustomItem != null) {
fillMode = FillMode.CUSTOM;
}
if (outputResolution == null) {
if (fillMode == FillMode.CUSTOM) {
outputResolution = srcVideoResolution;
} else {
Rotation rotate = Rotation.fromInt(rotation.getRotation() + videoRotate);
if (rotate == Rotation.ROTATION_90 || rotate == Rotation.ROTATION_270) {
outputResolution = new Resolution(srcVideoResolution.height(), srcVideoResolution.width());
} else {
outputResolution = srcVideoResolution;
}
}
}
if (filter instanceof IResolutionFilter) {
((IResolutionFilter) filter).setResolution(outputResolution);
}
if (timeScale < 2) {
timeScale = 1;
}
ObLogger.d(TAG, "rotation = " + (rotation.getRotation() + videoRotate));
ObLogger.d(TAG, "inputResolution width = " + srcVideoResolution.width() + " height = " + srcVideoResolution.height());
ObLogger.d(TAG, "outputResolution width = " + outputResolution.width() + " height = " + outputResolution.height());
ObLogger.d(TAG, "fillMode = " + fillMode);
try {
if (bitrate < 0) {
bitrate = calcBitRate(outputResolution.width(), outputResolution.height());
}
engine.compose(
destPath,
outputResolution,
filter,
bitrate,
mute,
Rotation.fromInt(rotation.getRotation() + videoRotate),
srcVideoResolution,
fillMode,
fillModeCustomItem,
timeScale,
flipVertical,
flipHorizontal
);
} catch (Exception e) {
e.printStackTrace();
if (listener != null) {
listener.onFailed(e);
}
executorService.shutdown();
return;
}
if (listener != null) {
listener.onCompleted();
}
executorService.shutdown();
}
});
return this;
}
public void cancel() {
getExecutorService().shutdownNow();
}
public interface Listener {
/**
* Called to notify progress.
*
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
*/
void onProgress(double progress);
/**
* Called when transcode completed.
*/
void onCompleted();
/**
* Called when transcode canceled.
*/
void onCanceled();
void onFailed(Exception exception);
}
private int getVideoRotation(String videoFilePath) {
try {
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(videoFilePath);
ObLogger.e("MediaMetadataRetriever", "getVideoRotation error");
String orientation = mediaMetadataRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
return Integer.valueOf(orientation);
} catch (NumberFormatException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
return 0;
}
private int calcBitRate(int width, int height) {
final int bitrate = (int) (0.25 * 30 * width * height);
ObLogger.i(TAG, "bitrate=" + bitrate);
return bitrate;
}
private Resolution getVideoResolution(final String path, final int rotation) {
int width = 0;
int height = 0;
if (path != null && !path.isEmpty()) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(path);
try {
String Strwidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String Strheight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
if (Strwidth != null && Strheight != null) {
width = Integer.valueOf(Strwidth);
height = Integer.valueOf(Strheight);
}
retriever.release();
} catch (NumberFormatException e) {
retriever.release();
e.printStackTrace();
} catch (IllegalArgumentException e) {
retriever.release();
e.printStackTrace();
}
}
return new Resolution(width, height);
}
}
Main activity call to Mp4Composer:
private void startMediaCodec(String srcPath, String outputPath) {
mMp4Composer = new Mp4Composer(srcPath, outputPath)
// .rotation(Rotation.ROTATION_270)
//.size(720, 1280)
.fillMode(FillMode.PRESERVE_ASPECT_FIT)
.filter(MagicFilterFactory.getFilter())
.mute(false)
.flipHorizontal(false)
.flipVertical(false)
.listener(new Listener() {
@Override
public void onProgress(double progress) {
ObLogger.d(TAG, "filterVideo---onProgress: " + (int) (progress * 100));
runOnUiThread(new Runnable() {
@Override
public void run() {
//show progress
}
});
}
@Override
public void onCompleted() {
ObLogger.d(TAG, "filterVideo---onCompleted");
runOnUiThread(new Runnable() {
@Override
public void run() {
ObLogger.i(TAG, "run: Editor Screen is >>> ");
Intent intent = new Intent();
intent.putExtra(Extras.EXTRA_FILTER_SCREEN, outputPath);
setResult(RESULT_OK, intent);
finish();
}
});
}
@Override
public void onCanceled() {
ObLogger.e(TAG, "onCanceled");
NormalProgressDialog.stopLoading();
}
@Override
public void onFailed(Exception exception) {
ObLogger.e(TAG, "filterVideo---onFailed()");
NormalProgressDialog.stopLoading();
// Toast.makeText(TrimVideoActivity.this, "Video processing failed", Toast.LENGTH_SHORT).show();
}
})
.start();
}
I tried the links below, but they did not solve my problem:
https://stackoverflow.com/a/53140941/11138845
https://stackoverflow.com/a/21759073/11138845
Please help me! I used the example at https://github.com/pchab/AndroidRTC to stream video and audio from one Android device to another. In this example they use two libraries, libjingle_peerConnection and a SocketIo client, but I don't know how to save the streamed data in H.264 format.
After a lot of tries and hard work on this project, I found a solution for saving the video as MP4 without any problem.
Add this VideoFileRenderer.java to your project:
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
private final Handler renderThreadHandler;
private final HandlerThread audioThread;
private final Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean isRunning = true;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
private AudioDeviceModule audioDeviceModule;
public VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
if (withAudio) {
audioThread = new HandlerThread(TAG + "AudioThread");
audioThread.start();
audioThreadHandler = new Handler(audioThread.getLooper());
} else {
audioThread = null;
audioThreadHandler = null;
}
bufferInfo = new MediaCodec.BufferInfo();
this.sharedContext = sharedContext;
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = withAudio ? -1 : 0;
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
isRunning = false;
if (audioThreadHandler != null)
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioThread.quit();
});
renderThreadHandler.post(() -> {
if (encoder != null) {
encoder.stop();
encoder.release();
}
eglBase.release();
mediaMuxer.stop();
mediaMuxer.release();
renderThread.quit();
});
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (audioTrackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
private long presTime = 0L;
private void drainAudio() {
if (audioBufferInfo == null)
audioBufferInfo = new MediaCodec.BufferInfo();
while (true) {
int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
audioOutputBuffers = audioEncoder.getOutputBuffers();
Log.w(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = audioEncoder.getOutputFormat();
Log.w(TAG, "encoder output format changed: " + newFormat);
audioTrackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(audioBufferInfo.offset);
encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
if (muxerStarted)
mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
audioEncoder.releaseOutputBuffer(encoderStatus, false);
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
if (!isRunning)
return;
audioThreadHandler.post(() -> {
if (audioEncoder == null) try {
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (IOException exception) {
Log.wtf(TAG, exception);
}
int bufferIndex = audioEncoder.dequeueInputBuffer(0);
if (bufferIndex >= 0) {
ByteBuffer buffer = audioInputBuffers[bufferIndex];
buffer.clear();
byte[] data = audioSamples.getData();
buffer.put(data);
audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
presTime += data.length * 125 / 12; // 1000000 microseconds / 48000hz / 2 bytes
}
drainAudio();
});
}
}
Then add this MediaRecorderImpl.java implementation for the recording:
package com.vedja.hassan.kavandeh_master.utils;
import android.support.annotation.Nullable;
import android.util.Log;
import com.vedja.hassan.kavandeh_master.utils.utils.EglUtils;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import java.io.File;
public class MediaRecorderImpl {
private final Integer id;
private final VideoTrack videoTrack;
private final AudioSamplesInterceptor audioInterceptor;
private VideoFileRenderer videoFileRenderer;
private boolean isRunning = false;
private File recordFile;
public MediaRecorderImpl(Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioSamplesInterceptor audioInterceptor) {
this.id = id;
this.videoTrack = videoTrack;
this.audioInterceptor = audioInterceptor;
}
public void startRecording(File file) throws Exception {
recordFile = file;
if (isRunning)
return;
isRunning = true;
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
if (videoTrack != null) {
videoFileRenderer = new VideoFileRenderer(
file.getAbsolutePath(),
EglUtils.getRootEglBaseContext(),
audioInterceptor != null
);
videoTrack.addSink(videoFileRenderer);
if (audioInterceptor != null)
audioInterceptor.attachCallback(id, videoFileRenderer);
} else {
Log.e(TAG, "Video track is null");
if (audioInterceptor != null) {
//TODO(rostopira): audio only recording
throw new Exception("Audio-only recording not implemented yet");
}
}
}
public File getRecordFile() { return recordFile; }
public void stopRecording() {
isRunning = false;
if (audioInterceptor != null)
audioInterceptor.detachCallback(id);
if (videoTrack != null && videoFileRenderer != null) {
videoTrack.removeSink(videoFileRenderer);
videoFileRenderer.release();
videoFileRenderer = null;
}
}
private static final String TAG = "MediaRecorderImpl";
}
Then use the above classes with this code:
final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor();
private OutputAudioSamplesInterceptor outputSamplesInterceptor = null;
private final SparseArray<MediaRecorderImpl> mediaRecorders = new SparseArray<>();
void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) throws Exception {
AudioSamplesInterceptor interceptor = null;
if (audioChannel == AudioChannel.INPUT)
interceptor = inputSamplesInterceptor;
else if (audioChannel == AudioChannel.OUTPUT) {
if (outputSamplesInterceptor == null)
outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule);
interceptor = outputSamplesInterceptor;
}
MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor);
mediaRecorder.startRecording(new File(path));
mediaRecorders.append(id, mediaRecorder);
}
void stopRecording(Integer id) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
mediaRecorder.stopRecording();
mediaRecorders.remove(id);
File file = mediaRecorder.getRecordFile();
if (file != null) {
ContentValues values = new ContentValues(3);
values.put(MediaStore.Video.Media.TITLE, file.getName());
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
}
}
}
Finally, use this:
try {
VedjaSharedPreference sharedPreference = new VedjaSharedPreference(getContext());
final File dir = new File(sharedPreference.getStringParam(StaticParameter.SAVING_URL) + "/audio/");
dir.mkdirs(); // create the folders the files are written into
final File file = new File(dir, "Vedja-".concat(String.valueOf(System.currentTimeMillis())).concat(".mp4")); // the muxer writes MP4, so use an .mp4 extension
VideoTrack videoTrack = null;
MediaStreamTrack track = slaveManagerActivity.remoteStream.videoTracks.get(0);
if (track instanceof VideoTrack)
videoTrack = (VideoTrack) track;
AudioChannel audioChannel = AudioChannel.OUTPUT;
slaveManagerActivity.startRecordingToFile(file.getPath(), 1, videoTrack, audioChannel);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(
"Failed to open video file for output: ", e);
}
After copying this code, some classes may not be found in your project; you can search for those classes on the internet.
This project also has a VideoFileRenderer class you can use to save video to a file:
https://github.com/Piasy/AppRTC-Android
I'm developing an image capture app in which, after capturing an image, if you do not want to save the image, you press the back button [in onBackPressed I launch a new activity, i.e. BottomNavigationActivity]; it should switch to the home activity (like a normal camera app does).
@Override
public void onBackPressed()
{
startActivity(new Intent(getApplicationContext(), BottomNavigationActivity.class));
}
But when I press the back button, a TransactionTooLargeException occurs and the app crashes. I don't know why this happens, since I am not transferring any huge amount of data like an ArrayList etc. Please help me solve this problem.
Thanks in advance.
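For what it's worth, the likely cause here is not the Intent itself but the state that gets parceled when the new activity starts: EditSavePhotoFragment (shown further down) keeps the entire JPEG byte[] in its fragment arguments, and saved fragment state must fit through the Binder transaction buffer (roughly 1 MB). A common workaround, sketched below with an illustrative file name and argument key, is to write the bytes to the cache directory and pass only the path:

// Sketch: hand the fragment a small path string instead of the full JPEG bytes.
File photoFile = new File(getContext().getCacheDir(), "capture.jpg"); // illustrative name
try (FileOutputStream out = new FileOutputStream(photoFile)) {
    out.write(data); // 'data' is the byte[] delivered to onPictureTaken()
}
Bundle args = new Bundle();
args.putString("photo_path", photoFile.getAbsolutePath()); // small and parcel-safe
args.putInt(ROTATION_KEY, rotation);
fragment.setArguments(args);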
CameraFragment.java [here I capture the image]
public class CameraFragment extends Fragment implements SurfaceHolder.Callback, Camera.PictureCallback
{
public static final String TAG = CameraFragment.class.getSimpleName();
public static final String CAMERA_ID_KEY = "camera_id";
public static final String CAMERA_FLASH_KEY = "flash_mode";
public static final String IMAGE_INFO = "image_info";
private static final int PICTURE_SIZE_MAX_WIDTH = 1280;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
private int mCameraID;
private String mFlashMode;
private Camera mCamera;
private SquareCameraPreview mPreviewView;
private SurfaceHolder mSurfaceHolder;
private boolean mIsSafeToTakePhoto = false;
private ImageParameters mImageParameters;
private CameraOrientationListener mOrientationListener;
ImageView iv_camera_close;
public static Fragment newInstance() {
return new CameraFragment();
}
public CameraFragment() {}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mOrientationListener = new CameraOrientationListener(context);
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Restore your state here because a double rotation with this fragment
// in the backstack will cause improper state restoration
// onCreate() -> onSavedInstanceState() instead of going through onCreateView()
if (savedInstanceState == null) {
mCameraID = getBackCameraID();
mFlashMode = CameraSettingPreferences.getCameraFlashMode(getActivity());
mImageParameters = new ImageParameters();
}
else
{
mCameraID = savedInstanceState.getInt(CAMERA_ID_KEY);
mFlashMode = savedInstanceState.getString(CAMERA_FLASH_KEY);
mImageParameters = savedInstanceState.getParcelable(IMAGE_INFO);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
return inflater.inflate(R.layout.squarecamera__fragment_camera, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mOrientationListener.enable();
mPreviewView = (SquareCameraPreview) view.findViewById(R.id.camera_preview_view);
mPreviewView.getHolder().addCallback(CameraFragment.this);
final View topCoverView = view.findViewById(R.id.cover_top_view);
final View btnCoverView = view.findViewById(R.id.cover_bottom_view);
iv_camera_close = view.findViewById(R.id.iv_camera_close);
iv_camera_close.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
mImageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (savedInstanceState == null) {
ViewTreeObserver observer = mPreviewView.getViewTreeObserver();
observer.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
mImageParameters.mPreviewWidth = mPreviewView.getWidth();
mImageParameters.mPreviewHeight = mPreviewView.getHeight();
mImageParameters.mCoverWidth = mImageParameters.mCoverHeight
= mImageParameters.calculateCoverWidthHeight();
// Log.d(TAG, "parameters: " + mImageParameters.getStringValues());
// Log.d(TAG, "cover height " + topCoverView.getHeight());
resizeTopAndBtmCover(topCoverView, btnCoverView);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
mPreviewView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
} else {
mPreviewView.getViewTreeObserver().removeGlobalOnLayoutListener(this);
}
}
});
} else {
if (mImageParameters.isPortrait()) {
topCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
btnCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
} else {
topCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
btnCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
}
}
final ImageView swapCameraBtn = (ImageView) view.findViewById(R.id.change_camera);
swapCameraBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mCameraID == CameraInfo.CAMERA_FACING_FRONT) {
mCameraID = getBackCameraID();
} else {
mCameraID = getFrontCameraID();
}
restartPreview();
}
});
final View changeCameraFlashModeBtn = view.findViewById(R.id.flash);
changeCameraFlashModeBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_AUTO)) {
mFlashMode = Camera.Parameters.FLASH_MODE_ON;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_ON)) {
mFlashMode = Camera.Parameters.FLASH_MODE_OFF;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_OFF)) {
mFlashMode = Camera.Parameters.FLASH_MODE_AUTO;
}
setupFlashMode();
setupCamera();
}
});
setupFlashMode();
final ImageView takePhotoBtn = (ImageView) view.findViewById(R.id.capture_image_button);
takePhotoBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
private void setupFlashMode() {
View view = getView();
if (view == null) return;
final TextView autoFlashIcon = (TextView) view.findViewById(R.id.auto_flash_icon);
if (Camera.Parameters.FLASH_MODE_AUTO.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Auto");
} else if (Camera.Parameters.FLASH_MODE_ON.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("On");
} else if (Camera.Parameters.FLASH_MODE_OFF.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Off");
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
// Log.d(TAG, "onSaveInstanceState");
outState.putInt(CAMERA_ID_KEY, mCameraID);
outState.putString(CAMERA_FLASH_KEY, mFlashMode);
outState.putParcelable(IMAGE_INFO, mImageParameters);
super.onSaveInstanceState(outState);
}
private void resizeTopAndBtmCover( final View topCover, final View bottomCover) {
ResizeAnimation resizeTopAnimation
= new ResizeAnimation(topCover, mImageParameters);
resizeTopAnimation.setDuration(800);
resizeTopAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
topCover.startAnimation(resizeTopAnimation);
ResizeAnimation resizeBtmAnimation
= new ResizeAnimation(bottomCover, mImageParameters);
resizeBtmAnimation.setDuration(800);
resizeBtmAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
bottomCover.startAnimation(resizeBtmAnimation);
}
private void getCamera(int cameraID) {
try {
mCamera = Camera.open(cameraID);
mPreviewView.setCamera(mCamera);
} catch (Exception e) {
Log.d(TAG, "Can't open camera with id " + cameraID);
e.printStackTrace();
}
}
/**
* Restart the camera preview
*/
private void restartPreview() {
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
getCamera(mCameraID);
startCameraPreview();
}
/**
* Start the camera preview
*/
private void startCameraPreview() {
determineDisplayOrientation();
setupCamera();
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
setSafeToTakePhoto(true);
setCameraFocusReady(true);
} catch (IOException e) {
Log.d(TAG, "Can't start camera preview due to IOException " + e);
e.printStackTrace();
}
}
/**
* Stop the camera preview
*/
private void stopCameraPreview() {
setSafeToTakePhoto(false);
setCameraFocusReady(false);
// Nulls out callbacks, stops face detection
mCamera.stopPreview();
mPreviewView.setCamera(null);
}
private void setSafeToTakePhoto(final boolean isSafeToTakePhoto) {
mIsSafeToTakePhoto = isSafeToTakePhoto;
}
private void setCameraFocusReady(final boolean isFocusReady) {
if (this.mPreviewView != null) {
mPreviewView.setIsFocusReady(isFocusReady);
}
}
private void determineDisplayOrientation() {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraID, cameraInfo);
// Clockwise rotation needed to align the window display to the natural position
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: {
degrees = 0;
break;
}
case Surface.ROTATION_90: {
degrees = 90;
break;
}
case Surface.ROTATION_180: {
degrees = 180;
break;
}
case Surface.ROTATION_270: {
degrees = 270;
break;
}
}
int displayOrientation;
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
// Orientation is angle of rotation when facing the camera for
// the camera image to match the natural orientation of the device
displayOrientation = (cameraInfo.orientation + degrees) % 360;
displayOrientation = (360 - displayOrientation) % 360;
} else {
displayOrientation = (cameraInfo.orientation - degrees + 360) % 360;
}
mImageParameters.mDisplayOrientation = displayOrientation;
mImageParameters.mLayoutOrientation = degrees;
mCamera.setDisplayOrientation(mImageParameters.mDisplayOrientation);
}
private void setupCamera() {
// Never keep a global parameters
Camera.Parameters parameters = mCamera.getParameters();
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
// Set continuous picture focus, if it's supported
if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
final View changeCameraFlashModeBtn = getView().findViewById(R.id.flash);
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(mFlashMode)) {
parameters.setFlashMode(mFlashMode);
changeCameraFlashModeBtn.setVisibility(View.VISIBLE);
} else {
changeCameraFlashModeBtn.setVisibility(View.INVISIBLE);
}
mCamera.setParameters(parameters);
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPreviewSizes(), PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPictureSizes(), PICTURE_SIZE_MAX_WIDTH);
}
private Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
Size size;
int numOfSizes = sizes.size();
for (int i = 0; i < numOfSizes; i++) {
size = sizes.get(i);
boolean isDesireRatio = (size.width / 4) == (size.height / 3);
boolean isBetterSize = (bestSize == null) || size.width > bestSize.width;
if (isDesireRatio && isBetterSize) {
bestSize = size;
}
}
if (bestSize == null) {
Log.d(TAG, "cannot find the best camera size");
return sizes.get(sizes.size() - 1);
}
return bestSize;
}
private int getFrontCameraID() {
PackageManager pm = getActivity().getPackageManager();
if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
return CameraInfo.CAMERA_FACING_FRONT;
}
return getBackCameraID();
}
private int getBackCameraID() {
return CameraInfo.CAMERA_FACING_BACK;
}
private void takePicture() {
if (mIsSafeToTakePhoto) {
setSafeToTakePhoto(false);
mOrientationListener.rememberOrientation();
Camera.ShutterCallback shutterCallback = null;
Camera.PictureCallback raw = null;
// postView callback occurs when a scaled, fully processed
// postView image is available.
Camera.PictureCallback postView = null;
mCamera.takePicture(shutterCallback, raw, postView, this);
}
}
@Override
public void onResume() {
super.onResume();
if (mCamera == null) {
restartPreview();
}
}
@Override
public void onStop() {
mOrientationListener.disable();
// stop the preview
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
CameraSettingPreferences.saveCameraFlashMode(getActivity(), mFlashMode);
super.onStop();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mSurfaceHolder = holder;
getCamera(mCameraID);
startCameraPreview();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// The surface is destroyed when the visibility of the SurfaceView is set to View.INVISIBLE
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode != Activity.RESULT_OK) return;
switch (requestCode) {
case 1:
Uri imageUri = data.getData();
break;
default:
super.onActivityResult(requestCode, resultCode, data);
}
}
/**
* A picture has been taken
* @param data
* @param camera
*/
@Override
public void onPictureTaken(byte[] data, Camera camera) {
int rotation = getPhotoRotation();
getFragmentManager()
.beginTransaction()
.replace(
R.id.fragment_container,
EditSavePhotoFragment.newInstance(data, rotation, mImageParameters.createCopy()),
EditSavePhotoFragment.TAG)
.addToBackStack(null)
.commit();
setSafeToTakePhoto(true);
}
private int getPhotoRotation() {
int rotation;
int orientation = mOrientationListener.getRememberedNormalOrientation();
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(mCameraID, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
rotation = (info.orientation - orientation + 360) % 360;
} else {
rotation = (info.orientation + orientation) % 360;
}
return rotation;
}
/**
* When orientation changes, onOrientationChanged(int) of the listener will be called
*/
private static class CameraOrientationListener extends OrientationEventListener {
private int mCurrentNormalizedOrientation;
private int mRememberedNormalOrientation;
public CameraOrientationListener(Context context) {
super(context, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
public void onOrientationChanged(int orientation) {
if (orientation != ORIENTATION_UNKNOWN) {
mCurrentNormalizedOrientation = normalize(orientation);
}
}
/**
* @param degrees Amount of clockwise rotation from the device's natural position
* @return Normalized degrees to just 0, 90, 180, 270
*/
private int normalize(int degrees) {
if (degrees > 315 || degrees <= 45) {
return 0;
}
if (degrees > 45 && degrees <= 135) {
return 90;
}
if (degrees > 135 && degrees <= 225) {
return 180;
}
if (degrees > 225 && degrees <= 315) {
return 270;
}
throw new RuntimeException("The physics as we know them are no more. Watch out for anomalies.");
}
public void rememberOrientation() {
mRememberedNormalOrientation = mCurrentNormalizedOrientation;
}
public int getRememberedNormalOrientation() {
rememberOrientation();
return mRememberedNormalOrientation;
}
}
}
EditSavePhotoFragment.java [here I display the preview of the image]
This is the code that switches the activity; when I call it, my app crashes:
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
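A sketch of an alternative (assuming BottomNavigationActivity is already below the camera activity in the task): finish the camera activity instead of stacking a new instance on top of it, so its back-stack fragments, and the byte array held in their arguments, never have to be parceled:

view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        Intent intent = new Intent(getContext(), BottomNavigationActivity.class);
        // Reuse the existing BottomNavigationActivity rather than creating another copy.
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
        startActivity(intent);
        getActivity().finish(); // drop the camera activity and its saved fragment state
    }
});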
Below is the full source code:
public class EditSavePhotoFragment extends Fragment {
public static final String TAG = EditSavePhotoFragment.class.getSimpleName();
public static String BITMAP_KEY = "bitmap_byte_array";
public static String ROTATION_KEY = "rotation";
public static String IMAGE_INFO = "image_info";
private static final int REQUEST_STORAGE = 1;
SharedPreferences pref;
SharedPreferences.Editor editor;
private static final String PHOTO_PREF = "PHOTO_PREF";
private static final String CAPTURE_IMAGE_PATH = "CAPTURE_IMAGE_PATH";
public static Fragment newInstance(byte[] bitmapByteArray, int rotation,
@NonNull ImageParameters parameters) {
Fragment fragment = new EditSavePhotoFragment();
Bundle args = new Bundle();
args.putByteArray(BITMAP_KEY, bitmapByteArray);
args.putInt(ROTATION_KEY, rotation);
args.putParcelable(IMAGE_INFO, parameters);
fragment.setArguments(args);
return fragment;
}
public EditSavePhotoFragment() {}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState)
{
View view = inflater.inflate(R.layout.squarecamera__fragment_edit_save_photo, container, false);
pref = getContext().getSharedPreferences(PHOTO_PREF, 0);
editor = pref.edit();
return view;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
int rotation = getArguments().getInt(ROTATION_KEY);
byte[] data = getArguments().getByteArray(BITMAP_KEY);
ImageParameters imageParameters = getArguments().getParcelable(IMAGE_INFO);
if (imageParameters == null) {
return;
}
final ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
imageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
final View topView = view.findViewById(R.id.topView);
if (imageParameters.mIsPortrait) {
topView.getLayoutParams().height = imageParameters.mCoverHeight;
} else {
topView.getLayoutParams().width = imageParameters.mCoverWidth;
}
rotatePicture(rotation, data, photoImageView);
view.findViewById(R.id.save_photo).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
savePicture(photoImageView);
}
});
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
}
private void rotatePicture(int rotation, byte[] data, ImageView photoImageView) {
Bitmap bitmap = ImageUtility.decodeSampledBitmapFromByte(getActivity(), data);
// Log.d(TAG, "original bitmap width " + bitmap.getWidth() + " height " + bitmap.getHeight());
if (rotation != 0) {
Bitmap oldBitmap = bitmap;
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
bitmap = Bitmap.createBitmap(
oldBitmap, 0, 0, oldBitmap.getWidth(), oldBitmap.getHeight(), matrix, false
);
oldBitmap.recycle();
}
photoImageView.setImageBitmap(bitmap);
}
private void savePicture(ImageView imageView)
{
/*
try
{
SaveImageMethod(imageView);
}
catch (IOException e)
{
e.printStackTrace();
}
*/
requestForPermission();
}
private void SaveImageMethod(ImageView iv) throws IOException
{
BitmapDrawable draw = (BitmapDrawable) iv.getDrawable();
Bitmap bitmap = draw.getBitmap();
FileOutputStream outStream = null;
File sdCard = Environment.getExternalStorageDirectory();
File dir = new File(sdCard.getAbsolutePath() + "/SelfiLife");
dir.mkdirs();
String fileName = String.format("%d.jpg", System.currentTimeMillis());
File outFile = new File(dir, fileName);
Log.d("MainActivity","new IMAGE PATH = "+outFile);
editor.putString(CAPTURE_IMAGE_PATH, String.valueOf(outFile));
editor.commit();
outStream = new FileOutputStream(outFile);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outStream);
outStream.flush();
outStream.close();
}
private void requestForPermission() {
RuntimePermissionActivity.startActivity(EditSavePhotoFragment.this,
REQUEST_STORAGE,
Manifest.permission.WRITE_EXTERNAL_STORAGE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (Activity.RESULT_OK != resultCode) return;
if (REQUEST_STORAGE == requestCode && data != null) {
final boolean isGranted = data.getBooleanExtra(RuntimePermissionActivity.REQUESTED_PERMISSION, false);
final View view = getView();
if (isGranted && view != null) {
ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
Bitmap bitmap = ((BitmapDrawable) photoImageView.getDrawable()).getBitmap();
Uri photoUri = ImageUtility.savePicture(getActivity(), bitmap);
((CameraActivity) getActivity()).returnPhotoUri(photoUri);
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
}
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final int REQUEST_CODE = 1234;
private int mScreenDensity;
private MediaProjectionManager mProjectionManager;
private static final int DISPLAY_WIDTH = 720;
private static final int DISPLAY_HEIGHT = 1280;
private MediaProjection mMediaProjection;
private VirtualDisplay mVirtualDisplay;
private ToggleButton mToggleButton;
private MediaRecorder mMediaRecorder;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final int REQUEST_PERMISSIONS = 10;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
mScreenDensity = metrics.densityDpi;
mProjectionManager = (MediaProjectionManager) getSystemService
(Context.MEDIA_PROJECTION_SERVICE);
mToggleButton = (ToggleButton) findViewById(R.id.toggle);
mToggleButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (ContextCompat.checkSelfPermission(MainActivity.this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) + ContextCompat
.checkSelfPermission(MainActivity.this,
Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale
(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE) ||
ActivityCompat.shouldShowRequestPermissionRationale
(MainActivity.this, Manifest.permission.RECORD_AUDIO)) {
mToggleButton.setChecked(false);
Snackbar.make(findViewById(android.R.id.content), R.string.label_permissions,
Snackbar.LENGTH_INDEFINITE).setAction("ENABLE",
new View.OnClickListener() {
@Override
public void onClick(View v) {
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission
.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO},
REQUEST_PERMISSIONS);
}
}).show();
} else {
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission
.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO},
REQUEST_PERMISSIONS);
}
} else {
onToggleScreenShare(v);
}
}
});
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
Log.d(TAG, " requestCode " + requestCode + " resultCode " + requestCode);
if (REQUEST_CODE == requestCode) {
if (resultCode == RESULT_OK) {
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
startRecording(); // defined below
} else {
Log.d(TAG, "Persmission denied");
}
}
}
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final int VIDEO_WIDTH = 720;
private static final int VIDEO_HEIGHT = 1280;
// …
private boolean mMuxerStarted = false;
private Surface mInputSurface;
private MediaMuxer mMuxer;
private MediaCodec mVideoEncoder;
private MediaCodec.BufferInfo mVideoBufferInfo;
private int mTrackIndex = -1;
private final Handler mDrainHandler = new Handler(Looper.getMainLooper());
private Runnable mDrainEncoderRunnable = new Runnable() {
@Override
public void run() {
drainEncoder();
}
};
private void startRecording() {
DisplayManager dm = (DisplayManager) getSystemService(Context.DISPLAY_SERVICE);
Display defaultDisplay = dm.getDisplay(Display.DEFAULT_DISPLAY);
if (defaultDisplay == null) {
throw new RuntimeException("No display found.");
}
prepareVideoEncoder();
try {
mMuxer = new MediaMuxer(Environment.getExternalStoragePublicDirectory(Environment
.DIRECTORY_DOWNLOADS) + "/video.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
// Get the display size and density.
DisplayMetrics metrics = getResources().getDisplayMetrics();
int screenWidth = metrics.widthPixels;
int screenHeight = metrics.heightPixels;
int screenDensity = metrics.densityDpi;
// Start the video input.
mVirtualDisplay = mMediaProjection.createVirtualDisplay("Recording Display", screenWidth,
screenHeight, screenDensity, 0 /* flags */, mInputSurface,
null /* callback */, null /* handler */);
// Start the encoders
drainEncoder();
}
private void prepareVideoEncoder() {
mVideoBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
int frameRate = 15; // 15 fps
// Set some required properties. The media codec may fail if these aren't defined.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
//format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000); // 6Mbps
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
//format.setInteger(MediaFormat.KEY_CAPTURE_RATE, frameRate);
// format.setInteger(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 1000000 / frameRate);
//format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10); // 10 seconds between I-frames
// Create a MediaCodec encoder and configure it. Get a Surface we can use for recording into.
try {
mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mVideoEncoder.createInputSurface();
mVideoEncoder.start();
} catch (IOException e) {
releaseEncoders();
}
}
private void releaseEncoders() {
mDrainHandler.removeCallbacks(mDrainEncoderRunnable);
if (mMuxer != null) {
if (mMuxerStarted) {
mMuxer.stop();
}
mMuxer.release();
mMuxer = null;
mMuxerStarted = false;
}
if (mVideoEncoder != null) {
mVideoEncoder.stop();
mVideoEncoder.release();
mVideoEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
mVirtualDisplay = null;
}
if (mMediaProjection != null) {
mMediaProjection.stop();
mMediaProjection = null;
}
mVideoBufferInfo = null;
//mDrainEncoderRunnable = null;
mTrackIndex = -1;
}
private boolean drainEncoder() {
mDrainHandler.removeCallbacks(mDrainEncoderRunnable);
while (true) {
int bufferIndex = mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, 0);
if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// nothing available yet
break;
} else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mTrackIndex >= 0) {
throw new RuntimeException("format changed twice");
}
mTrackIndex = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
if (!mMuxerStarted && mTrackIndex >= 0) {
mMuxer.start();
mMuxerStarted = true;
}
} else if (bufferIndex < 0) {
// not sure what's going on, ignore it
} else {
ByteBuffer encodedData = mVideoEncoder.getOutputBuffer(bufferIndex);
if (encodedData == null) {
throw new RuntimeException("couldn't fetch buffer at index " + bufferIndex);
}
if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
mVideoBufferInfo.size = 0;
}
if (mVideoBufferInfo.size != 0) {
if (mMuxerStarted) {
encodedData.position(mVideoBufferInfo.offset);
encodedData.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mVideoBufferInfo);
} else {
// muxer not started
}
}
mVideoEncoder.releaseOutputBuffer(bufferIndex, false);
if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
Log.d(TAG, "Recording");
}
mDrainHandler.postDelayed(mDrainEncoderRunnable, 10);
return false;
}
public void onToggleScreenShare(View view) {
if (((ToggleButton) view).isChecked()) {
if (mMediaProjection == null) {
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
} else {
startRecording();
}
} else {
releaseEncoders();
}
}
}
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
This line of code requests permission to capture the screen. Every time my code calls it, a permission dialog is shown. If I tick "Don't show this again", it stops showing the dialog and grants the permission in the background. How can I request the permission only once and have it granted from then on, without relying on "Don't show this again"? The full code is given here:
public void onToggleScreenShare(View view) {
if (((ToggleButton) view).isChecked()) {
if (mMediaProjection == null) {
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
} else {
startRecording();
}
} else {
releaseEncoders();
}
}
In this method, startActivityForResult() prompts for the screen-capture permission. Whether it is granted or denied, control passes to onActivityResult():
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
Log.d(TAG, " requestCode " + requestCode + " resultCode " + requestCode);
if (REQUEST_CODE == requestCode) {
if (resultCode == RESULT_OK) {
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
startRecording(); // defined below
} else {
Log.d(TAG, "Persmission denied");
}
}
}
In this method we receive the Intent data and the resultCode. To keep using the MediaProjectionManager without requesting permission over and over, save a reference to the Intent and the value of the resultCode, and obtain the MediaProjection via this line of code:
mMediaProjection = mProjectionManager.getMediaProjection(saveResult, savedIntent);
That way it won't request permission again, since the permission has already been granted.
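A minimal sketch of that idea, using the saveResult and savedIntent names from the line above and assuming they live as fields in the same activity; note that newer Android versions may refuse to create a second MediaProjection from the same grant, so this reuse is version-dependent:
private int saveResult;
private Intent savedIntent;

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (REQUEST_CODE == requestCode && resultCode == RESULT_OK) {
        // Remember the grant so the projection can be recreated later
        // without prompting the user again.
        saveResult = resultCode;
        savedIntent = data;
        mMediaProjection = mProjectionManager.getMediaProjection(saveResult, savedIntent);
        startRecording();
    }
}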
I have integrated the zxing library in my app, and I used to call zxing via an intent:
Intent intent = new Intent("com.google.zxing.client.android.SCAN");
intent.putExtra("SCAN_MODE", "ONE_D_MODE");
intent.putExtra("SCAN_FORMATS", "CODE_39,CODE_93,CODE_128,DATA_MATRIX,ITF,CODABAR,EAN_13,EAN_8,UPC_A,QR_CODE");
startActivityForResult(intent, 1);
But com.google.zxing.client.android.SCAN lets the user choose a scanning app if other barcode scanners are installed, and I want a scan started from my app to always launch my zxing CaptureActivity.class, which I call via intent:
Intent intent = new Intent(this, CaptureActivity.class);
intent.putExtra("SCAN_MODE", "ONE_D_MODE");
intent.putExtra("SCAN_FORMATS", "CODE_39,CODE_93,CODE_128,DATA_MATRIX,ITF,CODABAR,EAN_13,EAN_8,UPC_A,QR_CODE");
startActivityForResult(intent, 1);
But when CaptureActivity detects a barcode, nothing happens; it doesn't come back to the previous activity.
Here is CaptureActivity:
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
private TextView statusView;
private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private IntentSource source;
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType,?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
}
@Override
protected void onResume() {
super.onResume();
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
resultView = findViewById(R.id.result_view);
statusView = (TextView) findViewById(R.id.status_view);
handler = null;
lastResult = null;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
// if (prefs.getBoolean(PreferencesActivity.KEY_DISABLE_AUTO_ORIENTATION, true)) {
// setRequestedOrientation(getCurrentOrientation());
// } else {
// setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE);
// }
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
}
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
if (customPromptMessage != null) {
statusView.setText(customPromptMessage);
}
} else if (dataString != null &&
dataString.contains("http://www.google") &&
dataString.contains("/m/products/scan")) {
// Scan only products and send the result to mobile Product Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
}
private int getCurrentOrientation() {
int rotation = getWindowManager().getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_90:
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
default:
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
}
}
private static boolean isZXingURL(String dataString) {
if (dataString == null) {
return false;
}
for (String url : ZXING_URLS) {
if (dataString.startsWith(url)) {
return true;
}
}
return false;
}
@Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
cameraManager.closeDriver();
if (!hasSurface) {
/* SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);*/
}
super.onPause();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_BACK:
if (source == IntentSource.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
finish();
return true;
}
if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
restartPreviewAfterDelay(0L);
return true;
}
break;
case KeyEvent.KEYCODE_FOCUS:
case KeyEvent.KEYCODE_CAMERA:
// Handle these events so they don't launch the Camera app
return true;
// Use volume up/down to turn on light
case KeyEvent.KEYCODE_VOLUME_DOWN:
cameraManager.setTorch(false);
return true;
case KeyEvent.KEYCODE_VOLUME_UP:
cameraManager.setTorch(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater menuInflater = getMenuInflater();
menuInflater.inflate(R.menu.capture, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
switch (item.getItemId()) {
case R.id.menu_share:
intent.setClassName(this, ShareActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_history:
intent.setClassName(this, HistoryActivity.class.getName());
startActivityForResult(intent, HISTORY_REQUEST_CODE);
break;
case R.id.menu_settings:
intent.setClassName(this, PreferencesActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_help:
intent.setClassName(this, HelpActivity.class.getName());
startActivity(intent);
break;
default:
return super.onOptionsItemSelected(item);
}
return true;
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == RESULT_OK) {
if (requestCode == HISTORY_REQUEST_CODE) {
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
}
}
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
// Bitmap isn't used yet -- will be used soon
if (handler == null) {
savedResultToShow = result;
} else {
if (result != null) {
savedResultToShow = result;
}
if (savedResultToShow != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
handler.sendMessage(message);
}
savedResultToShow = null;
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
/**
* A valid barcode has been found, so give an indication of success and show the results.
*
* @param rawResult The contents of the barcode.
* @param barcode A greyscale bitmap of the camera data which was decoded.
* @param scaleFactor amount by which thumbnail was scaled
*/
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
inactivityTimer.onActivity();
lastResult = rawResult;
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
boolean fromLiveScan = barcode != null;
if (fromLiveScan) {
historyManager.addHistoryItem(rawResult, resultHandler);
// Then not from history, so beep/vibrate and we have an image to draw on
beepManager.playBeepSoundAndVibrate();
//drawResultPoints(barcode, scaleFactor, rawResult);
}
switch (source) {
case NATIVE_APP_INTENT:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case PRODUCT_SEARCH_LINK:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case ZXING_LINK:
if (scanFromWebPageManager == null || !scanFromWebPageManager.isScanFromWebPage()) {
handleDecodeInternally(rawResult, resultHandler, barcode);
} else {
handleDecodeExternally(rawResult, resultHandler, barcode);
}
break;
case NONE:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (fromLiveScan && prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
Toast.makeText(getApplicationContext(),
getResources().getString(R.string.msg_bulk_mode_scanned) + " (" + rawResult.getText() + ')',
Toast.LENGTH_SHORT).show();
// Wait a moment or else it will scan the same barcode continuously about 3 times
// restartPreviewAfterDelay(BULK_MODE_SCAN_DELAY_MS);
} else {
handleDecodeInternally(rawResult, resultHandler, barcode);
}
break;
}
}
/**
* Superimpose a line for 1D or dots for 2D to highlight the key features of the barcode.
*
* @param barcode A bitmap of the captured image.
* @param scaleFactor amount by which thumbnail was scaled
* @param rawResult The decoded result which contains the points to draw.
*/
private void drawResultPoints(Bitmap barcode, float scaleFactor, Result rawResult) {
ResultPoint[] points = rawResult.getResultPoints();
if (points != null && points.length > 0) {
Canvas canvas = new Canvas(barcode);
Paint paint = new Paint();
paint.setColor(getResources().getColor(R.color.result_points));
if (points.length == 2) {
paint.setStrokeWidth(4.0f);
drawLine(canvas, paint, points[0], points[1], scaleFactor);
} else if (points.length == 4 &&
(rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A ||
rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13)) {
// Hacky special case -- draw two lines, for the barcode and metadata
drawLine(canvas, paint, points[0], points[1], scaleFactor);
drawLine(canvas, paint, points[2], points[3], scaleFactor);
} else {
paint.setStrokeWidth(10.0f);
for (ResultPoint point : points) {
if (point != null) {
canvas.drawPoint(scaleFactor * point.getX(), scaleFactor * point.getY(), paint);
}
}
}
}
}
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a != null && b != null) {
canvas.drawLine(scaleFactor * a.getX(),
scaleFactor * a.getY(),
scaleFactor * b.getX(),
scaleFactor * b.getY(),
paint);
}
}
// Put up our own UI for how to handle the decoded contents.
private void handleDecodeInternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
CharSequence displayContents = resultHandler.getDisplayContents();
if (copyToClipboard && !resultHandler.areContentsSecure()) {
ClipboardInterface.setText(displayContents, this);
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (resultHandler.getDefaultButtonID() != null && prefs.getBoolean(PreferencesActivity.KEY_AUTO_OPEN_WEB, false)) {
resultHandler.handleButtonPress(resultHandler.getDefaultButtonID());
return;
}
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
resultView.setVisibility(View.VISIBLE);
/*ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}*/
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formatter.format(new Date(rawResult.getTimestamp())));
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata = rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
contentsTextView.setText(displayContents);
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(
PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView,
resultHandler.getResult(),
historyManager,
this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
}
// Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
if (barcode != null) {
viewfinderView.drawResultBitmap(barcode);
}
long resultDurationMS;
if (getIntent() == null) {
resultDurationMS = 0;
} else {
resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS,
0);
}
if (resultDurationMS > 0) {
String rawResultString = String.valueOf(rawResult);
if (rawResultString.length() > 32) {
rawResultString = rawResultString.substring(0, 32) + " ...";
}
statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
}
if (copyToClipboard && !resultHandler.areContentsSecure()) {
CharSequence text = resultHandler.getDisplayContents();
ClipboardInterface.setText(text, this);
}
if (source == IntentSource.NATIVE_APP_INTENT) {
// Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
// the deprecated intent is retired.
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
setResult(Activity.RESULT_OK,intent);
byte[] rawBytes = rawResult.getRawBytes();
if (rawBytes != null && rawBytes.length > 0) {
intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
}
Map<ResultMetadataType,?> metadata = rawResult.getResultMetadata();
if (metadata != null) {
if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION,
metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
}
Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
if (orientation != null) {
intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
}
String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
if (ecLevel != null) {
intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
}
@SuppressWarnings("unchecked")
Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
if (byteSegments != null) {
int i = 0;
for (byte[] byteSegment : byteSegments) {
intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
i++;
}
}
}
sendReplyMessage(R.id.return_scan_result, intent, 0);
} else if (source == IntentSource.PRODUCT_SEARCH_LINK) {
// Reformulate the URL which triggered us into a query, so that the request goes to the same
// TLD as the scan URL.
int end = sourceUrl.lastIndexOf("/scan");
String replyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents() + "&source=zxing";
sendReplyMessage(R.id.launch_product_query, replyURL, 0);
} else if (source == IntentSource.ZXING_LINK) {
if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
String replyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
sendReplyMessage(R.id.launch_product_query, replyURL, 0);
}
}
}
private void sendReplyMessage(int id, Object arg, long delayMS) {
if (handler != null) {
Message message = Message.obtain(handler, id, arg);
if (delayMS > 0L) {
handler.sendMessageDelayed(message, delayMS);
} else {
handler.sendMessage(message);
}
}
}
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
displayFrameworkBugMessageAndExit();
}
}
private void displayFrameworkBugMessageAndExit() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.app_name));
builder.setMessage(getString(R.string.msg_camera_framework_bug));
builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
builder.setOnCancelListener(new FinishListener(this));
builder.show();
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
resultView.setVisibility(View.GONE);
statusView.setText(R.string.msg_default_status);
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
}
I set RESULT_OK in:
if (source == IntentSource.NATIVE_APP_INTENT) {
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
setResult(Activity.RESULT_OK, intent);
......
What am I doing wrong?
And one more question: when I start the scan with com.google.zxing.client.android.SCAN, the activity doesn't close immediately after a barcode is captured; it waits about 500-1000 milliseconds. How do I disable this?
Thanks for any answers.
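On the delay question, a possible lead based on the code above: handleDecodeExternally() reads the pause from the Intents.Scan.RESULT_DISPLAY_DURATION_MS extra, and the stock client falls back to DEFAULT_INTENT_RESULT_DURATION_MS (1500 ms, declared at the top of CaptureActivity). Passing 0 for that extra should make the activity return immediately; a sketch, assuming the same intent as earlier:
Intent intent = new Intent("com.google.zxing.client.android.SCAN");
intent.putExtra("SCAN_MODE", "ONE_D_MODE");
// Skip the post-decode result display entirely (the value must be a long).
intent.putExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS, 0L);
startActivityForResult(intent, 1);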
Just add the third line here when starting the intent, that's all; no change to the Zxing code is needed. These guys rock.
Intent intent = new Intent(getApplicationContext(), CaptureActivity.class);
intent.putExtra("SCAN_FORMATS", "QR_CODE,EAN_13,EAN_8,RSS_14,UPC_A,UPC_E,CODE_39,CODE_93,CODE_128,ITF,CODABAR,DATA_MATRIX");
intent.setAction(Intents.Scan.ACTION);
startActivityForResult(intent, 0);
By the way I'm using Zxing version 3.2.0
I'd just like to add the code that I used to receive the output from Zxing:
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == Activity.RESULT_OK) {
String contents = intent.getStringExtra(Intents.Scan.RESULT);
String formatName = intent.getStringExtra(Intents.Scan.RESULT_FORMAT);
tvStatus.setText(formatName);
tvScanResults.setText(contents + "\n\n" + formatName);
} else if (resultCode == Activity.RESULT_CANCELED) {
}
}